diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c151be6
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,238 @@
+# Virtualenv
+# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
+.Python
+[Bb]in
+[Ii]nclude
+[Ll]ib
+[Ll]ib64
+[Ll]ocal
+[Ss]cripts
+pyvenv.cfg
+.venv
+
+pip-selfcheck.json
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# SonarLint plugin
+.idea/sonarlint/
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+.idea/
\ No newline at end of file
diff --git a/.idea/.gitignore b/.idea/.gitignore
deleted file mode 100644
index 73f69e0..0000000
--- a/.idea/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
-# Datasource local storage ignored files
-/dataSources/
-/dataSources.local.xml
-# Editor-based HTTP Client requests
-/httpRequests/
diff --git a/.idea/AI_wozek_widlowy.iml b/.idea/AI_wozek_widlowy.iml
deleted file mode 100644
index 7eb1baf..0000000
--- a/.idea/AI_wozek_widlowy.iml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index 105ce2d..0000000
--- a/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
deleted file mode 100644
index d56657a..0000000
--- a/.idea/misc.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
deleted file mode 100644
index 5984f79..0000000
--- a/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
deleted file mode 100644
index 94a25f7..0000000
--- a/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/__pycache__/Environment.cpython-39.pyc b/__pycache__/Environment.cpython-39.pyc
deleted file mode 100644
index c20c4b3..0000000
Binary files a/__pycache__/Environment.cpython-39.pyc and /dev/null differ
diff --git a/__pycache__/Grid.cpython-39.pyc b/__pycache__/Grid.cpython-39.pyc
deleted file mode 100644
index 5199792..0000000
Binary files a/__pycache__/Grid.cpython-39.pyc and /dev/null differ
diff --git a/__pycache__/Package.cpython-39.pyc b/__pycache__/Package.cpython-39.pyc
deleted file mode 100644
index 90413c1..0000000
Binary files a/__pycache__/Package.cpython-39.pyc and /dev/null differ
diff --git a/__pycache__/Program.cpython-39.pyc b/__pycache__/Program.cpython-39.pyc
deleted file mode 100644
index d80d896..0000000
Binary files a/__pycache__/Program.cpython-39.pyc and /dev/null differ
diff --git a/__pycache__/Shelf.cpython-39.pyc b/__pycache__/Shelf.cpython-39.pyc
deleted file mode 100644
index 33185f7..0000000
Binary files a/__pycache__/Shelf.cpython-39.pyc and /dev/null differ
diff --git a/__pycache__/Truck.cpython-39.pyc b/__pycache__/Truck.cpython-39.pyc
deleted file mode 100644
index 7a6043c..0000000
Binary files a/__pycache__/Truck.cpython-39.pyc and /dev/null differ
diff --git a/venv/Include/site/python3.9/pygame/_camera.h b/venv/Include/site/python3.9/pygame/_camera.h
deleted file mode 100644
index 075ef6f..0000000
--- a/venv/Include/site/python3.9/pygame/_camera.h
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- pygame - Python Game Library
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Library General Public
- License as published by the Free Software Foundation; either
- version 2 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Library General Public License for more details.
-
- You should have received a copy of the GNU Library General Public
- License along with this library; if not, write to the Free
- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-*/
-
-#ifndef _CAMERA_H
-#define _CAMERA_H
-
-#include "_pygame.h"
-#include "camera.h"
-
-#endif
diff --git a/venv/Include/site/python3.9/pygame/_pygame.h b/venv/Include/site/python3.9/pygame/_pygame.h
deleted file mode 100644
index 23da37f..0000000
--- a/venv/Include/site/python3.9/pygame/_pygame.h
+++ /dev/null
@@ -1,326 +0,0 @@
-/*
- pygame - Python Game Library
- Copyright (C) 2000-2001 Pete Shinners
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Library General Public
- License as published by the Free Software Foundation; either
- version 2 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Library General Public License for more details.
-
- You should have received a copy of the GNU Library General Public
- License along with this library; if not, write to the Free
- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
- Pete Shinners
- pete@shinners.org
-*/
-
-/* This will use PYGAMEAPI_EXTERN_SLOTS instead
- * of PYGAMEAPI_DEFINE_SLOTS for base modules.
- */ -#ifndef _PYGAME_INTERNAL_H -#define _PYGAME_INTERNAL_H - -#include "pgplatform.h" -/* - If PY_SSIZE_T_CLEAN is defined before including Python.h, length is a - Py_ssize_t rather than an int for all # variants of formats (s#, y#, etc.) -*/ -#define PY_SSIZE_T_CLEAN -#include - -/* Ensure PyPy-specific code is not in use when running on GraalPython (PR - * #2580) */ -#if defined(GRAALVM_PYTHON) && defined(PYPY_VERSION) -#undef PYPY_VERSION -#endif - -#include - -/* SDL 1.2 constants removed from SDL 2 */ -typedef enum { - SDL_HWSURFACE = 0, - SDL_RESIZABLE = SDL_WINDOW_RESIZABLE, - SDL_ASYNCBLIT = 0, - SDL_OPENGL = SDL_WINDOW_OPENGL, - SDL_OPENGLBLIT = 0, - SDL_ANYFORMAT = 0, - SDL_HWPALETTE = 0, - SDL_DOUBLEBUF = 0, - SDL_FULLSCREEN = SDL_WINDOW_FULLSCREEN, - SDL_HWACCEL = 0, - SDL_SRCCOLORKEY = 0, - SDL_RLEACCELOK = 0, - SDL_SRCALPHA = 0, - SDL_NOFRAME = SDL_WINDOW_BORDERLESS, - SDL_GL_SWAP_CONTROL = 0, - TIMER_RESOLUTION = 0 -} PygameVideoFlags; - -/* the wheel button constants were removed from SDL 2 */ -typedef enum { - PGM_BUTTON_LEFT = SDL_BUTTON_LEFT, - PGM_BUTTON_RIGHT = SDL_BUTTON_RIGHT, - PGM_BUTTON_MIDDLE = SDL_BUTTON_MIDDLE, - PGM_BUTTON_WHEELUP = 4, - PGM_BUTTON_WHEELDOWN = 5, - PGM_BUTTON_X1 = SDL_BUTTON_X1 + 2, - PGM_BUTTON_X2 = SDL_BUTTON_X2 + 2, - PGM_BUTTON_KEEP = 0x80 -} PygameMouseFlags; - -typedef enum { - /* Any SDL_* events here are for backward compatibility. */ - SDL_NOEVENT = 0, - - SDL_ACTIVEEVENT = SDL_USEREVENT, - SDL_VIDEORESIZE, - SDL_VIDEOEXPOSE, - - PGE_MIDIIN, - PGE_MIDIOUT, - PGE_KEYREPEAT, /* Special internal pygame event, for managing key-presses - */ - - /* DO NOT CHANGE THE ORDER OF EVENTS HERE */ - PGE_WINDOWSHOWN, - PGE_WINDOWHIDDEN, - PGE_WINDOWEXPOSED, - PGE_WINDOWMOVED, - PGE_WINDOWRESIZED, - PGE_WINDOWSIZECHANGED, - PGE_WINDOWMINIMIZED, - PGE_WINDOWMAXIMIZED, - PGE_WINDOWRESTORED, - PGE_WINDOWENTER, - PGE_WINDOWLEAVE, - PGE_WINDOWFOCUSGAINED, - PGE_WINDOWFOCUSLOST, - PGE_WINDOWCLOSE, - PGE_WINDOWTAKEFOCUS, - PGE_WINDOWHITTEST, - - /* Here we define PGPOST_* events, events that act as a one-to-one - * proxy for SDL events (and some extra events too!), the proxy is used - * internally when pygame users use event.post() - * - * At a first glance, these may look redundant, but they are really - * important, especially with event blocking. If proxy events are - * not there, blocked events dont make it to our event filter, and - * that can break a lot of stuff. - * - * IMPORTANT NOTE: Do not post events directly with these proxy types, - * use the appropriate functions from event.c, that handle these proxy - * events for you. 
- * Proxy events are for internal use only */ - PGPOST_EVENTBEGIN, /* mark start of proxy-events */ - PGPOST_ACTIVEEVENT = PGPOST_EVENTBEGIN, - PGPOST_AUDIODEVICEADDED, - PGPOST_AUDIODEVICEREMOVED, - PGPOST_CONTROLLERAXISMOTION, - PGPOST_CONTROLLERBUTTONDOWN, - PGPOST_CONTROLLERBUTTONUP, - PGPOST_CONTROLLERDEVICEADDED, - PGPOST_CONTROLLERDEVICEREMOVED, - PGPOST_CONTROLLERDEVICEREMAPPED, - PGPOST_CONTROLLERTOUCHPADDOWN, - PGPOST_CONTROLLERTOUCHPADMOTION, - PGPOST_CONTROLLERTOUCHPADUP, - PGPOST_DOLLARGESTURE, - PGPOST_DOLLARRECORD, - PGPOST_DROPFILE, - PGPOST_DROPTEXT, - PGPOST_DROPBEGIN, - PGPOST_DROPCOMPLETE, - PGPOST_FINGERMOTION, - PGPOST_FINGERDOWN, - PGPOST_FINGERUP, - PGPOST_KEYDOWN, - PGPOST_KEYUP, - PGPOST_JOYAXISMOTION, - PGPOST_JOYBALLMOTION, - PGPOST_JOYHATMOTION, - PGPOST_JOYBUTTONDOWN, - PGPOST_JOYBUTTONUP, - PGPOST_JOYDEVICEADDED, - PGPOST_JOYDEVICEREMOVED, - PGPOST_MIDIIN, - PGPOST_MIDIOUT, - PGPOST_MOUSEMOTION, - PGPOST_MOUSEBUTTONDOWN, - PGPOST_MOUSEBUTTONUP, - PGPOST_MOUSEWHEEL, - PGPOST_MULTIGESTURE, - PGPOST_NOEVENT, - PGPOST_QUIT, - PGPOST_SYSWMEVENT, - PGPOST_TEXTEDITING, - PGPOST_TEXTINPUT, - PGPOST_VIDEORESIZE, - PGPOST_VIDEOEXPOSE, - PGPOST_WINDOWSHOWN, - PGPOST_WINDOWHIDDEN, - PGPOST_WINDOWEXPOSED, - PGPOST_WINDOWMOVED, - PGPOST_WINDOWRESIZED, - PGPOST_WINDOWSIZECHANGED, - PGPOST_WINDOWMINIMIZED, - PGPOST_WINDOWMAXIMIZED, - PGPOST_WINDOWRESTORED, - PGPOST_WINDOWENTER, - PGPOST_WINDOWLEAVE, - PGPOST_WINDOWFOCUSGAINED, - PGPOST_WINDOWFOCUSLOST, - PGPOST_WINDOWCLOSE, - PGPOST_WINDOWTAKEFOCUS, - PGPOST_WINDOWHITTEST, - - PGE_USEREVENT, /* this event must stay in this position only */ - - PG_NUMEVENTS = - SDL_LASTEVENT /* Not an event. Indicates end of user events. */ -} PygameEventCode; - -typedef enum { - SDL_APPFOCUSMOUSE, - SDL_APPINPUTFOCUS, - SDL_APPACTIVE -} PygameAppCode; - -/* Surface flags: based on SDL 1.2 flags */ -typedef enum { - PGS_SWSURFACE = 0x00000000, - PGS_HWSURFACE = 0x00000001, - PGS_ASYNCBLIT = 0x00000004, - - PGS_ANYFORMAT = 0x10000000, - PGS_HWPALETTE = 0x20000000, - PGS_DOUBLEBUF = 0x40000000, - PGS_FULLSCREEN = 0x80000000, - PGS_SCALED = 0x00000200, - - PGS_OPENGL = 0x00000002, - PGS_OPENGLBLIT = 0x0000000A, - PGS_RESIZABLE = 0x00000010, - PGS_NOFRAME = 0x00000020, - PGS_SHOWN = 0x00000040, /* Added from SDL 2 */ - PGS_HIDDEN = 0x00000080, /* Added from SDL 2 */ - - PGS_HWACCEL = 0x00000100, - PGS_SRCCOLORKEY = 0x00001000, - PGS_RLEACCELOK = 0x00002000, - PGS_RLEACCEL = 0x00004000, - PGS_SRCALPHA = 0x00010000, - PGS_PREALLOC = 0x01000000 -} PygameSurfaceFlags; - -// TODO Implement check below in a way that does not break CI -/* New buffer protocol (PEP 3118) implemented on all supported Py versions. -#if !defined(Py_TPFLAGS_HAVE_NEWBUFFER) -#error No support for PEP 3118/Py_TPFLAGS_HAVE_NEWBUFFER. Please use a -supported Python version. 
#endif */ - -#define RAISE(x, y) (PyErr_SetString((x), (y)), (PyObject *)NULL) -#define DEL_ATTR_NOT_SUPPORTED_CHECK(name, value) \ - do { \ - if (!value) { \ - if (name) { \ - PyErr_Format(PyExc_AttributeError, \ - "Cannot delete attribute %s", name); \ - } \ - else { \ - PyErr_SetString(PyExc_AttributeError, \ - "Cannot delete attribute"); \ - } \ - return -1; \ - } \ - } while (0) - -/* - * Initialization checks - */ - -#define VIDEO_INIT_CHECK() \ - if (!SDL_WasInit(SDL_INIT_VIDEO)) \ - return RAISE(pgExc_SDLError, "video system not initialized") - -#define CDROM_INIT_CHECK() \ - if (!SDL_WasInit(SDL_INIT_CDROM)) \ - return RAISE(pgExc_SDLError, "cdrom system not initialized") - -#define JOYSTICK_INIT_CHECK() \ - if (!SDL_WasInit(SDL_INIT_JOYSTICK)) \ - return RAISE(pgExc_SDLError, "joystick system not initialized") - -/* thread check */ -#ifdef WITH_THREAD -#define PG_CHECK_THREADS() (1) -#else /* ~WITH_THREAD */ -#define PG_CHECK_THREADS() \ - (RAISE(PyExc_NotImplementedError, "Python built without thread support")) -#endif /* ~WITH_THREAD */ - -#define PyType_Init(x) (((x).ob_type) = &PyType_Type) - -/* - * event module internals - */ -struct pgEventObject { - PyObject_HEAD int type; - PyObject *dict; -}; - -/* - * surflock module internals - */ -typedef struct { - PyObject_HEAD PyObject *surface; - PyObject *lockobj; - PyObject *weakrefs; -} pgLifetimeLockObject; - -/* - * surface module internals - */ -struct pgSubSurface_Data { - PyObject *owner; - int pixeloffset; - int offsetx, offsety; -}; - -/* - * color module internals - */ -struct pgColorObject { - PyObject_HEAD Uint8 data[4]; - Uint8 len; -}; - -/* - * include public API - */ -#include "include/_pygame.h" - -/* Slot counts. - * Remember to keep these constants up to date. - */ - -#define PYGAMEAPI_RECT_NUMSLOTS 5 -#define PYGAMEAPI_JOYSTICK_NUMSLOTS 2 -#define PYGAMEAPI_DISPLAY_NUMSLOTS 2 -#define PYGAMEAPI_SURFACE_NUMSLOTS 4 -#define PYGAMEAPI_SURFLOCK_NUMSLOTS 8 -#define PYGAMEAPI_RWOBJECT_NUMSLOTS 7 -#define PYGAMEAPI_PIXELARRAY_NUMSLOTS 2 -#define PYGAMEAPI_COLOR_NUMSLOTS 5 -#define PYGAMEAPI_MATH_NUMSLOTS 2 -#define PYGAMEAPI_CDROM_NUMSLOTS 2 -#define PYGAMEAPI_BASE_NUMSLOTS 24 -#define PYGAMEAPI_EVENT_NUMSLOTS 6 - -#endif /* _PYGAME_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/_surface.h b/venv/Include/site/python3.9/pygame/_surface.h deleted file mode 100644 index b2b4644..0000000 --- a/venv/Include/site/python3.9/pygame/_surface.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - Copyright (C) 2007 Marcus von Appen - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. 
- - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -#ifndef _SURFACE_H -#define _SURFACE_H - -#include "_pygame.h" -#include "surface.h" - -#endif diff --git a/venv/Include/site/python3.9/pygame/camera.h b/venv/Include/site/python3.9/pygame/camera.h deleted file mode 100644 index 6806dfe..0000000 --- a/venv/Include/site/python3.9/pygame/camera.h +++ /dev/null @@ -1,252 +0,0 @@ -#ifndef CAMERA_H -#define CAMERA_H -/* - pygame - Python Game Library - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -*/ - -#include "pygame.h" -#include "pgcompat.h" -#include "doc/camera_doc.h" - -#if defined(__unix__) -#include -#include -#include -#include -#include - -#include /* low-level i/o */ -#include -#include -#include -#include -#include -#include -#include - -/* on freebsd there is no asm/types */ -#ifdef linux -#include /* for videodev2.h */ -#endif - -#include -#endif - -#if defined(__WIN32__) -#define PYGAME_WINDOWS_CAMERA 1 - -#include -#include -#include -#include -#include -#include -#endif - -/* some constants used which are not defined on non-v4l machines. */ -#ifndef V4L2_PIX_FMT_RGB24 -#define V4L2_PIX_FMT_RGB24 'RGB3' -#endif -#ifndef V4L2_PIX_FMT_RGB444 -#define V4L2_PIX_FMT_RGB444 'R444' -#endif -#ifndef V4L2_PIX_FMT_YUYV -#define V4L2_PIX_FMT_YUYV 'YUYV' -#endif -#ifndef V4L2_PIX_FMT_XBGR32 -#define V4L2_PIX_FMT_XBGR32 'XR24' -#endif - -#define CLEAR(x) memset(&(x), 0, sizeof(x)) -#define SAT(c) \ - if (c & (~255)) { \ - if (c < 0) \ - c = 0; \ - else \ - c = 255; \ - } -#define SAT2(c) ((c) & (~255) ? ((c) < 0 ? 0 : 255) : (c)) -#define DEFAULT_WIDTH 640 -#define DEFAULT_HEIGHT 480 -#define RGB_OUT 1 -#define YUV_OUT 2 -#define HSV_OUT 4 -#define CAM_V4L \ - 1 /* deprecated. 
the incomplete support in pygame was removed */ -#define CAM_V4L2 2 - -struct buffer { - void *start; - size_t length; -}; - -#if defined(__unix__) -typedef struct pgCameraObject { - PyObject_HEAD char *device_name; - int camera_type; - unsigned long pixelformat; - unsigned int color_out; - struct buffer *buffers; - unsigned int n_buffers; - int width; - int height; - int size; - int hflip; - int vflip; - int brightness; - int fd; -} pgCameraObject; -#elif defined(PYGAME_WINDOWS_CAMERA) -typedef struct pgCameraObject { - PyObject_HEAD WCHAR *device_name; - IMFSourceReader *reader; - IMFTransform *transform; - IMFVideoProcessorControl *control; - IMFMediaBuffer *buf; - IMFMediaBuffer *raw_buf; - int buffer_ready; - short open; /* used to signal the update_function to exit */ - HANDLE t_handle; - HRESULT t_error; - int t_error_line; - int width; - int height; - int hflip; - int vflip; - int last_vflip; - int color_out; - unsigned long pixelformat; -} pgCameraObject; - -#else -/* generic definition. - */ - -typedef struct pgCameraObject { - PyObject_HEAD char *device_name; - int camera_type; - unsigned long pixelformat; - unsigned int color_out; - struct buffer *buffers; - unsigned int n_buffers; - int width; - int height; - int size; - int hflip; - int vflip; - int brightness; - int fd; -} pgCameraObject; -#endif - -/* internal functions for colorspace conversion */ -void -colorspace(SDL_Surface *src, SDL_Surface *dst, int cspace); -void -rgb24_to_rgb(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -bgr32_to_rgb(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -rgb444_to_rgb(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -rgb_to_yuv(const void *src, void *dst, int length, unsigned long source, - SDL_PixelFormat *format); -void -rgb_to_hsv(const void *src, void *dst, int length, unsigned long source, - SDL_PixelFormat *format); -void -yuyv_to_rgb(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -yuyv_to_yuv(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -uyvy_to_rgb(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -uyvy_to_yuv(const void *src, void *dst, int length, SDL_PixelFormat *format); -void -sbggr8_to_rgb(const void *src, void *dst, int width, int height, - SDL_PixelFormat *format); -void -yuv420_to_rgb(const void *src, void *dst, int width, int height, - SDL_PixelFormat *format); -void -yuv420_to_yuv(const void *src, void *dst, int width, int height, - SDL_PixelFormat *format); - -#if defined(__unix__) -/* internal functions specific to v4l2 */ -char ** -v4l2_list_cameras(int *num_devices); -int -v4l2_get_control(int fd, int id, int *value); -int -v4l2_set_control(int fd, int id, int value); -PyObject * -v4l2_read_raw(pgCameraObject *self); -int -v4l2_xioctl(int fd, int request, void *arg); -int -v4l2_process_image(pgCameraObject *self, const void *image, - unsigned int buffer_size, SDL_Surface *surf); -int -v4l2_query_buffer(pgCameraObject *self); -int -v4l2_read_frame(pgCameraObject *self, SDL_Surface *surf); -int -v4l2_stop_capturing(pgCameraObject *self); -int -v4l2_start_capturing(pgCameraObject *self); -int -v4l2_uninit_device(pgCameraObject *self); -int -v4l2_init_mmap(pgCameraObject *self); -int -v4l2_init_device(pgCameraObject *self); -int -v4l2_close_device(pgCameraObject *self); -int -v4l2_open_device(pgCameraObject *self); - -#elif defined(PYGAME_WINDOWS_CAMERA) -/* internal functions specific to WINDOWS */ -WCHAR ** 
-windows_list_cameras(int *num_devices); -int -windows_init_device(pgCameraObject *self); -int -windows_open_device(pgCameraObject *self); -IMFActivate * -windows_device_from_name(WCHAR *device_name); -int -windows_close_device(pgCameraObject *self); -int -windows_read_frame(pgCameraObject *self, SDL_Surface *surf); -int -windows_frame_ready(pgCameraObject *self, int *result); -PyObject * -windows_read_raw(pgCameraObject *self); -int -windows_process_image(pgCameraObject *self, BYTE *data, DWORD buffer_size, - SDL_Surface *surf); -void -windows_dealloc_device(pgCameraObject *self); -int -windows_init_device(pgCameraObject *self); - -#endif - -#endif /* !CAMERA_H */ diff --git a/venv/Include/site/python3.9/pygame/font.h b/venv/Include/site/python3.9/pygame/font.h deleted file mode 100644 index f5eedb2..0000000 --- a/venv/Include/site/python3.9/pygame/font.h +++ /dev/null @@ -1,15 +0,0 @@ -#ifndef PGFONT_INTERNAL_H -#define PGFONT_INTERNAL_H - -#include - -/* test font initialization */ -#define FONT_INIT_CHECK() \ - if (!(*(int *)PyFONT_C_API[2])) \ - return RAISE(pgExc_SDLError, "font system not initialized") - -#include "include/pygame_font.h" - -#define PYGAMEAPI_FONT_NUMSLOTS 3 - -#endif /* ~PGFONT_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/freetype.h b/venv/Include/site/python3.9/pygame/freetype.h deleted file mode 100644 index fd86bc2..0000000 --- a/venv/Include/site/python3.9/pygame/freetype.h +++ /dev/null @@ -1,114 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2009 Vicent Marti - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. 
- - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -*/ -#ifndef _PYGAME_FREETYPE_INTERNAL_H_ -#define _PYGAME_FREETYPE_INTERNAL_H_ - -#include "pgcompat.h" -#include "pgplatform.h" - -#include -#include FT_FREETYPE_H -#include FT_CACHE_H -#include FT_XFREE86_H -#include FT_TRIGONOMETRY_H - -/********************************************************** - * Global module constants - **********************************************************/ - -/* Render styles */ -#define FT_STYLE_NORMAL 0x00 -#define FT_STYLE_STRONG 0x01 -#define FT_STYLE_OBLIQUE 0x02 -#define FT_STYLE_UNDERLINE 0x04 -#define FT_STYLE_WIDE 0x08 -#define FT_STYLE_DEFAULT 0xFF - -/* Bounding box modes */ -#define FT_BBOX_EXACT FT_GLYPH_BBOX_SUBPIXELS -#define FT_BBOX_EXACT_GRIDFIT FT_GLYPH_BBOX_GRIDFIT -#define FT_BBOX_PIXEL FT_GLYPH_BBOX_TRUNCATE -#define FT_BBOX_PIXEL_GRIDFIT FT_GLYPH_BBOX_PIXELS - -/* Rendering flags */ -#define FT_RFLAG_NONE (0) -#define FT_RFLAG_ANTIALIAS (1 << 0) -#define FT_RFLAG_AUTOHINT (1 << 1) -#define FT_RFLAG_VERTICAL (1 << 2) -#define FT_RFLAG_HINTED (1 << 3) -#define FT_RFLAG_KERNING (1 << 4) -#define FT_RFLAG_TRANSFORM (1 << 5) -#define FT_RFLAG_PAD (1 << 6) -#define FT_RFLAG_ORIGIN (1 << 7) -#define FT_RFLAG_UCS4 (1 << 8) -#define FT_RFLAG_USE_BITMAP_STRIKES (1 << 9) -#define FT_RFLAG_DEFAULTS \ - (FT_RFLAG_HINTED | FT_RFLAG_USE_BITMAP_STRIKES | FT_RFLAG_ANTIALIAS) - -#define FT_RENDER_NEWBYTEARRAY 0x0 -#define FT_RENDER_NEWSURFACE 0x1 -#define FT_RENDER_EXISTINGSURFACE 0x2 - -/********************************************************** - * Global module types - **********************************************************/ - -typedef struct _scale_s { - FT_UInt x, y; -} Scale_t; -typedef FT_Angle Angle_t; - -struct fontinternals_; -struct freetypeinstance_; - -typedef struct { - FT_Long font_index; - FT_Open_Args open_args; -} pgFontId; - -typedef struct { - PyObject_HEAD pgFontId id; - PyObject *path; - int is_scalable; - int is_bg_col_set; - - Scale_t face_size; - FT_Int16 style; - FT_Int16 render_flags; - double strength; - double underline_adjustment; - FT_UInt resolution; - Angle_t rotation; - FT_Matrix transform; - FT_Byte fgcolor[4]; - FT_Byte bgcolor[4]; - - struct freetypeinstance_ *freetype; /* Personal reference */ - struct fontinternals_ *_internals; -} pgFontObject; - -#define pgFont_IS_ALIVE(o) (((pgFontObject *)(o))->_internals != 0) - -/* import public API */ -#include "include/pygame_freetype.h" - -#define PYGAMEAPI_FREETYPE_NUMSLOTS 2 - -#endif /* ~_PYGAME_FREETYPE_INTERNAL_H_ */ diff --git a/venv/Include/site/python3.9/pygame/include/_pygame.h b/venv/Include/site/python3.9/pygame/include/_pygame.h deleted file mode 100644 index 9817f96..0000000 --- a/venv/Include/site/python3.9/pygame/include/_pygame.h +++ /dev/null @@ -1,497 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. 
- - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -#ifndef _PYGAME_H -#define _PYGAME_H - -/** This header file includes all the definitions for the - ** base pygame extensions. This header only requires - ** Python includes (and SDL.h for functions that use SDL types). - ** The reason for functions prototyped with #define's is - ** to allow for maximum Python portability. It also uses - ** Python as the runtime linker, which allows for late binding. - '' For more information on this style of development, read - ** the Python docs on this subject. - ** http://www.python.org/doc/current/ext/using-cobjects.html - ** - ** If using this to build your own derived extensions, - ** you'll see that the functions available here are mainly - ** used to help convert between python objects and SDL objects. - ** Since this library doesn't add a lot of functionality to - ** the SDL library, it doesn't need to offer a lot either. - ** - ** When initializing your extension module, you must manually - ** import the modules you want to use. (this is the part about - ** using python as the runtime linker). Each module has its - ** own import_xxx() routine. You need to perform this import - ** after you have initialized your own module, and before - ** you call any routines from that module. Since every module - ** in pygame does this, there are plenty of examples. - ** - ** The base module does include some useful conversion routines - ** that you are free to use in your own extension. - **/ - -#include "pgplatform.h" -#include - -/* version macros (defined since version 1.9.5) */ -#define PG_MAJOR_VERSION 2 -#define PG_MINOR_VERSION 1 -#define PG_PATCH_VERSION 2 -#define PG_VERSIONNUM(MAJOR, MINOR, PATCH) \ - (1000 * (MAJOR) + 100 * (MINOR) + (PATCH)) -#define PG_VERSION_ATLEAST(MAJOR, MINOR, PATCH) \ - (PG_VERSIONNUM(PG_MAJOR_VERSION, PG_MINOR_VERSION, PG_PATCH_VERSION) >= \ - PG_VERSIONNUM(MAJOR, MINOR, PATCH)) - -#include "pgcompat.h" - -/* Flag indicating a pg_buffer; used for assertions within callbacks */ -#ifndef NDEBUG -#define PyBUF_PYGAME 0x4000 -#endif -#define PyBUF_HAS_FLAG(f, F) (((f) & (F)) == (F)) - -/* Array information exchange struct C type; inherits from Py_buffer - * - * Pygame uses its own Py_buffer derived C struct as an internal representation - * of an imported array buffer. 
The extended Py_buffer allows for a - * per-instance release callback, - */ -typedef void (*pybuffer_releaseproc)(Py_buffer *); - -typedef struct pg_bufferinfo_s { - Py_buffer view; - PyObject *consumer; /* Input: Borrowed reference */ - pybuffer_releaseproc release_buffer; -} pg_buffer; - -#include "pgimport.h" - -/* - * BASE module - */ -#ifndef PYGAMEAPI_BASE_INTERNAL -#define pgExc_SDLError ((PyObject *)PYGAMEAPI_GET_SLOT(base, 0)) - -#define pg_RegisterQuit \ - (*(void (*)(void (*)(void)))PYGAMEAPI_GET_SLOT(base, 1)) - -#define pg_IntFromObj \ - (*(int (*)(PyObject *, int *))PYGAMEAPI_GET_SLOT(base, 2)) - -#define pg_IntFromObjIndex \ - (*(int (*)(PyObject *, int, int *))PYGAMEAPI_GET_SLOT(base, 3)) - -#define pg_TwoIntsFromObj \ - (*(int (*)(PyObject *, int *, int *))PYGAMEAPI_GET_SLOT(base, 4)) - -#define pg_FloatFromObj \ - (*(int (*)(PyObject *, float *))PYGAMEAPI_GET_SLOT(base, 5)) - -#define pg_FloatFromObjIndex \ - (*(int (*)(PyObject *, int, float *))PYGAMEAPI_GET_SLOT(base, 6)) - -#define pg_TwoFloatsFromObj \ - (*(int (*)(PyObject *, float *, float *))PYGAMEAPI_GET_SLOT(base, 7)) - -#define pg_UintFromObj \ - (*(int (*)(PyObject *, Uint32 *))PYGAMEAPI_GET_SLOT(base, 8)) - -#define pg_UintFromObjIndex \ - (*(int (*)(PyObject *, int, Uint32 *))PYGAMEAPI_GET_SLOT(base, 9)) - -#define pg_mod_autoinit (*(int (*)(const char *))PYGAMEAPI_GET_SLOT(base, 10)) - -#define pg_mod_autoquit (*(void (*)(const char *))PYGAMEAPI_GET_SLOT(base, 11)) - -#define pg_RGBAFromObj \ - (*(int (*)(PyObject *, Uint8 *))PYGAMEAPI_GET_SLOT(base, 12)) - -#define pgBuffer_AsArrayInterface \ - (*(PyObject * (*)(Py_buffer *)) PYGAMEAPI_GET_SLOT(base, 13)) - -#define pgBuffer_AsArrayStruct \ - (*(PyObject * (*)(Py_buffer *)) PYGAMEAPI_GET_SLOT(base, 14)) - -#define pgObject_GetBuffer \ - (*(int (*)(PyObject *, pg_buffer *, int))PYGAMEAPI_GET_SLOT(base, 15)) - -#define pgBuffer_Release (*(void (*)(pg_buffer *))PYGAMEAPI_GET_SLOT(base, 16)) - -#define pgDict_AsBuffer \ - (*(int (*)(pg_buffer *, PyObject *, int))PYGAMEAPI_GET_SLOT(base, 17)) - -#define pgExc_BufferError ((PyObject *)PYGAMEAPI_GET_SLOT(base, 18)) - -#define pg_GetDefaultWindow \ - (*(SDL_Window * (*)(void)) PYGAMEAPI_GET_SLOT(base, 19)) - -#define pg_SetDefaultWindow \ - (*(void (*)(SDL_Window *))PYGAMEAPI_GET_SLOT(base, 20)) - -#define pg_GetDefaultWindowSurface \ - (*(pgSurfaceObject * (*)(void)) PYGAMEAPI_GET_SLOT(base, 21)) - -#define pg_SetDefaultWindowSurface \ - (*(void (*)(pgSurfaceObject *))PYGAMEAPI_GET_SLOT(base, 22)) - -#define pg_EnvShouldBlendAlphaSDL2 \ - (*(char *(*)(void))PYGAMEAPI_GET_SLOT(base, 23)) - -#define import_pygame_base() IMPORT_PYGAME_MODULE(base) -#endif /* ~PYGAMEAPI_BASE_INTERNAL */ - -typedef struct { - PyObject_HEAD SDL_Rect r; - PyObject *weakreflist; -} pgRectObject; - -#define pgRect_AsRect(x) (((pgRectObject *)x)->r) -#ifndef PYGAMEAPI_RECT_INTERNAL -#define pgRect_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(rect, 0)) - -#define pgRect_Check(x) ((x)->ob_type == &pgRect_Type) -#define pgRect_New (*(PyObject * (*)(SDL_Rect *)) PYGAMEAPI_GET_SLOT(rect, 1)) - -#define pgRect_New4 \ - (*(PyObject * (*)(int, int, int, int)) PYGAMEAPI_GET_SLOT(rect, 2)) - -#define pgRect_FromObject \ - (*(SDL_Rect * (*)(PyObject *, SDL_Rect *)) PYGAMEAPI_GET_SLOT(rect, 3)) - -#define pgRect_Normalize (*(void (*)(SDL_Rect *))PYGAMEAPI_GET_SLOT(rect, 4)) - -#define import_pygame_rect() IMPORT_PYGAME_MODULE(rect) -#endif /* ~PYGAMEAPI_RECT_INTERNAL */ - -/* - * CDROM module - */ - -typedef struct { - PyObject_HEAD int id; -} pgCDObject; 
- -#define pgCD_AsID(x) (((pgCDObject *)x)->id) -#ifndef PYGAMEAPI_CDROM_INTERNAL -#define pgCD_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(cdrom, 0)) - -#define pgCD_Check(x) ((x)->ob_type == &pgCD_Type) -#define pgCD_New (*(PyObject * (*)(int)) PYGAMEAPI_GET_SLOT(cdrom, 1)) - -#define import_pygame_cd() IMPORT_PYGAME_MODULE(cdrom) -#endif - -/* - * JOYSTICK module - */ -typedef struct pgJoystickObject { - PyObject_HEAD int id; - SDL_Joystick *joy; - - /* Joysticks form an intrusive linked list. - * - * Note that we don't maintain refcounts for these so they are weakrefs - * from the Python side. - */ - struct pgJoystickObject *next; - struct pgJoystickObject *prev; -} pgJoystickObject; - -#define pgJoystick_AsID(x) (((pgJoystickObject *)x)->id) -#define pgJoystick_AsSDL(x) (((pgJoystickObject *)x)->joy) - -#ifndef PYGAMEAPI_JOYSTICK_INTERNAL -#define pgJoystick_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(joystick, 0)) - -#define pgJoystick_Check(x) ((x)->ob_type == &pgJoystick_Type) -#define pgJoystick_New (*(PyObject * (*)(int)) PYGAMEAPI_GET_SLOT(joystick, 1)) - -#define import_pygame_joystick() IMPORT_PYGAME_MODULE(joystick) -#endif - -/* - * DISPLAY module - */ - -typedef struct { - Uint32 hw_available : 1; - Uint32 wm_available : 1; - Uint32 blit_hw : 1; - Uint32 blit_hw_CC : 1; - Uint32 blit_hw_A : 1; - Uint32 blit_sw : 1; - Uint32 blit_sw_CC : 1; - Uint32 blit_sw_A : 1; - Uint32 blit_fill : 1; - Uint32 video_mem; - SDL_PixelFormat *vfmt; - SDL_PixelFormat vfmt_data; - int current_w; - int current_h; -} pg_VideoInfo; - -typedef struct { - PyObject_HEAD pg_VideoInfo info; -} pgVidInfoObject; - -#define pgVidInfo_AsVidInfo(x) (((pgVidInfoObject *)x)->info) - -#ifndef PYGAMEAPI_DISPLAY_INTERNAL -#define pgVidInfo_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(display, 0)) - -#define pgVidInfo_Check(x) ((x)->ob_type == &pgVidInfo_Type) -#define pgVidInfo_New \ - (*(PyObject * (*)(pg_VideoInfo *)) PYGAMEAPI_GET_SLOT(display, 1)) - -#define import_pygame_display() IMPORT_PYGAME_MODULE(display) -#endif /* ~PYGAMEAPI_DISPLAY_INTERNAL */ - -/* - * SURFACE module - */ -struct pgSubSurface_Data; -struct SDL_Surface; - -typedef struct { - PyObject_HEAD struct SDL_Surface *surf; - int owner; - struct pgSubSurface_Data *subsurface; /* ptr to subsurface data (if a - * subsurface)*/ - PyObject *weakreflist; - PyObject *locklist; - PyObject *dependency; -} pgSurfaceObject; -#define pgSurface_AsSurface(x) (((pgSurfaceObject *)x)->surf) - -#ifndef PYGAMEAPI_SURFACE_INTERNAL -#define pgSurface_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(surface, 0)) - -#define pgSurface_Check(x) \ - (PyObject_IsInstance((x), (PyObject *)&pgSurface_Type)) -#define pgSurface_New2 \ - (*(pgSurfaceObject * (*)(SDL_Surface *, int)) \ - PYGAMEAPI_GET_SLOT(surface, 1)) - -#define pgSurface_SetSurface \ - (*(int (*)(pgSurfaceObject *, SDL_Surface *, int))PYGAMEAPI_GET_SLOT( \ - surface, 3)) - -#define pgSurface_Blit \ - (*(int (*)(pgSurfaceObject *, pgSurfaceObject *, SDL_Rect *, SDL_Rect *, \ - int))PYGAMEAPI_GET_SLOT(surface, 2)) - -#define import_pygame_surface() \ - do { \ - IMPORT_PYGAME_MODULE(surface); \ - if (PyErr_Occurred() != NULL) \ - break; \ - IMPORT_PYGAME_MODULE(surflock); \ - } while (0) - -#define pgSurface_New(surface) pgSurface_New2((surface), 1) -#define pgSurface_NewNoOwn(surface) pgSurface_New2((surface), 0) - -#endif /* ~PYGAMEAPI_SURFACE_INTERNAL */ - -/* - * SURFLOCK module - * auto imported/initialized by surface - */ -#ifndef PYGAMEAPI_SURFLOCK_INTERNAL -#define pgLifetimeLock_Type (*(PyTypeObject 
*)PYGAMEAPI_GET_SLOT(surflock, 0)) - -#define pgLifetimeLock_Check(x) ((x)->ob_type == &pgLifetimeLock_Type) - -#define pgSurface_Prep(x) \ - if ((x)->subsurface) \ - (*(*(void (*)(pgSurfaceObject *))PYGAMEAPI_GET_SLOT(surflock, 1)))(x) - -#define pgSurface_Unprep(x) \ - if ((x)->subsurface) \ - (*(*(void (*)(pgSurfaceObject *))PYGAMEAPI_GET_SLOT(surflock, 2)))(x) - -#define pgSurface_Lock \ - (*(int (*)(pgSurfaceObject *))PYGAMEAPI_GET_SLOT(surflock, 3)) - -#define pgSurface_Unlock \ - (*(int (*)(pgSurfaceObject *))PYGAMEAPI_GET_SLOT(surflock, 4)) - -#define pgSurface_LockBy \ - (*(int (*)(pgSurfaceObject *, PyObject *))PYGAMEAPI_GET_SLOT(surflock, 5)) - -#define pgSurface_UnlockBy \ - (*(int (*)(pgSurfaceObject *, PyObject *))PYGAMEAPI_GET_SLOT(surflock, 6)) - -#define pgSurface_LockLifetime \ - (*(PyObject * (*)(PyObject *, PyObject *)) PYGAMEAPI_GET_SLOT(surflock, 7)) -#endif - -/* - * EVENT module - */ -typedef struct pgEventObject pgEventObject; - -#ifndef PYGAMEAPI_EVENT_INTERNAL -#define pgEvent_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(event, 0)) - -#define pgEvent_Check(x) ((x)->ob_type == &pgEvent_Type) - -#define pgEvent_New \ - (*(PyObject * (*)(SDL_Event *)) PYGAMEAPI_GET_SLOT(event, 1)) - -#define pgEvent_New2 \ - (*(PyObject * (*)(int, PyObject *)) PYGAMEAPI_GET_SLOT(event, 2)) - -#define pgEvent_FillUserEvent \ - (*(int (*)(pgEventObject *, SDL_Event *))PYGAMEAPI_GET_SLOT(event, 3)) - -#define pg_EnableKeyRepeat (*(int (*)(int, int))PYGAMEAPI_GET_SLOT(event, 4)) - -#define pg_GetKeyRepeat (*(void (*)(int *, int *))PYGAMEAPI_GET_SLOT(event, 5)) - -#define import_pygame_event() IMPORT_PYGAME_MODULE(event) -#endif - -/* - * RWOBJECT module - * the rwobject are only needed for C side work, not accessable from python. - */ -#ifndef PYGAMEAPI_RWOBJECT_INTERNAL -#define pgRWops_FromObject \ - (*(SDL_RWops * (*)(PyObject *)) PYGAMEAPI_GET_SLOT(rwobject, 0)) - -#define pgRWops_IsFileObject \ - (*(int (*)(SDL_RWops *))PYGAMEAPI_GET_SLOT(rwobject, 1)) - -#define pg_EncodeFilePath \ - (*(PyObject * (*)(PyObject *, PyObject *)) PYGAMEAPI_GET_SLOT(rwobject, 2)) - -#define pg_EncodeString \ - (*(PyObject * (*)(PyObject *, const char *, const char *, PyObject *)) \ - PYGAMEAPI_GET_SLOT(rwobject, 3)) - -#define pgRWops_FromFileObject \ - (*(SDL_RWops * (*)(PyObject *)) PYGAMEAPI_GET_SLOT(rwobject, 4)) - -#define pgRWops_ReleaseObject \ - (*(int (*)(SDL_RWops *))PYGAMEAPI_GET_SLOT(rwobject, 5)) - -#define pgRWops_GetFileExtension \ - (*(char *(*)(SDL_RWops *))PYGAMEAPI_GET_SLOT(rwobject, 6)) - -#define import_pygame_rwobject() IMPORT_PYGAME_MODULE(rwobject) - -#endif - -/* - * PixelArray module - */ -#ifndef PYGAMEAPI_PIXELARRAY_INTERNAL -#define PyPixelArray_Type ((PyTypeObject *)PYGAMEAPI_GET_SLOT(pixelarray, 0)) - -#define PyPixelArray_Check(x) ((x)->ob_type == &PyPixelArray_Type) -#define PyPixelArray_New (*(PyObject * (*)) PYGAMEAPI_GET_SLOT(pixelarray, 1)) - -#define import_pygame_pixelarray() IMPORT_PYGAME_MODULE(pixelarray) -#endif /* PYGAMEAPI_PIXELARRAY_INTERNAL */ - -/* - * Color module - */ -typedef struct pgColorObject pgColorObject; - -#ifndef PYGAMEAPI_COLOR_INTERNAL -#define pgColor_Type (*(PyObject *)PYGAMEAPI_GET_SLOT(color, 0)) - -#define pgColor_Check(x) ((x)->ob_type == &pgColor_Type) -#define pgColor_New (*(PyObject * (*)(Uint8 *)) PYGAMEAPI_GET_SLOT(color, 1)) - -#define pgColor_NewLength \ - (*(PyObject * (*)(Uint8 *, Uint8)) PYGAMEAPI_GET_SLOT(color, 3)) - -#define pg_RGBAFromColorObj \ - (*(int (*)(PyObject *, Uint8 *))PYGAMEAPI_GET_SLOT(color, 2)) - -#define 
pg_RGBAFromFuzzyColorObj \ - (*(int (*)(PyObject *, Uint8 *))PYGAMEAPI_GET_SLOT(color, 4)) - -#define pgColor_AsArray(x) (((pgColorObject *)x)->data) -#define pgColor_NumComponents(x) (((pgColorObject *)x)->len) - -#define import_pygame_color() IMPORT_PYGAME_MODULE(color) -#endif /* PYGAMEAPI_COLOR_INTERNAL */ - -/* - * Math module - */ -#ifndef PYGAMEAPI_MATH_INTERNAL -#define pgVector2_Check(x) \ - ((x)->ob_type == (PyTypeObject *)PYGAMEAPI_GET_SLOT(math, 0)) - -#define pgVector3_Check(x) \ - ((x)->ob_type == (PyTypeObject *)PYGAMEAPI_GET_SLOT(math, 1)) -/* -#define pgVector2_New \ - (*(PyObject*(*)) \ - PYGAMEAPI_GET_SLOT(PyGAME_C_API, 1)) -*/ -#define import_pygame_math() IMPORT_PYGAME_MODULE(math) -#endif /* PYGAMEAPI_MATH_INTERNAL */ - -#define IMPORT_PYGAME_MODULE _IMPORT_PYGAME_MODULE - -/* - * base pygame API slots - * disable slots with NO_PYGAME_C_API - */ -#ifdef PYGAME_H -PYGAMEAPI_DEFINE_SLOTS(base); -PYGAMEAPI_DEFINE_SLOTS(rect); -PYGAMEAPI_DEFINE_SLOTS(cdrom); -PYGAMEAPI_DEFINE_SLOTS(joystick); -PYGAMEAPI_DEFINE_SLOTS(display); -PYGAMEAPI_DEFINE_SLOTS(surface); -PYGAMEAPI_DEFINE_SLOTS(surflock); -PYGAMEAPI_DEFINE_SLOTS(event); -PYGAMEAPI_DEFINE_SLOTS(rwobject); -PYGAMEAPI_DEFINE_SLOTS(pixelarray); -PYGAMEAPI_DEFINE_SLOTS(color); -PYGAMEAPI_DEFINE_SLOTS(math); -#else /* ~PYGAME_H */ -PYGAMEAPI_EXTERN_SLOTS(base); -PYGAMEAPI_EXTERN_SLOTS(rect); -PYGAMEAPI_EXTERN_SLOTS(cdrom); -PYGAMEAPI_EXTERN_SLOTS(joystick); -PYGAMEAPI_EXTERN_SLOTS(display); -PYGAMEAPI_EXTERN_SLOTS(surface); -PYGAMEAPI_EXTERN_SLOTS(surflock); -PYGAMEAPI_EXTERN_SLOTS(event); -PYGAMEAPI_EXTERN_SLOTS(rwobject); -PYGAMEAPI_EXTERN_SLOTS(pixelarray); -PYGAMEAPI_EXTERN_SLOTS(color); -PYGAMEAPI_EXTERN_SLOTS(math); -#endif /* ~PYGAME_H */ - -#endif /* PYGAME_H */ diff --git a/venv/Include/site/python3.9/pygame/include/bitmask.h b/venv/Include/site/python3.9/pygame/include/bitmask.h deleted file mode 100644 index eee09b7..0000000 --- a/venv/Include/site/python3.9/pygame/include/bitmask.h +++ /dev/null @@ -1,171 +0,0 @@ -/* - Bitmask 1.7 - A pixel-perfect collision detection library. - - Copyright (C) 2002-2005 Ulf Ekstrom except for the bitcount - function which is copyright (C) Donald W. Gillies, 1992. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - */ -#ifndef BITMASK_H -#define BITMASK_H - -#ifdef __cplusplus -extern "C" { -#endif - -#include -/* Define INLINE for different compilers. If your compiler does not - support inlining then there might be a performance hit in - bitmask_overlap_area(). 
-*/ -#ifndef INLINE -#ifdef __GNUC__ -#define INLINE inline -#else -#ifdef _MSC_VER -#define INLINE __inline -#else -#define INLINE -#endif -#endif -#endif - -#define BITMASK_W unsigned long int -#define BITMASK_W_LEN (sizeof(BITMASK_W) * CHAR_BIT) -#define BITMASK_W_MASK (BITMASK_W_LEN - 1) -#define BITMASK_N(n) ((BITMASK_W)1 << (n)) - -typedef struct bitmask { - int w, h; - BITMASK_W bits[1]; -} bitmask_t; - -/* Creates a bitmask of width w and height h, where - w and h must both be greater than or equal to 0. - The mask is automatically cleared when created. - */ -bitmask_t * -bitmask_create(int w, int h); - -/* Frees all the memory allocated by bitmask_create for m. */ -void -bitmask_free(bitmask_t *m); - -/* Create a copy of the given bitmask. */ -bitmask_t * -bitmask_copy(bitmask_t *m); - -/* Clears all bits in the mask */ -void -bitmask_clear(bitmask_t *m); - -/* Sets all bits in the mask */ -void -bitmask_fill(bitmask_t *m); - -/* Flips all bits in the mask */ -void -bitmask_invert(bitmask_t *m); - -/* Counts the bits in the mask */ -unsigned int -bitmask_count(bitmask_t *m); - -/* Returns nonzero if the bit at (x,y) is set. Coordinates start at - (0,0) */ -static INLINE int -bitmask_getbit(const bitmask_t *m, int x, int y) -{ - return (m->bits[x / BITMASK_W_LEN * m->h + y] & - BITMASK_N(x & BITMASK_W_MASK)) != 0; -} - -/* Sets the bit at (x,y) */ -static INLINE void -bitmask_setbit(bitmask_t *m, int x, int y) -{ - m->bits[x / BITMASK_W_LEN * m->h + y] |= BITMASK_N(x & BITMASK_W_MASK); -} - -/* Clears the bit at (x,y) */ -static INLINE void -bitmask_clearbit(bitmask_t *m, int x, int y) -{ - m->bits[x / BITMASK_W_LEN * m->h + y] &= ~BITMASK_N(x & BITMASK_W_MASK); -} - -/* Returns nonzero if the masks overlap with the given offset. - The overlap tests uses the following offsets (which may be negative): - - +----+----------.. - |A | yoffset - | +-+----------.. - +--|B - |xoffset - | | - : : -*/ -int -bitmask_overlap(const bitmask_t *a, const bitmask_t *b, int xoffset, - int yoffset); - -/* Like bitmask_overlap(), but will also give a point of intersection. - x and y are given in the coordinates of mask a, and are untouched - if there is no overlap. */ -int -bitmask_overlap_pos(const bitmask_t *a, const bitmask_t *b, int xoffset, - int yoffset, int *x, int *y); - -/* Returns the number of overlapping 'pixels' */ -int -bitmask_overlap_area(const bitmask_t *a, const bitmask_t *b, int xoffset, - int yoffset); - -/* Fills a mask with the overlap of two other masks. A bitwise AND. */ -void -bitmask_overlap_mask(const bitmask_t *a, const bitmask_t *b, bitmask_t *c, - int xoffset, int yoffset); - -/* Draws mask b onto mask a (bitwise OR). Can be used to compose large - (game background?) mask from several submasks, which may speed up - the testing. */ - -void -bitmask_draw(bitmask_t *a, const bitmask_t *b, int xoffset, int yoffset); - -void -bitmask_erase(bitmask_t *a, const bitmask_t *b, int xoffset, int yoffset); - -/* Return a new scaled bitmask, with dimensions w*h. The quality of the - scaling may not be perfect for all circumstances, but it should - be reasonable. If either w or h is 0 a clear 1x1 mask is returned. */ -bitmask_t * -bitmask_scale(const bitmask_t *m, int w, int h); - -/* Convolve b into a, drawing the output into o, shifted by offset. If offset - * is 0, then the (x,y) bit will be set if and only if - * bitmask_overlap(a, b, x - b->w - 1, y - b->h - 1) returns true. - * - * Modifies bits o[xoffset ... xoffset + a->w + b->w - 1) - * [yoffset ... yoffset + a->h + b->h - 1). 
*/ -void -bitmask_convolve(const bitmask_t *a, const bitmask_t *b, bitmask_t *o, - int xoffset, int yoffset); - -#ifdef __cplusplus -} /* End of extern "C" { */ -#endif - -#endif diff --git a/venv/Include/site/python3.9/pygame/include/pgcompat.h b/venv/Include/site/python3.9/pygame/include/pgcompat.h deleted file mode 100644 index 4a11ca0..0000000 --- a/venv/Include/site/python3.9/pygame/include/pgcompat.h +++ /dev/null @@ -1,108 +0,0 @@ -/* Python 2.x/3.x and SDL compatibility tools - */ - -#if !defined(PGCOMPAT_H) -#define PGCOMPAT_H - -#include - -/* define common types where SDL is not included */ -#ifndef SDL_VERSION_ATLEAST -#ifdef _MSC_VER -typedef unsigned __int8 uint8_t; -typedef unsigned __int32 uint32_t; -#else -#include -#endif -typedef uint32_t Uint32; -typedef uint8_t Uint8; -#endif /* no SDL */ - -#if defined(SDL_VERSION_ATLEAST) - -#ifndef SDL_WINDOW_VULKAN -#define SDL_WINDOW_VULKAN 0 -#endif - -#ifndef SDL_WINDOW_ALWAYS_ON_TOP -#define SDL_WINDOW_ALWAYS_ON_TOP 0 -#endif - -#ifndef SDL_WINDOW_SKIP_TASKBAR -#define SDL_WINDOW_SKIP_TASKBAR 0 -#endif - -#ifndef SDL_WINDOW_UTILITY -#define SDL_WINDOW_UTILITY 0 -#endif - -#ifndef SDL_WINDOW_TOOLTIP -#define SDL_WINDOW_TOOLTIP 0 -#endif - -#ifndef SDL_WINDOW_POPUP_MENU -#define SDL_WINDOW_POPUP_MENU 0 -#endif - -#ifndef SDL_WINDOW_INPUT_GRABBED -#define SDL_WINDOW_INPUT_GRABBED 0 -#endif - -#ifndef SDL_WINDOW_INPUT_FOCUS -#define SDL_WINDOW_INPUT_FOCUS 0 -#endif - -#ifndef SDL_WINDOW_MOUSE_FOCUS -#define SDL_WINDOW_MOUSE_FOCUS 0 -#endif - -#ifndef SDL_WINDOW_FOREIGN -#define SDL_WINDOW_FOREIGN 0 -#endif - -#ifndef SDL_WINDOW_ALLOW_HIGHDPI -#define SDL_WINDOW_ALLOW_HIGHDPI 0 -#endif - -#ifndef SDL_WINDOW_MOUSE_CAPTURE -#define SDL_WINDOW_MOUSE_CAPTURE 0 -#endif - -#ifndef SDL_WINDOW_ALWAYS_ON_TOP -#define SDL_WINDOW_ALWAYS_ON_TOP 0 -#endif - -#ifndef SDL_WINDOW_SKIP_TASKBAR -#define SDL_WINDOW_SKIP_TASKBAR 0 -#endif - -#ifndef SDL_WINDOW_UTILITY -#define SDL_WINDOW_UTILITY 0 -#endif - -#ifndef SDL_WINDOW_TOOLTIP -#define SDL_WINDOW_TOOLTIP 0 -#endif - -#ifndef SDL_WINDOW_POPUP_MENU -#define SDL_WINDOW_POPUP_MENU 0 -#endif - -#if SDL_VERSION_ATLEAST(2, 0, 4) -/* To control the use of: - * SDL_AUDIODEVICEADDED - * SDL_AUDIODEVICEREMOVED - * - * Ref: https://wiki.libsdl.org/SDL_EventType - * Ref: https://wiki.libsdl.org/SDL_AudioDeviceEvent - */ -#define SDL2_AUDIODEVICE_SUPPORTED -#endif - -#ifndef SDL_MOUSEWHEEL_FLIPPED -#define NO_SDL_MOUSEWHEEL_FLIPPED -#endif - -#endif /* defined(SDL_VERSION_ATLEAST) */ - -#endif /* ~defined(PGCOMPAT_H) */ diff --git a/venv/Include/site/python3.9/pygame/include/pgimport.h b/venv/Include/site/python3.9/pygame/include/pgimport.h deleted file mode 100644 index 16a36db..0000000 --- a/venv/Include/site/python3.9/pygame/include/pgimport.h +++ /dev/null @@ -1,80 +0,0 @@ -#ifndef PGIMPORT_H -#define PGIMPORT_H - -/* Prefix when initializing module */ -#define MODPREFIX "" -/* Prefix when importing module */ -#define IMPPREFIX "pygame." - -#ifdef __SYMBIAN32__ - -/* On Symbian there is no pygame package. The extensions are built-in or in - * sys\bin. */ -#undef MODPREFIX -#undef IMPPREFIX -#define MODPREFIX "pygame_" -#define IMPPREFIX "pygame_" - -#endif /* __SYMBIAN32__ */ - -#include "pgcompat.h" - -#define PYGAMEAPI_LOCAL_ENTRY "_PYGAME_C_API" -#define PG_CAPSULE_NAME(m) (IMPPREFIX m "." 
PYGAMEAPI_LOCAL_ENTRY) - -/* - * fill API slots defined by PYGAMEAPI_DEFINE_SLOTS/PYGAMEAPI_EXTERN_SLOTS - */ -#define _IMPORT_PYGAME_MODULE(module) \ - { \ - PyObject *_mod_##module = PyImport_ImportModule(IMPPREFIX #module); \ - \ - if (_mod_##module != NULL) { \ - PyObject *_c_api = \ - PyObject_GetAttrString(_mod_##module, PYGAMEAPI_LOCAL_ENTRY); \ - \ - Py_DECREF(_mod_##module); \ - if (_c_api != NULL && PyCapsule_CheckExact(_c_api)) { \ - void **localptr = (void **)PyCapsule_GetPointer( \ - _c_api, PG_CAPSULE_NAME(#module)); \ - _PGSLOTS_##module = localptr; \ - } \ - Py_XDECREF(_c_api); \ - } \ - } - -#define PYGAMEAPI_IS_IMPORTED(module) (_PGSLOTS_##module != NULL) - -/* - * source file must include one of these in order to use _IMPORT_PYGAME_MODULE. - * this is set by import_pygame_*() functions. - * disable with NO_PYGAME_C_API - */ -#define PYGAMEAPI_DEFINE_SLOTS(module) void **_PGSLOTS_##module = NULL -#define PYGAMEAPI_EXTERN_SLOTS(module) extern void **_PGSLOTS_##module -#define PYGAMEAPI_GET_SLOT(module, index) _PGSLOTS_##module[(index)] - -/* - * disabled API with NO_PYGAME_C_API; do nothing instead - */ -#ifdef NO_PYGAME_C_API - -#undef PYGAMEAPI_DEFINE_SLOTS -#undef PYGAMEAPI_EXTERN_SLOTS - -#define PYGAMEAPI_DEFINE_SLOTS(module) -#define PYGAMEAPI_EXTERN_SLOTS(module) - -/* intentionally leave this defined to cause a compiler error * -#define PYGAMEAPI_GET_SLOT(api_root, index) -#undef PYGAMEAPI_GET_SLOT*/ - -#undef _IMPORT_PYGAME_MODULE -#define _IMPORT_PYGAME_MODULE(module) - -#endif /* NO_PYGAME_C_API */ - -#define encapsulate_api(ptr, module) \ - PyCapsule_New(ptr, PG_CAPSULE_NAME(module), NULL) - -#endif /* ~PGIMPORT_H */ diff --git a/venv/Include/site/python3.9/pygame/include/pgplatform.h b/venv/Include/site/python3.9/pygame/include/pgplatform.h deleted file mode 100644 index c73cc24..0000000 --- a/venv/Include/site/python3.9/pygame/include/pgplatform.h +++ /dev/null @@ -1,92 +0,0 @@ -/* platform/compiler adjustments */ -#ifndef PG_PLATFORM_H -#define PG_PLATFORM_H - -#if defined(HAVE_SNPRINTF) /* defined in python.h (pyerrors.h) and SDL.h \ - (SDL_config.h) */ -#undef HAVE_SNPRINTF /* remove GCC redefine warning */ -#endif /* HAVE_SNPRINTF */ - -#ifndef PG_INLINE -#if defined(__clang__) -#define PG_INLINE __inline__ __attribute__((__unused__)) -#elif defined(__GNUC__) -#define PG_INLINE __inline__ -#elif defined(_MSC_VER) -#define PG_INLINE __inline -#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L -#define PG_INLINE inline -#else -#define PG_INLINE -#endif -#endif /* ~PG_INLINE */ - -// Worth trying this on MSVC/win32 builds to see if provides any speed up -#ifndef PG_FORCEINLINE -#if defined(__clang__) -#define PG_FORCEINLINE __inline__ __attribute__((__unused__)) -#elif defined(__GNUC__) -#define PG_FORCEINLINE __inline__ -#elif defined(_MSC_VER) -#define PG_FORCEINLINE __forceinline -#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L -#define PG_FORCEINLINE inline -#else -#define PG_FORCEINLINE -#endif -#endif /* ~PG_FORCEINLINE */ - -/* This is unconditionally defined in Python.h */ -#if defined(_POSIX_C_SOURCE) -#undef _POSIX_C_SOURCE -#endif - -/* No signal() */ -#if defined(__SYMBIAN32__) && defined(HAVE_SIGNAL_H) -#undef HAVE_SIGNAL_H -#endif - -#if defined(HAVE_SNPRINTF) -#undef HAVE_SNPRINTF -#endif - -/* SDL needs WIN32 */ -#if !defined(WIN32) && \ - (defined(MS_WIN32) || defined(_WIN32) || defined(__WIN32) || \ - defined(__WIN32__) || defined(_WINDOWS)) -#define WIN32 -#endif - -/* Commenting out SSE4_2 stuff because it does 
not do runtime detection. -#ifndef PG_TARGET_SSE4_2 -#if defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ == 4 && -__GNUC_MINOR__ >= 9) || __GNUC__ >= 5 )) -//The old gcc 4.8 on centos used by manylinux1 does not seem to get sse4.2 -intrinsics #define PG_FUNCTION_TARGET_SSE4_2 __attribute__((target("sse4.2"))) -// No else; we define the fallback later -#endif -#endif -*/ -/* ~PG_TARGET_SSE4_2 */ - -/* -#ifdef PG_FUNCTION_TARGET_SSE4_2 -#if !defined(__SSE4_2__) && !defined(PG_COMPILE_SSE4_2) -#if defined(__x86_64__) || defined(__i386__) -#define PG_COMPILE_SSE4_2 1 -#endif -#endif -#endif -*/ -/* ~PG_TARGET_SSE4_2 */ - -/* Fallback definition of target attribute */ -#ifndef PG_FUNCTION_TARGET_SSE4_2 -#define PG_FUNCTION_TARGET_SSE4_2 -#endif - -#ifndef PG_COMPILE_SSE4_2 -#define PG_COMPILE_SSE4_2 0 -#endif - -#endif /* ~PG_PLATFORM_H */ diff --git a/venv/Include/site/python3.9/pygame/include/pygame.h b/venv/Include/site/python3.9/pygame/include/pygame.h deleted file mode 100644 index 3772ae6..0000000 --- a/venv/Include/site/python3.9/pygame/include/pygame.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -/* To allow the Pygame C api to be globally shared by all code within an - * extension module built from multiple C files, only include the pygame.h - * header within the top level C file, the one which calls the - * 'import_pygame_*' macros. All other C source files of the module should - * include _pygame.h instead. - */ -#ifndef PYGAME_H -#define PYGAME_H - -#include "_pygame.h" - -#endif diff --git a/venv/Include/site/python3.9/pygame/include/pygame_bufferproxy.h b/venv/Include/site/python3.9/pygame/include/pygame_bufferproxy.h deleted file mode 100644 index 9284ff2..0000000 --- a/venv/Include/site/python3.9/pygame/include/pygame_bufferproxy.h +++ /dev/null @@ -1,56 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - Copyright (C) 2007 Rene Dudfield, Richard Goedeken - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -/* Bufferproxy module C api. 
*/ -#if !defined(PG_BUFPROXY_HEADER) -#define PG_BUFPROXY_HEADER - -#include - -typedef PyObject *(*_pgbufproxy_new_t)(PyObject *, getbufferproc); -typedef PyObject *(*_pgbufproxy_get_obj_t)(PyObject *); -typedef int (*_pgbufproxy_trip_t)(PyObject *); - -#ifndef PYGAMEAPI_BUFPROXY_INTERNAL - -#include "pgimport.h" - -PYGAMEAPI_DEFINE_SLOTS(bufferproxy); - -#define pgBufproxy_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(bufferproxy, 0)) - -#define pgBufproxy_Check(x) ((x)->ob_type == &pgBufproxy_Type) - -#define pgBufproxy_New (*(_pgbufproxy_new_t)PYGAMEAPI_GET_SLOT(bufferproxy, 1)) - -#define pgBufproxy_GetParent \ - (*(_pgbufproxy_get_obj_t)PYGAMEAPI_GET_SLOT(bufferproxy, 2)) - -#define pgBufproxy_Trip \ - (*(_pgbufproxy_trip_t)PYGAMEAPI_GET_SLOT(bufferproxy, 3)) - -#define import_pygame_bufferproxy() _IMPORT_PYGAME_MODULE(bufferproxy) - -#endif /* ~PYGAMEAPI_BUFPROXY_INTERNAL */ - -#endif /* ~defined(PG_BUFPROXY_HEADER) */ diff --git a/venv/Include/site/python3.9/pygame/include/pygame_font.h b/venv/Include/site/python3.9/pygame/include/pygame_font.h deleted file mode 100644 index aae41bf..0000000 --- a/venv/Include/site/python3.9/pygame/include/pygame_font.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -#include -#include "pgplatform.h" - -struct TTF_Font; - -typedef struct { - PyObject_HEAD TTF_Font *font; - PyObject *weakreflist; - unsigned int ttf_init_generation; -} PyFontObject; -#define PyFont_AsFont(x) (((PyFontObject *)x)->font) - -#ifndef PYGAMEAPI_FONT_INTERNAL - -#include "pgimport.h" - -PYGAMEAPI_DEFINE_SLOTS(font); - -#define PyFont_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(font, 0)) -#define PyFont_Check(x) ((x)->ob_type == &PyFont_Type) - -#define PyFont_New (*(PyObject * (*)(TTF_Font *)) PYGAMEAPI_GET_SLOT(font, 1)) - -/*slot 2 taken by FONT_INIT_CHECK*/ - -#define import_pygame_font() _IMPORT_PYGAME_MODULE(font) - -#endif diff --git a/venv/Include/site/python3.9/pygame/include/pygame_freetype.h b/venv/Include/site/python3.9/pygame/include/pygame_freetype.h deleted file mode 100644 index 90172cc..0000000 --- a/venv/Include/site/python3.9/pygame/include/pygame_freetype.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2009 Vicent Marti - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. 
- - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -*/ -#ifndef PYGAME_FREETYPE_H_ -#define PYGAME_FREETYPE_H_ - -#include "pgplatform.h" -#include "pgimport.h" -#include "pgcompat.h" - -#ifndef PYGAME_FREETYPE_INTERNAL - -PYGAMEAPI_DEFINE_SLOTS(_freetype); - -#define pgFont_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(_freetype, 0)) - -#define pgFont_Check(x) ((x)->ob_type == &pgFont_Type) - -#define pgFont_New \ - (*(PyObject * (*)(const char *, long)) PYGAMEAPI_GET_SLOT(_freetype, 1)) - -#define import_pygame_freetype() _IMPORT_PYGAME_MODULE(_freetype) - -#endif /* PYGAME_FREETYPE_INTERNAL */ - -#endif /* PYGAME_FREETYPE_H_ */ diff --git a/venv/Include/site/python3.9/pygame/include/pygame_mask.h b/venv/Include/site/python3.9/pygame/include/pygame_mask.h deleted file mode 100644 index 8dd8f17..0000000 --- a/venv/Include/site/python3.9/pygame/include/pygame_mask.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - pygame - Python Game Library - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ - -#ifndef PGMASK_H -#define PGMASK_H - -#include -#include "bitmask.h" - -typedef struct { - PyObject_HEAD bitmask_t *mask; - void *bufdata; -} pgMaskObject; - -#define pgMask_AsBitmap(x) (((pgMaskObject *)x)->mask) - -#ifndef PYGAMEAPI_MASK_INTERNAL - -#include "pgimport.h" - -PYGAMEAPI_DEFINE_SLOTS(mask); - -#define pgMask_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(mask, 0)) -#define pgMask_Check(x) ((x)->ob_type == &pgMask_Type) - -#define import_pygame_mask() _IMPORT_PYGAME_MODULE(mask) - -#endif /* ~PYGAMEAPI_MASK_INTERNAL */ - -#endif /* ~PGMASK_H */ diff --git a/venv/Include/site/python3.9/pygame/include/pygame_mixer.h b/venv/Include/site/python3.9/pygame/include/pygame_mixer.h deleted file mode 100644 index e19d273..0000000 --- a/venv/Include/site/python3.9/pygame/include/pygame_mixer.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. 
- - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -#ifndef PGMIXER_H -#define PGMIXER_H - -#include -#include - -#include "pgcompat.h" - -struct Mix_Chunk; - -typedef struct { - PyObject_HEAD Mix_Chunk *chunk; - Uint8 *mem; - PyObject *weakreflist; -} pgSoundObject; - -typedef struct { - PyObject_HEAD int chan; -} pgChannelObject; - -#define pgSound_AsChunk(x) (((pgSoundObject *)x)->chunk) -#define pgChannel_AsInt(x) (((pgChannelObject *)x)->chan) - -#include "pgimport.h" - -#ifndef PYGAMEAPI_MIXER_INTERNAL - -PYGAMEAPI_DEFINE_SLOTS(mixer); - -#define pgSound_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(mixer, 0)) - -#define pgSound_Check(x) ((x)->ob_type == &pgSound_Type) - -#define pgSound_New \ - (*(PyObject * (*)(Mix_Chunk *)) PYGAMEAPI_GET_SLOT(mixer, 1)) - -#define pgSound_Play \ - (*(PyObject * (*)(PyObject *, PyObject *)) PYGAMEAPI_GET_SLOT(mixer, 2)) - -#define pgChannel_Type (*(PyTypeObject *)PYGAMEAPI_GET_SLOT(mixer, 3)) -#define pgChannel_Check(x) ((x)->ob_type == &pgChannel_Type) - -#define pgChannel_New (*(PyObject * (*)(int)) PYGAMEAPI_GET_SLOT(mixer, 4)) - -#define import_pygame_mixer() _IMPORT_PYGAME_MODULE(mixer) - -#endif /* PYGAMEAPI_MIXER_INTERNAL */ - -#endif /* ~PGMIXER_H */ diff --git a/venv/Include/site/python3.9/pygame/include/sse2neon.h b/venv/Include/site/python3.9/pygame/include/sse2neon.h deleted file mode 100644 index a3e3ac0..0000000 --- a/venv/Include/site/python3.9/pygame/include/sse2neon.h +++ /dev/null @@ -1,6203 +0,0 @@ -#ifndef SSE2NEON_H -#define SSE2NEON_H - -// This header file provides a simple API translation layer -// between SSE intrinsics to their corresponding Arm/Aarch64 NEON versions -// -// This header file does not yet translate all of the SSE intrinsics. -// -// Contributors to this work are: -// John W. Ratcliff -// Brandon Rowlett -// Ken Fast -// Eric van Beurden -// Alexander Potylitsin -// Hasindu Gamaarachchi -// Jim Huang -// Mark Cheng -// Malcolm James MacLeod -// Devin Hussey (easyaspi314) -// Sebastian Pop -// Developer Ecosystem Engineering -// Danila Kutenin -// François Turban (JishinMaster) -// Pei-Hsuan Hung -// Yang-Hao Yuan - -/* - * sse2neon is freely redistributable under the MIT License. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -/* Tunable configurations */ - -/* Enable precise implementation of _mm_min_ps and _mm_max_ps - * This would slow down the computation a bit, but gives consistent result with - * x86 SSE2. (e.g. would solve a hole or NaN pixel in the rendering result) - */ -#ifndef SSE2NEON_PRECISE_MINMAX -#define SSE2NEON_PRECISE_MINMAX (0) -#endif - -#if defined(__GNUC__) || defined(__clang__) -#pragma push_macro("FORCE_INLINE") -#pragma push_macro("ALIGN_STRUCT") -#define FORCE_INLINE static inline __attribute__((always_inline)) -#define ALIGN_STRUCT(x) __attribute__((aligned(x))) -#else -#error "Macro name collisions may happen with unsupported compiler." -#ifdef FORCE_INLINE -#undef FORCE_INLINE -#endif -#define FORCE_INLINE static inline -#ifndef ALIGN_STRUCT -#define ALIGN_STRUCT(x) __declspec(align(x)) -#endif -#endif - -#include <stdint.h> -#include <stdlib.h> - -// These cause the build to fail on raspberry pi with 'unsupported target' -// and don't seem to do anything particularly useful -///* Architecture-specific build options */ -///* FIXME: #pragma GCC push_options is only available on GCC */ -//#if defined(__GNUC__) -//#if defined(__arm__) && __ARM_ARCH == 7 -///* According to ARM C Language Extensions Architecture specification, -// * __ARM_NEON is defined to a value indicating the Advanced SIMD (NEON) -// * architecture supported. -// */ -//#if !defined(__ARM_NEON) || !defined(__ARM_NEON__) -//#error "You must enable NEON instructions (e.g. -mfpu=neon) to use SSE2NEON." -//#endif -//#pragma GCC push_options -//#pragma GCC target("fpu=neon") -//#elif defined(__aarch64__) -//#pragma GCC push_options -//#pragma GCC target("+simd") -//#else -//#error "Unsupported target. Must be either ARMv7-A+NEON or ARMv8-A." -//#endif -//#endif - -#include <arm_neon.h> - -/* Rounding functions require either Aarch64 instructions or libm fallback */ -#if !defined(__aarch64__) -#include <math.h> -#endif - -/* "__has_builtin" can be used to query support for built-in functions - * provided by gcc/clang and other compilers that support it. - */ -#ifndef __has_builtin /* GCC prior to 10 or non-clang compilers */ -/* Compatibility with gcc <= 9 */ -#if __GNUC__ <= 9 -#define __has_builtin(x) HAS##x -#define HAS__builtin_popcount 1 -#define HAS__builtin_popcountll 1 -#else -#define __has_builtin(x) 0 -#endif -#endif - -/** - * MACRO for shuffle parameter for _mm_shuffle_ps(). - * Argument fp3 is a digit[0123] that represents the fp from argument "b" - * of mm_shuffle_ps that will be placed in fp3 of result. fp2 is the same - * for fp2 in result. fp1 is a digit[0123] that represents the fp from - * argument "a" of mm_shuffle_ps that will be placed in fp1 of result. - * fp0 is the same for fp0 of result. - */ -#define _MM_SHUFFLE(fp3, fp2, fp1, fp0) \ - (((fp3) << 6) | ((fp2) << 4) | ((fp1) << 2) | ((fp0))) - -/* Rounding mode macros. */ -#define _MM_FROUND_TO_NEAREST_INT 0x00 -#define _MM_FROUND_TO_NEG_INF 0x01 -#define _MM_FROUND_TO_POS_INF 0x02 -#define _MM_FROUND_TO_ZERO 0x03 -#define _MM_FROUND_CUR_DIRECTION 0x04 -#define _MM_FROUND_NO_EXC 0x08 - -/* indicate immediate constant argument in a given range */ -#define __constrange(a, b) const - -/* A few intrinsics accept traditional data types like ints or floats, but - * most operate on data types that are specific to SSE. - * If a vector type ends in d, it contains doubles, and if it does not have - * a suffix, it contains floats. An integer vector type can contain any type - * of integer, from chars to shorts to unsigned long longs.
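 * (Editor's note: illustrative example, not part of the original header. Using
 * the vector types described here together with the _MM_SHUFFLE macro defined
 * above, code written against this translation layer looks like plain SSE:
 *
 *     __m128  f = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);   // lane 0 holds 1.0f
 *     __m128i n = _mm_set1_epi32(7);                     // four lanes of 7
 *     __m128  r = _mm_shuffle_ps(f, f, _MM_SHUFFLE(0, 1, 2, 3));  // reversed lanes
 *
 * All of these intrinsics are implemented further down in this header.)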
- */ -typedef int64x1_t __m64; -typedef float32x4_t __m128; /* 128-bit vector containing 4 floats */ -// On ARM 32-bit architecture, the float64x2_t is not supported. -// The data type __m128d should be represented in a different way for related -// intrinsic conversion. -#if defined(__aarch64__) -typedef float64x2_t __m128d; /* 128-bit vector containing 2 doubles */ -#else -typedef float32x4_t __m128d; -#endif -typedef int64x2_t __m128i; /* 128-bit vector containing integers */ - -/* type-safe casting between types */ - -#define vreinterpretq_m128_f16(x) vreinterpretq_f32_f16(x) -#define vreinterpretq_m128_f32(x) (x) -#define vreinterpretq_m128_f64(x) vreinterpretq_f32_f64(x) - -#define vreinterpretq_m128_u8(x) vreinterpretq_f32_u8(x) -#define vreinterpretq_m128_u16(x) vreinterpretq_f32_u16(x) -#define vreinterpretq_m128_u32(x) vreinterpretq_f32_u32(x) -#define vreinterpretq_m128_u64(x) vreinterpretq_f32_u64(x) - -#define vreinterpretq_m128_s8(x) vreinterpretq_f32_s8(x) -#define vreinterpretq_m128_s16(x) vreinterpretq_f32_s16(x) -#define vreinterpretq_m128_s32(x) vreinterpretq_f32_s32(x) -#define vreinterpretq_m128_s64(x) vreinterpretq_f32_s64(x) - -#define vreinterpretq_f16_m128(x) vreinterpretq_f16_f32(x) -#define vreinterpretq_f32_m128(x) (x) -#define vreinterpretq_f64_m128(x) vreinterpretq_f64_f32(x) - -#define vreinterpretq_u8_m128(x) vreinterpretq_u8_f32(x) -#define vreinterpretq_u16_m128(x) vreinterpretq_u16_f32(x) -#define vreinterpretq_u32_m128(x) vreinterpretq_u32_f32(x) -#define vreinterpretq_u64_m128(x) vreinterpretq_u64_f32(x) - -#define vreinterpretq_s8_m128(x) vreinterpretq_s8_f32(x) -#define vreinterpretq_s16_m128(x) vreinterpretq_s16_f32(x) -#define vreinterpretq_s32_m128(x) vreinterpretq_s32_f32(x) -#define vreinterpretq_s64_m128(x) vreinterpretq_s64_f32(x) - -#define vreinterpretq_m128i_s8(x) vreinterpretq_s64_s8(x) -#define vreinterpretq_m128i_s16(x) vreinterpretq_s64_s16(x) -#define vreinterpretq_m128i_s32(x) vreinterpretq_s64_s32(x) -#define vreinterpretq_m128i_s64(x) (x) - -#define vreinterpretq_m128i_u8(x) vreinterpretq_s64_u8(x) -#define vreinterpretq_m128i_u16(x) vreinterpretq_s64_u16(x) -#define vreinterpretq_m128i_u32(x) vreinterpretq_s64_u32(x) -#define vreinterpretq_m128i_u64(x) vreinterpretq_s64_u64(x) - -#define vreinterpretq_s8_m128i(x) vreinterpretq_s8_s64(x) -#define vreinterpretq_s16_m128i(x) vreinterpretq_s16_s64(x) -#define vreinterpretq_s32_m128i(x) vreinterpretq_s32_s64(x) -#define vreinterpretq_s64_m128i(x) (x) - -#define vreinterpretq_u8_m128i(x) vreinterpretq_u8_s64(x) -#define vreinterpretq_u16_m128i(x) vreinterpretq_u16_s64(x) -#define vreinterpretq_u32_m128i(x) vreinterpretq_u32_s64(x) -#define vreinterpretq_u64_m128i(x) vreinterpretq_u64_s64(x) - -#define vreinterpret_m64_s8(x) vreinterpret_s64_s8(x) -#define vreinterpret_m64_s16(x) vreinterpret_s64_s16(x) -#define vreinterpret_m64_s32(x) vreinterpret_s64_s32(x) -#define vreinterpret_m64_s64(x) (x) - -#define vreinterpret_m64_u8(x) vreinterpret_s64_u8(x) -#define vreinterpret_m64_u16(x) vreinterpret_s64_u16(x) -#define vreinterpret_m64_u32(x) vreinterpret_s64_u32(x) -#define vreinterpret_m64_u64(x) vreinterpret_s64_u64(x) - -#define vreinterpret_m64_f16(x) vreinterpret_s64_f16(x) -#define vreinterpret_m64_f32(x) vreinterpret_s64_f32(x) -#define vreinterpret_m64_f64(x) vreinterpret_s64_f64(x) - -#define vreinterpret_u8_m64(x) vreinterpret_u8_s64(x) -#define vreinterpret_u16_m64(x) vreinterpret_u16_s64(x) -#define vreinterpret_u32_m64(x) vreinterpret_u32_s64(x) -#define vreinterpret_u64_m64(x) 
vreinterpret_u64_s64(x) - -#define vreinterpret_s8_m64(x) vreinterpret_s8_s64(x) -#define vreinterpret_s16_m64(x) vreinterpret_s16_s64(x) -#define vreinterpret_s32_m64(x) vreinterpret_s32_s64(x) -#define vreinterpret_s64_m64(x) (x) - -#define vreinterpret_f32_m64(x) vreinterpret_f32_s64(x) - -#if defined(__aarch64__) -#define vreinterpretq_m128d_s32(x) vreinterpretq_f64_s32(x) -#define vreinterpretq_m128d_s64(x) vreinterpretq_f64_s64(x) - -#define vreinterpretq_m128d_f64(x) (x) - -#define vreinterpretq_s64_m128d(x) vreinterpretq_s64_f64(x) - -#define vreinterpretq_f64_m128d(x) (x) -#else -#define vreinterpretq_m128d_s32(x) vreinterpretq_f32_s32(x) -#define vreinterpretq_m128d_s64(x) vreinterpretq_f32_s64(x) - -#define vreinterpretq_m128d_f32(x) (x) - -#define vreinterpretq_s64_m128d(x) vreinterpretq_s64_f32(x) - -#define vreinterpretq_f32_m128d(x) (x) -#endif - -// A struct is defined in this header file called 'SIMDVec' which can be used -// by applications which attempt to access the contents of an _m128 struct -// directly. It is important to note that accessing the __m128 struct directly -// is bad coding practice by Microsoft: @see: -// https://msdn.microsoft.com/en-us/library/ayeb3ayc.aspx -// -// However, some legacy source code may try to access the contents of an __m128 -// struct directly so the developer can use the SIMDVec as an alias for it. Any -// casting must be done manually by the developer, as you cannot cast or -// otherwise alias the base NEON data type for intrinsic operations. -// -// union intended to allow direct access to an __m128 variable using the names -// that the MSVC compiler provides. This union should really only be used when -// trying to access the members of the vector as integer values. GCC/clang -// allow native access to the float members through a simple array access -// operator (in C since 4.6, in C++ since 4.8). -// -// Ideally direct accesses to SIMD vectors should not be used since it can cause -// a performance hit. If it really is needed however, the original __m128 -// variable can be aliased with a pointer to this union and used to access -// individual components. The use of this union should be hidden behind a macro -// that is used throughout the codebase to access the members instead of always -// declaring this type of variable. -typedef union ALIGN_STRUCT(16) SIMDVec { - float m128_f32[4]; // as floats - DON'T USE. Added for convenience. - int8_t m128_i8[16]; // as signed 8-bit integers. - int16_t m128_i16[8]; // as signed 16-bit integers. - int32_t m128_i32[4]; // as signed 32-bit integers. - int64_t m128_i64[2]; // as signed 64-bit integers. - uint8_t m128_u8[16]; // as unsigned 8-bit integers. - uint16_t m128_u16[8]; // as unsigned 16-bit integers. - uint32_t m128_u32[4]; // as unsigned 32-bit integers. - uint64_t m128_u64[2]; // as unsigned 64-bit integers. 
-} SIMDVec; - -// casting using SIMDVec -#define vreinterpretq_nth_u64_m128i(x, n) (((SIMDVec *) &x)->m128_u64[n]) -#define vreinterpretq_nth_u32_m128i(x, n) (((SIMDVec *) &x)->m128_u32[n]) -#define vreinterpretq_nth_u8_m128i(x, n) (((SIMDVec *) &x)->m128_u8[n]) - -/* Backwards compatibility for compilers with lack of specific type support */ - -// Older gcc does not define vld1q_u8_x4 type -#if defined(__GNUC__) && !defined(__clang__) -#if __GNUC__ <= 9 -FORCE_INLINE uint8x16x4_t vld1q_u8_x4(const uint8_t *p) -{ - uint8x16x4_t ret; - ret.val[0] = vld1q_u8(p + 0); - ret.val[1] = vld1q_u8(p + 16); - ret.val[2] = vld1q_u8(p + 32); - ret.val[3] = vld1q_u8(p + 48); - return ret; -} -#endif -#endif - -/* Function Naming Conventions - * The naming convention of SSE intrinsics is straightforward. A generic SSE - * intrinsic function is given as follows: - * _mm__ - * - * The parts of this format are given as follows: - * 1. describes the operation performed by the intrinsic - * 2. identifies the data type of the function's primary arguments - * - * This last part, , is a little complicated. It identifies the - * content of the input values, and can be set to any of the following values: - * + ps - vectors contain floats (ps stands for packed single-precision) - * + pd - vectors cantain doubles (pd stands for packed double-precision) - * + epi8/epi16/epi32/epi64 - vectors contain 8-bit/16-bit/32-bit/64-bit - * signed integers - * + epu8/epu16/epu32/epu64 - vectors contain 8-bit/16-bit/32-bit/64-bit - * unsigned integers - * + si128 - unspecified 128-bit vector or 256-bit vector - * + m128/m128i/m128d - identifies input vector types when they are different - * than the type of the returned vector - * - * For example, _mm_setzero_ps. The _mm implies that the function returns - * a 128-bit vector. The _ps at the end implies that the argument vectors - * contain floats. - * - * A complete example: Byte Shuffle - pshufb (_mm_shuffle_epi8) - * // Set packed 16-bit integers. 
128 bits, 8 short, per 16 bits - * __m128i v_in = _mm_setr_epi16(1, 2, 3, 4, 5, 6, 7, 8); - * // Set packed 8-bit integers - * // 128 bits, 16 chars, per 8 bits - * __m128i v_perm = _mm_setr_epi8(1, 0, 2, 3, 8, 9, 10, 11, - * 4, 5, 12, 13, 6, 7, 14, 15); - * // Shuffle packed 8-bit integers - * __m128i v_out = _mm_shuffle_epi8(v_in, v_perm); // pshufb - * - * Data (Number, Binary, Byte Index): - +------+------+-------------+------+------+-------------+ - | 1 | 2 | 3 | 4 | Number - +------+------+------+------+------+------+------+------+ - | 0000 | 0001 | 0000 | 0010 | 0000 | 0011 | 0000 | 0100 | Binary - +------+------+------+------+------+------+------+------+ - | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | Index - +------+------+------+------+------+------+------+------+ - - +------+------+------+------+------+------+------+------+ - | 5 | 6 | 7 | 8 | Number - +------+------+------+------+------+------+------+------+ - | 0000 | 0101 | 0000 | 0110 | 0000 | 0111 | 0000 | 1000 | Binary - +------+------+------+------+------+------+------+------+ - | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | Index - +------+------+------+------+------+------+------+------+ - * Index (Byte Index): - +------+------+------+------+------+------+------+------+ - | 1 | 0 | 2 | 3 | 8 | 9 | 10 | 11 | - +------+------+------+------+------+------+------+------+ - - +------+------+------+------+------+------+------+------+ - | 4 | 5 | 12 | 13 | 6 | 7 | 14 | 15 | - +------+------+------+------+------+------+------+------+ - * Result: - +------+------+------+------+------+------+------+------+ - | 1 | 0 | 2 | 3 | 8 | 9 | 10 | 11 | Index - +------+------+------+------+------+------+------+------+ - | 0001 | 0000 | 0000 | 0010 | 0000 | 0101 | 0000 | 0110 | Binary - +------+------+------+------+------+------+------+------+ - | 256 | 2 | 5 | 6 | Number - +------+------+------+------+------+------+------+------+ - - +------+------+------+------+------+------+------+------+ - | 4 | 5 | 12 | 13 | 6 | 7 | 14 | 15 | Index - +------+------+------+------+------+------+------+------+ - | 0000 | 0011 | 0000 | 0111 | 0000 | 0100 | 0000 | 1000 | Binary - +------+------+------+------+------+------+------+------+ - | 3 | 7 | 4 | 8 | Number - +------+------+------+------+------+------+-------------+ - */ - -/* Set/get methods */ - -/* Constants for use with _mm_prefetch. */ -enum _mm_hint { - _MM_HINT_NTA = 0, /* load data to L1 and L2 cache, mark it as NTA */ - _MM_HINT_T0 = 1, /* load data to L1 and L2 cache */ - _MM_HINT_T1 = 2, /* load data to L2 cache only */ - _MM_HINT_T2 = 3, /* load data to L2 cache only, mark it as NTA */ - _MM_HINT_ENTA = 4, /* exclusive version of _MM_HINT_NTA */ - _MM_HINT_ET0 = 5, /* exclusive version of _MM_HINT_T0 */ - _MM_HINT_ET1 = 6, /* exclusive version of _MM_HINT_T1 */ - _MM_HINT_ET2 = 7 /* exclusive version of _MM_HINT_T2 */ -}; - -// Loads one cache line of data from address p to a location closer to the -// processor. https://msdn.microsoft.com/en-us/library/84szxsww(v=vs.100).aspx -FORCE_INLINE void _mm_prefetch(const void *p, int i) -{ - (void) i; - __builtin_prefetch(p); -} - -// Copy the lower single-precision (32-bit) floating-point element of a to dst. -// -// dst[31:0] := a[31:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_f32 -FORCE_INLINE float _mm_cvtss_f32(__m128 a) -{ - return vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); -} - -// Convert the lower single-precision (32-bit) floating-point element in a to a -// 32-bit integer, and store the result in dst. 
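// (Editor's note: illustrative example, not part of the original header. The
//  scalar conversion helpers behave like their SSE counterparts, e.g.:
//      __m128 v = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.5f);
//      float  f = _mm_cvtss_f32(v);    // 1.5f: lane 0 read back as a float
//      int    i = _mm_cvtss_si32(v);   // 2, under round-to-nearest-even
//      int    t = _mm_cvtt_ss2si(v);   // 1: the "tt" variants truncate
//  assuming the default SSE rounding mode.)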
-// -// dst[31:0] := Convert_FP32_To_Int32(a[31:0]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si32 -#define _mm_cvtss_si32(a) _mm_cvt_ss2si(a) - -// Convert the lower single-precision (32-bit) floating-point element in a to a -// 64-bit integer, and store the result in dst. -// -// dst[63:0] := Convert_FP32_To_Int64(a[31:0]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si64 -FORCE_INLINE int _mm_cvtss_si64(__m128 a) -{ -#if defined(__aarch64__) - return vgetq_lane_s64( - vreinterpretq_s64_s32(vcvtnq_s32_f32(vreinterpretq_f32_m128(a))), 0); -#else - float32_t data = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); - float32_t diff = data - floor(data); - if (diff > 0.5) - return (int64_t) ceil(data); - if (diff == 0.5) { - int64_t f = (int64_t) floor(data); - int64_t c = (int64_t) ceil(data); - return c & 1 ? f : c; - } - return (int64_t) floor(data); -#endif -} - -// Convert packed single-precision (32-bit) floating-point elements in a to -// packed 32-bit integers with truncation, and store the results in dst. -// -// FOR j := 0 to 1 -// i := 32*j -// dst[i+31:i] := Convert_FP32_To_Int32_Truncate(a[i+31:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ps2pi -FORCE_INLINE __m64 _mm_cvtt_ps2pi(__m128 a) -{ - return vreinterpret_m64_s32( - vget_low_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)))); -} - -// Convert the lower single-precision (32-bit) floating-point element in a to a -// 32-bit integer with truncation, and store the result in dst. -// -// dst[31:0] := Convert_FP32_To_Int32_Truncate(a[31:0]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ss2si -FORCE_INLINE int _mm_cvtt_ss2si(__m128 a) -{ - return vgetq_lane_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)), 0); -} - -// Convert packed single-precision (32-bit) floating-point elements in a to -// packed 32-bit integers with truncation, and store the results in dst. -// -// FOR j := 0 to 1 -// i := 32*j -// dst[i+31:i] := Convert_FP32_To_Int32_Truncate(a[i+31:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttps_pi32 -#define _mm_cvttps_pi32(a) _mm_cvtt_ps2pi(a) - -// Convert the lower single-precision (32-bit) floating-point element in a to a -// 32-bit integer with truncation, and store the result in dst. -// -// dst[31:0] := Convert_FP32_To_Int32_Truncate(a[31:0]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si32 -#define _mm_cvttss_si32(a) _mm_cvtt_ss2si(a) - -// Convert the lower single-precision (32-bit) floating-point element in a to a -// 64-bit integer with truncation, and store the result in dst. -// -// dst[63:0] := Convert_FP32_To_Int64_Truncate(a[31:0]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si64 -FORCE_INLINE int64_t _mm_cvttss_si64(__m128 a) -{ - return vgetq_lane_s64( - vmovl_s32(vget_low_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)))), 0); -} - -// Sets the 128-bit value to zero -// https://msdn.microsoft.com/en-us/library/vstudio/ys7dw0kh(v=vs.100).aspx -FORCE_INLINE __m128i _mm_setzero_si128(void) -{ - return vreinterpretq_m128i_s32(vdupq_n_s32(0)); -} - -// Clears the four single-precision, floating-point values. 
-// https://msdn.microsoft.com/en-us/library/vstudio/tk1t2tbz(v=vs.100).aspx -FORCE_INLINE __m128 _mm_setzero_ps(void) -{ - return vreinterpretq_m128_f32(vdupq_n_f32(0)); -} - -// Return vector of type __m128d with all elements set to zero. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setzero_pd -FORCE_INLINE __m128d _mm_setzero_pd(void) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64(vdupq_n_f64(0)); -#else - return vreinterpretq_m128d_f32(vdupq_n_f32(0)); -#endif -} - -// Sets the four single-precision, floating-point values to w. -// -// r0 := r1 := r2 := r3 := w -// -// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx -FORCE_INLINE __m128 _mm_set1_ps(float _w) -{ - return vreinterpretq_m128_f32(vdupq_n_f32(_w)); -} - -// Sets the four single-precision, floating-point values to w. -// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx -FORCE_INLINE __m128 _mm_set_ps1(float _w) -{ - return vreinterpretq_m128_f32(vdupq_n_f32(_w)); -} - -// Sets the four single-precision, floating-point values to the four inputs. -// https://msdn.microsoft.com/en-us/library/vstudio/afh0zf75(v=vs.100).aspx -FORCE_INLINE __m128 _mm_set_ps(float w, float z, float y, float x) -{ - float ALIGN_STRUCT(16) data[4] = {x, y, z, w}; - return vreinterpretq_m128_f32(vld1q_f32(data)); -} - -// Copy single-precision (32-bit) floating-point element a to the lower element -// of dst, and zero the upper 3 elements. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_ss -FORCE_INLINE __m128 _mm_set_ss(float a) -{ - float ALIGN_STRUCT(16) data[4] = {a, 0, 0, 0}; - return vreinterpretq_m128_f32(vld1q_f32(data)); -} - -// Sets the four single-precision, floating-point values to the four inputs in -// reverse order. -// https://msdn.microsoft.com/en-us/library/vstudio/d2172ct3(v=vs.100).aspx -FORCE_INLINE __m128 _mm_setr_ps(float w, float z, float y, float x) -{ - float ALIGN_STRUCT(16) data[4] = {w, z, y, x}; - return vreinterpretq_m128_f32(vld1q_f32(data)); -} - -// Sets the 8 signed 16-bit integer values in reverse order. -// -// Return Value -// r0 := w0 -// r1 := w1 -// ... -// r7 := w7 -FORCE_INLINE __m128i _mm_setr_epi16(short w0, - short w1, - short w2, - short w3, - short w4, - short w5, - short w6, - short w7) -{ - int16_t ALIGN_STRUCT(16) data[8] = {w0, w1, w2, w3, w4, w5, w6, w7}; - return vreinterpretq_m128i_s16(vld1q_s16((int16_t *) data)); -} - -// Sets the 4 signed 32-bit integer values in reverse order -// https://technet.microsoft.com/en-us/library/security/27yb3ee5(v=vs.90).aspx -FORCE_INLINE __m128i _mm_setr_epi32(int i3, int i2, int i1, int i0) -{ - int32_t ALIGN_STRUCT(16) data[4] = {i3, i2, i1, i0}; - return vreinterpretq_m128i_s32(vld1q_s32(data)); -} - -// Set packed 64-bit integers in dst with the supplied values in reverse order. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setr_epi64 -FORCE_INLINE __m128i _mm_setr_epi64(__m64 e1, __m64 e0) -{ - return vreinterpretq_m128i_s64(vcombine_s64(e1, e0)); -} - -// Sets the 16 signed 8-bit integer values to b. -// -// r0 := b -// r1 := b -// ... -// r15 := b -// -// https://msdn.microsoft.com/en-us/library/6e14xhyf(v=vs.100).aspx -FORCE_INLINE __m128i _mm_set1_epi8(signed char w) -{ - return vreinterpretq_m128i_s8(vdupq_n_s8(w)); -} - -// Sets the 8 signed 16-bit integer values to w. -// -// r0 := w -// r1 := w -// ... 
-// r7 := w -// -// https://msdn.microsoft.com/en-us/library/k0ya3x0e(v=vs.90).aspx -FORCE_INLINE __m128i _mm_set1_epi16(short w) -{ - return vreinterpretq_m128i_s16(vdupq_n_s16(w)); -} - -// Sets the 16 signed 8-bit integer values. -// https://msdn.microsoft.com/en-us/library/x0cx8zd3(v=vs.90).aspx -FORCE_INLINE __m128i _mm_set_epi8(signed char b15, - signed char b14, - signed char b13, - signed char b12, - signed char b11, - signed char b10, - signed char b9, - signed char b8, - signed char b7, - signed char b6, - signed char b5, - signed char b4, - signed char b3, - signed char b2, - signed char b1, - signed char b0) -{ - int8_t ALIGN_STRUCT(16) - data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3, - (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7, - (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11, - (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15}; - return (__m128i) vld1q_s8(data); -} - -// Sets the 8 signed 16-bit integer values. -// https://msdn.microsoft.com/en-au/library/3e0fek84(v=vs.90).aspx -FORCE_INLINE __m128i _mm_set_epi16(short i7, - short i6, - short i5, - short i4, - short i3, - short i2, - short i1, - short i0) -{ - int16_t ALIGN_STRUCT(16) data[8] = {i0, i1, i2, i3, i4, i5, i6, i7}; - return vreinterpretq_m128i_s16(vld1q_s16(data)); -} - -// Sets the 16 signed 8-bit integer values in reverse order. -// https://msdn.microsoft.com/en-us/library/2khb9c7k(v=vs.90).aspx -FORCE_INLINE __m128i _mm_setr_epi8(signed char b0, - signed char b1, - signed char b2, - signed char b3, - signed char b4, - signed char b5, - signed char b6, - signed char b7, - signed char b8, - signed char b9, - signed char b10, - signed char b11, - signed char b12, - signed char b13, - signed char b14, - signed char b15) -{ - int8_t ALIGN_STRUCT(16) - data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3, - (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7, - (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11, - (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15}; - return (__m128i) vld1q_s8(data); -} - -// Sets the 4 signed 32-bit integer values to i. -// -// r0 := i -// r1 := i -// r2 := i -// r3 := I -// -// https://msdn.microsoft.com/en-us/library/vstudio/h4xscxat(v=vs.100).aspx -FORCE_INLINE __m128i _mm_set1_epi32(int _i) -{ - return vreinterpretq_m128i_s32(vdupq_n_s32(_i)); -} - -// Sets the 2 signed 64-bit integer values to i. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/whtfzhzk(v=vs.100) -FORCE_INLINE __m128i _mm_set1_epi64(__m64 _i) -{ - return vreinterpretq_m128i_s64(vdupq_n_s64((int64_t) _i)); -} - -// Sets the 2 signed 64-bit integer values to i. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set1_epi64x -FORCE_INLINE __m128i _mm_set1_epi64x(int64_t _i) -{ - return vreinterpretq_m128i_s64(vdupq_n_s64(_i)); -} - -// Sets the 4 signed 32-bit integer values. -// https://msdn.microsoft.com/en-us/library/vstudio/019beekt(v=vs.100).aspx -FORCE_INLINE __m128i _mm_set_epi32(int i3, int i2, int i1, int i0) -{ - int32_t ALIGN_STRUCT(16) data[4] = {i0, i1, i2, i3}; - return vreinterpretq_m128i_s32(vld1q_s32(data)); -} - -// Returns the __m128i structure with its two 64-bit integer values -// initialized to the values of the two 64-bit integers passed in. 
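// (Editor's note: illustrative example, not part of the original header. The
//  _mm_set_* and _mm_setr_* families above differ only in argument order:
//      __m128i a = _mm_set_epi32(3, 2, 1, 0);   // lane 0 == 0, lane 3 == 3
//      __m128i b = _mm_setr_epi32(3, 2, 1, 0);  // lane 0 == 3, lane 3 == 0
//  which matches the element order of the data[] arrays in the bodies above.)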
-// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx -FORCE_INLINE __m128i _mm_set_epi64x(int64_t i1, int64_t i2) -{ - int64_t ALIGN_STRUCT(16) data[2] = {i2, i1}; - return vreinterpretq_m128i_s64(vld1q_s64(data)); -} - -// Returns the __m128i structure with its two 64-bit integer values -// initialized to the values of the two 64-bit integers passed in. -// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx -FORCE_INLINE __m128i _mm_set_epi64(__m64 i1, __m64 i2) -{ - return _mm_set_epi64x((int64_t) i1, (int64_t) i2); -} - -// Set packed double-precision (64-bit) floating-point elements in dst with the -// supplied values. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_pd -FORCE_INLINE __m128d _mm_set_pd(double e1, double e0) -{ - double ALIGN_STRUCT(16) data[2] = {e0, e1}; -#if defined(__aarch64__) - return vreinterpretq_m128d_f64(vld1q_f64((float64_t *) data)); -#else - return vreinterpretq_m128d_f32(vld1q_f32((float32_t *) data)); -#endif -} - -// Stores four single-precision, floating-point values. -// https://msdn.microsoft.com/en-us/library/vstudio/s3h4ay6y(v=vs.100).aspx -FORCE_INLINE void _mm_store_ps(float *p, __m128 a) -{ - vst1q_f32(p, vreinterpretq_f32_m128(a)); -} - -// Stores four single-precision, floating-point values. -// https://msdn.microsoft.com/en-us/library/44e30x22(v=vs.100).aspx -FORCE_INLINE void _mm_storeu_ps(float *p, __m128 a) -{ - vst1q_f32(p, vreinterpretq_f32_m128(a)); -} - -// Stores four 32-bit integer values as (as a __m128i value) at the address p. -// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx -FORCE_INLINE void _mm_store_si128(__m128i *p, __m128i a) -{ - vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a)); -} - -// Stores four 32-bit integer values as (as a __m128i value) at the address p. -// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx -FORCE_INLINE void _mm_storeu_si128(__m128i *p, __m128i a) -{ - vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a)); -} - -// Stores the lower single - precision, floating - point value. -// https://msdn.microsoft.com/en-us/library/tzz10fbx(v=vs.100).aspx -FORCE_INLINE void _mm_store_ss(float *p, __m128 a) -{ - vst1q_lane_f32(p, vreinterpretq_f32_m128(a), 0); -} - -// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point -// elements) from a into memory. mem_addr must be aligned on a 16-byte boundary -// or a general-protection exception may be generated. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_pd -FORCE_INLINE void _mm_store_pd(double *mem_addr, __m128d a) -{ -#if defined(__aarch64__) - vst1q_f64((float64_t *) mem_addr, vreinterpretq_f64_m128d(a)); -#else - vst1q_f32((float32_t *) mem_addr, vreinterpretq_f32_m128d(a)); -#endif -} - -// Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point -// elements) from a into memory. mem_addr does not need to be aligned on any -// particular boundary. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_pd -FORCE_INLINE void _mm_storeu_pd(double *mem_addr, __m128d a) -{ - _mm_store_pd(mem_addr, a); -} - -// Reads the lower 64 bits of b and stores them into the lower 64 bits of a. 
-// https://msdn.microsoft.com/en-us/library/hhwf428f%28v=vs.90%29.aspx -FORCE_INLINE void _mm_storel_epi64(__m128i *a, __m128i b) -{ - uint64x1_t hi = vget_high_u64(vreinterpretq_u64_m128i(*a)); - uint64x1_t lo = vget_low_u64(vreinterpretq_u64_m128i(b)); - *a = vreinterpretq_m128i_u64(vcombine_u64(lo, hi)); -} - -// Stores the lower two single-precision floating point values of a to the -// address p. -// -// *p0 := a0 -// *p1 := a1 -// -// https://msdn.microsoft.com/en-us/library/h54t98ks(v=vs.90).aspx -FORCE_INLINE void _mm_storel_pi(__m64 *p, __m128 a) -{ - *p = vreinterpret_m64_f32(vget_low_f32(a)); -} - -// Stores the upper two single-precision, floating-point values of a to the -// address p. -// -// *p0 := a2 -// *p1 := a3 -// -// https://msdn.microsoft.com/en-us/library/a7525fs8(v%3dvs.90).aspx -FORCE_INLINE void _mm_storeh_pi(__m64 *p, __m128 a) -{ - *p = vreinterpret_m64_f32(vget_high_f32(a)); -} - -// Loads a single single-precision, floating-point value, copying it into all -// four words -// https://msdn.microsoft.com/en-us/library/vstudio/5cdkf716(v=vs.100).aspx -FORCE_INLINE __m128 _mm_load1_ps(const float *p) -{ - return vreinterpretq_m128_f32(vld1q_dup_f32(p)); -} - -// Load a single-precision (32-bit) floating-point element from memory into all -// elements of dst. -// -// dst[31:0] := MEM[mem_addr+31:mem_addr] -// dst[63:32] := MEM[mem_addr+31:mem_addr] -// dst[95:64] := MEM[mem_addr+31:mem_addr] -// dst[127:96] := MEM[mem_addr+31:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_ps1 -#define _mm_load_ps1 _mm_load1_ps - -// Sets the lower two single-precision, floating-point values with 64 -// bits of data loaded from the address p; the upper two values are passed -// through from a. -// -// Return Value -// r0 := *p0 -// r1 := *p1 -// r2 := a2 -// r3 := a3 -// -// https://msdn.microsoft.com/en-us/library/s57cyak2(v=vs.100).aspx -FORCE_INLINE __m128 _mm_loadl_pi(__m128 a, __m64 const *p) -{ - return vreinterpretq_m128_f32( - vcombine_f32(vld1_f32((const float32_t *) p), vget_high_f32(a))); -} - -// Load 4 single-precision (32-bit) floating-point elements from memory into dst -// in reverse order. mem_addr must be aligned on a 16-byte boundary or a -// general-protection exception may be generated. -// -// dst[31:0] := MEM[mem_addr+127:mem_addr+96] -// dst[63:32] := MEM[mem_addr+95:mem_addr+64] -// dst[95:64] := MEM[mem_addr+63:mem_addr+32] -// dst[127:96] := MEM[mem_addr+31:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_ps -FORCE_INLINE __m128 _mm_loadr_ps(const float *p) -{ - float32x4_t v = vrev64q_f32(vld1q_f32(p)); - return vreinterpretq_m128_f32(vextq_f32(v, v, 2)); -} - -// Sets the upper two single-precision, floating-point values with 64 -// bits of data loaded from the address p; the lower two values are passed -// through from a. -// -// r0 := a0 -// r1 := a1 -// r2 := *p0 -// r3 := *p1 -// -// https://msdn.microsoft.com/en-us/library/w92wta0x(v%3dvs.100).aspx -FORCE_INLINE __m128 _mm_loadh_pi(__m128 a, __m64 const *p) -{ - return vreinterpretq_m128_f32( - vcombine_f32(vget_low_f32(a), vld1_f32((const float32_t *) p))); -} - -// Loads four single-precision, floating-point values. -// https://msdn.microsoft.com/en-us/library/vstudio/zzd50xxt(v=vs.100).aspx -FORCE_INLINE __m128 _mm_load_ps(const float *p) -{ - return vreinterpretq_m128_f32(vld1q_f32(p)); -} - -// Loads four single-precision, floating-point values. 
-// https://msdn.microsoft.com/en-us/library/x1b16s7z%28v=vs.90%29.aspx -FORCE_INLINE __m128 _mm_loadu_ps(const float *p) -{ - // for neon, alignment doesn't matter, so _mm_load_ps and _mm_loadu_ps are - // equivalent for neon - return vreinterpretq_m128_f32(vld1q_f32(p)); -} - -// Load unaligned 16-bit integer from memory into the first element of dst. -// -// dst[15:0] := MEM[mem_addr+15:mem_addr] -// dst[MAX:16] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si16 -FORCE_INLINE __m128i _mm_loadu_si16(const void *p) -{ - return vreinterpretq_m128i_s16( - vsetq_lane_s16(*(const int16_t *) p, vdupq_n_s16(0), 0)); -} - -// Load unaligned 64-bit integer from memory into the first element of dst. -// -// dst[63:0] := MEM[mem_addr+63:mem_addr] -// dst[MAX:64] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si64 -FORCE_INLINE __m128i _mm_loadu_si64(const void *p) -{ - return vreinterpretq_m128i_s64( - vcombine_s64(vld1_s64((const int64_t *) p), vdup_n_s64(0))); -} - -// Load a double-precision (64-bit) floating-point element from memory into the -// lower of dst, and zero the upper element. mem_addr does not need to be -// aligned on any particular boundary. -// -// dst[63:0] := MEM[mem_addr+63:mem_addr] -// dst[127:64] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_sd -FORCE_INLINE __m128d _mm_load_sd(const double *p) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64(vsetq_lane_f64(*p, vdupq_n_f64(0), 0)); -#else - const float *fp = (const float *) p; - float ALIGN_STRUCT(16) data[4] = {fp[0], fp[1], 0, 0}; - return vreinterpretq_m128d_f32(vld1q_f32(data)); -#endif -} - -// Loads two double-precision from 16-byte aligned memory, floating-point -// values. -// -// dst[127:0] := MEM[mem_addr+127:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd -FORCE_INLINE __m128d _mm_load_pd(const double *p) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64(vld1q_f64(p)); -#else - const float *fp = (const float *) p; - float ALIGN_STRUCT(16) data[4] = {fp[0], fp[1], fp[2], fp[3]}; - return vreinterpretq_m128d_f32(vld1q_f32(data)); -#endif -} - -// Loads two double-precision from unaligned memory, floating-point values. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_pd -FORCE_INLINE __m128d _mm_loadu_pd(const double *p) -{ - return _mm_load_pd(p); -} - -// Loads an single - precision, floating - point value into the low word and -// clears the upper three words. -// https://msdn.microsoft.com/en-us/library/548bb9h4%28v=vs.90%29.aspx -FORCE_INLINE __m128 _mm_load_ss(const float *p) -{ - return vreinterpretq_m128_f32(vsetq_lane_f32(*p, vdupq_n_f32(0), 0)); -} - -FORCE_INLINE __m128i _mm_loadl_epi64(__m128i const *p) -{ - /* Load the lower 64 bits of the value pointed to by p into the - * lower 64 bits of the result, zeroing the upper 64 bits of the result. - */ - return vreinterpretq_m128i_s32( - vcombine_s32(vld1_s32((int32_t const *) p), vcreate_s32(0))); -} - -// Load a double-precision (64-bit) floating-point element from memory into the -// lower element of dst, and copy the upper element from a to dst. mem_addr does -// not need to be aligned on any particular boundary. 
-// -// dst[63:0] := MEM[mem_addr+63:mem_addr] -// dst[127:64] := a[127:64] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadl_pd -FORCE_INLINE __m128d _mm_loadl_pd(__m128d a, const double *p) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64( - vcombine_f64(vld1_f64(p), vget_high_f64(vreinterpretq_f64_m128d(a)))); -#else - return vreinterpretq_m128d_f32( - vcombine_f32(vld1_f32((const float *) p), - vget_high_f32(vreinterpretq_f32_m128d(a)))); -#endif -} - -// Load 2 double-precision (64-bit) floating-point elements from memory into dst -// in reverse order. mem_addr must be aligned on a 16-byte boundary or a -// general-protection exception may be generated. -// -// dst[63:0] := MEM[mem_addr+127:mem_addr+64] -// dst[127:64] := MEM[mem_addr+63:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_pd -FORCE_INLINE __m128d _mm_loadr_pd(const double *p) -{ -#if defined(__aarch64__) - float64x2_t v = vld1q_f64(p); - return vreinterpretq_m128d_f64(vextq_f64(v, v, 1)); -#else - int64x2_t v = vld1q_s64((const int64_t *) p); - return vreinterpretq_m128d_s64(vextq_s64(v, v, 1)); -#endif -} - -// Sets the low word to the single-precision, floating-point value of b -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/35hdzazd(v=vs.100) -FORCE_INLINE __m128 _mm_move_ss(__m128 a, __m128 b) -{ - return vreinterpretq_m128_f32( - vsetq_lane_f32(vgetq_lane_f32(vreinterpretq_f32_m128(b), 0), - vreinterpretq_f32_m128(a), 0)); -} - -// Copy the lower 64-bit integer in a to the lower element of dst, and zero the -// upper element. -// -// dst[63:0] := a[63:0] -// dst[127:64] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_move_epi64 -FORCE_INLINE __m128i _mm_move_epi64(__m128i a) -{ - return vreinterpretq_m128i_s64( - vsetq_lane_s64(0, vreinterpretq_s64_m128i(a), 1)); -} - -// Return vector of type __m128 with undefined elements. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_undefined_ps -FORCE_INLINE __m128 _mm_undefined_ps(void) -{ - __m128 a; - return a; -} - -/* Logic/Binary operations */ - -// Computes the bitwise AND-NOT of the four single-precision, floating-point -// values of a and b. -// -// r0 := ~a0 & b0 -// r1 := ~a1 & b1 -// r2 := ~a2 & b2 -// r3 := ~a3 & b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/68h7wd02(v=vs.100).aspx -FORCE_INLINE __m128 _mm_andnot_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_s32( - vbicq_s32(vreinterpretq_s32_m128(b), - vreinterpretq_s32_m128(a))); // *NOTE* argument swap -} - -// Compute the bitwise NOT of packed double-precision (64-bit) floating-point -// elements in a and then AND with b, and store the results in dst. -// -// FOR j := 0 to 1 -// i := j*64 -// dst[i+63:i] := ((NOT a[i+63:i]) AND b[i+63:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_andnot_pd -FORCE_INLINE __m128d _mm_andnot_pd(__m128d a, __m128d b) -{ - // *NOTE* argument swap - return vreinterpretq_m128d_s64( - vbicq_s64(vreinterpretq_s64_m128d(b), vreinterpretq_s64_m128d(a))); -} - -// Computes the bitwise AND of the 128-bit value in b and the bitwise NOT of the -// 128-bit value in a. 
-// -// r := (~a) & b -// -// https://msdn.microsoft.com/en-us/library/vstudio/1beaceh8(v=vs.100).aspx -FORCE_INLINE __m128i _mm_andnot_si128(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - vbicq_s32(vreinterpretq_s32_m128i(b), - vreinterpretq_s32_m128i(a))); // *NOTE* argument swap -} - -// Computes the bitwise AND of the 128-bit value in a and the 128-bit value in -// b. -// -// r := a & b -// -// https://msdn.microsoft.com/en-us/library/vstudio/6d1txsa8(v=vs.100).aspx -FORCE_INLINE __m128i _mm_and_si128(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - vandq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Computes the bitwise AND of the four single-precision, floating-point values -// of a and b. -// -// r0 := a0 & b0 -// r1 := a1 & b1 -// r2 := a2 & b2 -// r3 := a3 & b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/73ck1xc5(v=vs.100).aspx -FORCE_INLINE __m128 _mm_and_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_s32( - vandq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b))); -} - -// Compute the bitwise AND of packed double-precision (64-bit) floating-point -// elements in a and b, and store the results in dst. -// -// FOR j := 0 to 1 -// i := j*64 -// dst[i+63:i] := a[i+63:i] AND b[i+63:i] -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_and_pd -FORCE_INLINE __m128d _mm_and_pd(__m128d a, __m128d b) -{ - return vreinterpretq_m128d_s64( - vandq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b))); -} - -// Computes the bitwise OR of the four single-precision, floating-point values -// of a and b. -// https://msdn.microsoft.com/en-us/library/vstudio/7ctdsyy0(v=vs.100).aspx -FORCE_INLINE __m128 _mm_or_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_s32( - vorrq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b))); -} - -// Computes bitwise EXOR (exclusive-or) of the four single-precision, -// floating-point values of a and b. -// https://msdn.microsoft.com/en-us/library/ss6k3wk8(v=vs.100).aspx -FORCE_INLINE __m128 _mm_xor_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_s32( - veorq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b))); -} - -// Compute the bitwise XOR of packed double-precision (64-bit) floating-point -// elements in a and b, and store the results in dst. -// -// FOR j := 0 to 1 -// i := j*64 -// dst[i+63:i] := a[i+63:i] XOR b[i+63:i] -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_xor_pd -FORCE_INLINE __m128d _mm_xor_pd(__m128d a, __m128d b) -{ - return vreinterpretq_m128d_s64( - veorq_s64(vreinterpretq_s64_m128d(a), vreinterpretq_s64_m128d(b))); -} - -// Computes the bitwise OR of the 128-bit value in a and the 128-bit value in b. -// -// r := a | b -// -// https://msdn.microsoft.com/en-us/library/vstudio/ew8ty0db(v=vs.100).aspx -FORCE_INLINE __m128i _mm_or_si128(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - vorrq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Computes the bitwise XOR of the 128-bit value in a and the 128-bit value in -// b. https://msdn.microsoft.com/en-us/library/fzt08www(v=vs.100).aspx -FORCE_INLINE __m128i _mm_xor_si128(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - veorq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Duplicate odd-indexed single-precision (32-bit) floating-point elements -// from a, and store the results in dst. 
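// (Editor's note: illustrative example, not part of the original header. In the
//  andnot family above, the first operand is the one that gets complemented:
//      __m128i mask = _mm_set1_epi32(0x0F);
//      __m128i r    = _mm_andnot_si128(mask, x);  // r = (~mask) & x, i.e. the
//                                                  // bits set in mask are cleared
//  where 'x' stands for any __m128i value. That operand order is why the
//  vbicq_* calls above pass (b, a) rather than (a, b).)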
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movehdup_ps -FORCE_INLINE __m128 _mm_movehdup_ps(__m128 a) -{ -#if __has_builtin(__builtin_shufflevector) - return vreinterpretq_m128_f32(__builtin_shufflevector( - vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 1, 1, 3, 3)); -#else - float32_t a1 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 1); - float32_t a3 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 3); - float ALIGN_STRUCT(16) data[4] = {a1, a1, a3, a3}; - return vreinterpretq_m128_f32(vld1q_f32(data)); -#endif -} - -// Duplicate even-indexed single-precision (32-bit) floating-point elements -// from a, and store the results in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_moveldup_ps -FORCE_INLINE __m128 _mm_moveldup_ps(__m128 a) -{ -#if __has_builtin(__builtin_shufflevector) - return vreinterpretq_m128_f32(__builtin_shufflevector( - vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 0, 0, 2, 2)); -#else - float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); - float32_t a2 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 2); - float ALIGN_STRUCT(16) data[4] = {a0, a0, a2, a2}; - return vreinterpretq_m128_f32(vld1q_f32(data)); -#endif -} - -// Moves the upper two values of B into the lower two values of A. -// -// r3 := a3 -// r2 := a2 -// r1 := b3 -// r0 := b2 -FORCE_INLINE __m128 _mm_movehl_ps(__m128 __A, __m128 __B) -{ - float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(__A)); - float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(__B)); - return vreinterpretq_m128_f32(vcombine_f32(b32, a32)); -} - -// Moves the lower two values of B into the upper two values of A. -// -// r3 := b1 -// r2 := b0 -// r1 := a1 -// r0 := a0 -FORCE_INLINE __m128 _mm_movelh_ps(__m128 __A, __m128 __B) -{ - float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(__A)); - float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(__B)); - return vreinterpretq_m128_f32(vcombine_f32(a10, b10)); -} - -// Compute the absolute value of packed signed 32-bit integers in a, and store -// the unsigned results in dst. -// -// FOR j := 0 to 3 -// i := j*32 -// dst[i+31:i] := ABS(a[i+31:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi32 -FORCE_INLINE __m128i _mm_abs_epi32(__m128i a) -{ - return vreinterpretq_m128i_s32(vabsq_s32(vreinterpretq_s32_m128i(a))); -} - -// Compute the absolute value of packed signed 16-bit integers in a, and store -// the unsigned results in dst. -// -// FOR j := 0 to 7 -// i := j*16 -// dst[i+15:i] := ABS(a[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi16 -FORCE_INLINE __m128i _mm_abs_epi16(__m128i a) -{ - return vreinterpretq_m128i_s16(vabsq_s16(vreinterpretq_s16_m128i(a))); -} - -// Compute the absolute value of packed signed 8-bit integers in a, and store -// the unsigned results in dst. -// -// FOR j := 0 to 15 -// i := j*8 -// dst[i+7:i] := ABS(a[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_epi8 -FORCE_INLINE __m128i _mm_abs_epi8(__m128i a) -{ - return vreinterpretq_m128i_s8(vabsq_s8(vreinterpretq_s8_m128i(a))); -} - -// Compute the absolute value of packed signed 32-bit integers in a, and store -// the unsigned results in dst. 
-// -// FOR j := 0 to 1 -// i := j*32 -// dst[i+31:i] := ABS(a[i+31:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi32 -FORCE_INLINE __m64 _mm_abs_pi32(__m64 a) -{ - return vreinterpret_m64_s32(vabs_s32(vreinterpret_s32_m64(a))); -} - -// Compute the absolute value of packed signed 16-bit integers in a, and store -// the unsigned results in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := ABS(a[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi16 -FORCE_INLINE __m64 _mm_abs_pi16(__m64 a) -{ - return vreinterpret_m64_s16(vabs_s16(vreinterpret_s16_m64(a))); -} - -// Compute the absolute value of packed signed 8-bit integers in a, and store -// the unsigned results in dst. -// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := ABS(a[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_abs_pi8 -FORCE_INLINE __m64 _mm_abs_pi8(__m64 a) -{ - return vreinterpret_m64_s8(vabs_s8(vreinterpret_s8_m64(a))); -} - -// Takes the upper 64 bits of a and places it in the low end of the result -// Takes the lower 64 bits of b and places it into the high end of the result. -FORCE_INLINE __m128 _mm_shuffle_ps_1032(__m128 a, __m128 b) -{ - float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a)); - float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_f32(vcombine_f32(a32, b10)); -} - -// takes the lower two 32-bit values from a and swaps them and places in high -// end of result takes the higher two 32 bit values from b and swaps them and -// places in low end of result. -FORCE_INLINE __m128 _mm_shuffle_ps_2301(__m128 a, __m128 b) -{ - float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); - float32x2_t b23 = vrev64_f32(vget_high_f32(vreinterpretq_f32_m128(b))); - return vreinterpretq_m128_f32(vcombine_f32(a01, b23)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_0321(__m128 a, __m128 b) -{ - float32x2_t a21 = vget_high_f32( - vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3)); - float32x2_t b03 = vget_low_f32( - vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3)); - return vreinterpretq_m128_f32(vcombine_f32(a21, b03)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_2103(__m128 a, __m128 b) -{ - float32x2_t a03 = vget_low_f32( - vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3)); - float32x2_t b21 = vget_high_f32( - vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3)); - return vreinterpretq_m128_f32(vcombine_f32(a03, b21)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_1010(__m128 a, __m128 b) -{ - float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); - float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_f32(vcombine_f32(a10, b10)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_1001(__m128 a, __m128 b) -{ - float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); - float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_f32(vcombine_f32(a01, b10)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_0101(__m128 a, __m128 b) -{ - float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); - float32x2_t b01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(b))); - return vreinterpretq_m128_f32(vcombine_f32(a01, b01)); -} - -// keeps the low 64 bits of b in the low and puts the high 64 bits of a in the -// high -FORCE_INLINE __m128 _mm_shuffle_ps_3210(__m128 a, __m128 b) -{ 
- float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); - float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_f32(vcombine_f32(a10, b32)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_0011(__m128 a, __m128 b) -{ - float32x2_t a11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 1); - float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); - return vreinterpretq_m128_f32(vcombine_f32(a11, b00)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_0022(__m128 a, __m128 b) -{ - float32x2_t a22 = - vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0); - float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); - return vreinterpretq_m128_f32(vcombine_f32(a22, b00)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_2200(__m128 a, __m128 b) -{ - float32x2_t a00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 0); - float32x2_t b22 = - vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(b)), 0); - return vreinterpretq_m128_f32(vcombine_f32(a00, b22)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_3202(__m128 a, __m128 b) -{ - float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); - float32x2_t a22 = - vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0); - float32x2_t a02 = vset_lane_f32(a0, a22, 1); /* TODO: use vzip ?*/ - float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_f32(vcombine_f32(a02, b32)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_1133(__m128 a, __m128 b) -{ - float32x2_t a33 = - vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 1); - float32x2_t b11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 1); - return vreinterpretq_m128_f32(vcombine_f32(a33, b11)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_2010(__m128 a, __m128 b) -{ - float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); - float32_t b2 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 2); - float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); - float32x2_t b20 = vset_lane_f32(b2, b00, 1); - return vreinterpretq_m128_f32(vcombine_f32(a10, b20)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_2001(__m128 a, __m128 b) -{ - float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); - float32_t b2 = vgetq_lane_f32(b, 2); - float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); - float32x2_t b20 = vset_lane_f32(b2, b00, 1); - return vreinterpretq_m128_f32(vcombine_f32(a01, b20)); -} - -FORCE_INLINE __m128 _mm_shuffle_ps_2032(__m128 a, __m128 b) -{ - float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a)); - float32_t b2 = vgetq_lane_f32(b, 2); - float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); - float32x2_t b20 = vset_lane_f32(b2, b00, 1); - return vreinterpretq_m128_f32(vcombine_f32(a32, b20)); -} - -// NEON does not support a general purpose permute intrinsic -// Selects four specific single-precision, floating-point values from a and b, -// based on the mask i. 
-// -// C equivalent: -// __m128 _mm_shuffle_ps_default(__m128 a, __m128 b, -// __constrange(0, 255) int imm) { -// __m128 ret; -// ret[0] = a[imm & 0x3]; ret[1] = a[(imm >> 2) & 0x3]; -// ret[2] = b[(imm >> 4) & 0x03]; ret[3] = b[(imm >> 6) & 0x03]; -// return ret; -// } -// -// https://msdn.microsoft.com/en-us/library/vstudio/5f0858x0(v=vs.100).aspx -#define _mm_shuffle_ps_default(a, b, imm) \ - __extension__({ \ - float32x4_t ret; \ - ret = vmovq_n_f32( \ - vgetq_lane_f32(vreinterpretq_f32_m128(a), (imm) & (0x3))); \ - ret = vsetq_lane_f32( \ - vgetq_lane_f32(vreinterpretq_f32_m128(a), ((imm) >> 2) & 0x3), \ - ret, 1); \ - ret = vsetq_lane_f32( \ - vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 4) & 0x3), \ - ret, 2); \ - ret = vsetq_lane_f32( \ - vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 6) & 0x3), \ - ret, 3); \ - vreinterpretq_m128_f32(ret); \ - }) - -// FORCE_INLINE __m128 _mm_shuffle_ps(__m128 a, __m128 b, __constrange(0,255) -// int imm) -#if __has_builtin(__builtin_shufflevector) -#define _mm_shuffle_ps(a, b, imm) \ - __extension__({ \ - float32x4_t _input1 = vreinterpretq_f32_m128(a); \ - float32x4_t _input2 = vreinterpretq_f32_m128(b); \ - float32x4_t _shuf = __builtin_shufflevector( \ - _input1, _input2, (imm) & (0x3), ((imm) >> 2) & 0x3, \ - (((imm) >> 4) & 0x3) + 4, (((imm) >> 6) & 0x3) + 4); \ - vreinterpretq_m128_f32(_shuf); \ - }) -#else // generic -#define _mm_shuffle_ps(a, b, imm) \ - __extension__({ \ - __m128 ret; \ - switch (imm) { \ - case _MM_SHUFFLE(1, 0, 3, 2): \ - ret = _mm_shuffle_ps_1032((a), (b)); \ - break; \ - case _MM_SHUFFLE(2, 3, 0, 1): \ - ret = _mm_shuffle_ps_2301((a), (b)); \ - break; \ - case _MM_SHUFFLE(0, 3, 2, 1): \ - ret = _mm_shuffle_ps_0321((a), (b)); \ - break; \ - case _MM_SHUFFLE(2, 1, 0, 3): \ - ret = _mm_shuffle_ps_2103((a), (b)); \ - break; \ - case _MM_SHUFFLE(1, 0, 1, 0): \ - ret = _mm_movelh_ps((a), (b)); \ - break; \ - case _MM_SHUFFLE(1, 0, 0, 1): \ - ret = _mm_shuffle_ps_1001((a), (b)); \ - break; \ - case _MM_SHUFFLE(0, 1, 0, 1): \ - ret = _mm_shuffle_ps_0101((a), (b)); \ - break; \ - case _MM_SHUFFLE(3, 2, 1, 0): \ - ret = _mm_shuffle_ps_3210((a), (b)); \ - break; \ - case _MM_SHUFFLE(0, 0, 1, 1): \ - ret = _mm_shuffle_ps_0011((a), (b)); \ - break; \ - case _MM_SHUFFLE(0, 0, 2, 2): \ - ret = _mm_shuffle_ps_0022((a), (b)); \ - break; \ - case _MM_SHUFFLE(2, 2, 0, 0): \ - ret = _mm_shuffle_ps_2200((a), (b)); \ - break; \ - case _MM_SHUFFLE(3, 2, 0, 2): \ - ret = _mm_shuffle_ps_3202((a), (b)); \ - break; \ - case _MM_SHUFFLE(3, 2, 3, 2): \ - ret = _mm_movehl_ps((b), (a)); \ - break; \ - case _MM_SHUFFLE(1, 1, 3, 3): \ - ret = _mm_shuffle_ps_1133((a), (b)); \ - break; \ - case _MM_SHUFFLE(2, 0, 1, 0): \ - ret = _mm_shuffle_ps_2010((a), (b)); \ - break; \ - case _MM_SHUFFLE(2, 0, 0, 1): \ - ret = _mm_shuffle_ps_2001((a), (b)); \ - break; \ - case _MM_SHUFFLE(2, 0, 3, 2): \ - ret = _mm_shuffle_ps_2032((a), (b)); \ - break; \ - default: \ - ret = _mm_shuffle_ps_default((a), (b), (imm)); \ - break; \ - } \ - ret; \ - }) -#endif - -// Takes the upper 64 bits of a and places it in the low end of the result -// Takes the lower 64 bits of a and places it into the high end of the result. 
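// Usage sketch (editorial addition, illustrative only). Shows how the
// _MM_SHUFFLE immediate drives the _mm_shuffle_ps macro above. Assumes this
// header is included as "sse2neon.h" (name assumed) and that the usual
// _mm_setr_ps/_mm_storeu_ps helpers are provided elsewhere in it.
//
// #include <stdio.h>
// #include "sse2neon.h"
//
// int main(void)
// {
//     __m128 a = _mm_setr_ps(0.0f, 1.0f, 2.0f, 3.0f); // lanes a0..a3
//     __m128 b = _mm_setr_ps(4.0f, 5.0f, 6.0f, 7.0f); // lanes b0..b3
//
//     // _MM_SHUFFLE(1, 0, 3, 2) == (1 << 6) | (0 << 4) | (3 << 2) | 2 == 0x4E,
//     // so dst = { a2, a3, b0, b1 } and the _mm_shuffle_ps_1032 path is taken.
//     __m128 r = _mm_shuffle_ps(a, b, _MM_SHUFFLE(1, 0, 3, 2));
//
//     float out[4];
//     _mm_storeu_ps(out, r);
//     printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]); // 2 3 4 5
//     return 0;
// }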
-FORCE_INLINE __m128i _mm_shuffle_epi_1032(__m128i a) -{ - int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a)); - int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a)); - return vreinterpretq_m128i_s32(vcombine_s32(a32, a10)); -} - -// takes the lower two 32-bit values from a and swaps them and places in low end -// of result takes the higher two 32 bit values from a and swaps them and places -// in high end of result. -FORCE_INLINE __m128i _mm_shuffle_epi_2301(__m128i a) -{ - int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); - int32x2_t a23 = vrev64_s32(vget_high_s32(vreinterpretq_s32_m128i(a))); - return vreinterpretq_m128i_s32(vcombine_s32(a01, a23)); -} - -// rotates the least significant 32 bits into the most signficant 32 bits, and -// shifts the rest down -FORCE_INLINE __m128i _mm_shuffle_epi_0321(__m128i a) -{ - return vreinterpretq_m128i_s32( - vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 1)); -} - -// rotates the most significant 32 bits into the least signficant 32 bits, and -// shifts the rest up -FORCE_INLINE __m128i _mm_shuffle_epi_2103(__m128i a) -{ - return vreinterpretq_m128i_s32( - vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 3)); -} - -// gets the lower 64 bits of a, and places it in the upper 64 bits -// gets the lower 64 bits of a and places it in the lower 64 bits -FORCE_INLINE __m128i _mm_shuffle_epi_1010(__m128i a) -{ - int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a)); - return vreinterpretq_m128i_s32(vcombine_s32(a10, a10)); -} - -// gets the lower 64 bits of a, swaps the 0 and 1 elements, and places it in the -// lower 64 bits gets the lower 64 bits of a, and places it in the upper 64 bits -FORCE_INLINE __m128i _mm_shuffle_epi_1001(__m128i a) -{ - int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); - int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a)); - return vreinterpretq_m128i_s32(vcombine_s32(a01, a10)); -} - -// gets the lower 64 bits of a, swaps the 0 and 1 elements and places it in the -// upper 64 bits gets the lower 64 bits of a, swaps the 0 and 1 elements, and -// places it in the lower 64 bits -FORCE_INLINE __m128i _mm_shuffle_epi_0101(__m128i a) -{ - int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); - return vreinterpretq_m128i_s32(vcombine_s32(a01, a01)); -} - -FORCE_INLINE __m128i _mm_shuffle_epi_2211(__m128i a) -{ - int32x2_t a11 = vdup_lane_s32(vget_low_s32(vreinterpretq_s32_m128i(a)), 1); - int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0); - return vreinterpretq_m128i_s32(vcombine_s32(a11, a22)); -} - -FORCE_INLINE __m128i _mm_shuffle_epi_0122(__m128i a) -{ - int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0); - int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); - return vreinterpretq_m128i_s32(vcombine_s32(a22, a01)); -} - -FORCE_INLINE __m128i _mm_shuffle_epi_3332(__m128i a) -{ - int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a)); - int32x2_t a33 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 1); - return vreinterpretq_m128i_s32(vcombine_s32(a32, a33)); -} - -// Shuffle packed 8-bit integers in a according to shuffle control mask in the -// corresponding 8-bit element of b, and store the results in dst. 
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_epi8 -FORCE_INLINE __m128i _mm_shuffle_epi8(__m128i a, __m128i b) -{ - int8x16_t tbl = vreinterpretq_s8_m128i(a); // input a - uint8x16_t idx = vreinterpretq_u8_m128i(b); // input b - uint8x16_t idx_masked = - vandq_u8(idx, vdupq_n_u8(0x8F)); // avoid using meaningless bits -#if defined(__aarch64__) - return vreinterpretq_m128i_s8(vqtbl1q_s8(tbl, idx_masked)); -#elif defined(__GNUC__) - int8x16_t ret; - // %e and %f represent the even and odd D registers - // respectively. - __asm__ __volatile__( - "vtbl.8 %e[ret], {%e[tbl], %f[tbl]}, %e[idx]\n" - "vtbl.8 %f[ret], {%e[tbl], %f[tbl]}, %f[idx]\n" - : [ret] "=&w"(ret) - : [tbl] "w"(tbl), [idx] "w"(idx_masked)); - return vreinterpretq_m128i_s8(ret); -#else - // use this line if testing on aarch64 - int8x8x2_t a_split = {vget_low_s8(tbl), vget_high_s8(tbl)}; - return vreinterpretq_m128i_s8( - vcombine_s8(vtbl2_s8(a_split, vget_low_u8(idx_masked)), - vtbl2_s8(a_split, vget_high_u8(idx_masked)))); -#endif -} - -// C equivalent: -// __m128i _mm_shuffle_epi32_default(__m128i a, -// __constrange(0, 255) int imm) { -// __m128i ret; -// ret[0] = a[imm & 0x3]; ret[1] = a[(imm >> 2) & 0x3]; -// ret[2] = a[(imm >> 4) & 0x03]; ret[3] = a[(imm >> 6) & 0x03]; -// return ret; -// } -#define _mm_shuffle_epi32_default(a, imm) \ - __extension__({ \ - int32x4_t ret; \ - ret = vmovq_n_s32( \ - vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm) & (0x3))); \ - ret = vsetq_lane_s32( \ - vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 2) & 0x3), \ - ret, 1); \ - ret = vsetq_lane_s32( \ - vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 4) & 0x3), \ - ret, 2); \ - ret = vsetq_lane_s32( \ - vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 6) & 0x3), \ - ret, 3); \ - vreinterpretq_m128i_s32(ret); \ - }) - -// FORCE_INLINE __m128i _mm_shuffle_epi32_splat(__m128i a, __constrange(0,255) -// int imm) -#if defined(__aarch64__) -#define _mm_shuffle_epi32_splat(a, imm) \ - __extension__({ \ - vreinterpretq_m128i_s32( \ - vdupq_laneq_s32(vreinterpretq_s32_m128i(a), (imm))); \ - }) -#else -#define _mm_shuffle_epi32_splat(a, imm) \ - __extension__({ \ - vreinterpretq_m128i_s32( \ - vdupq_n_s32(vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm)))); \ - }) -#endif - -// Shuffles the 4 signed or unsigned 32-bit integers in a as specified by imm. 
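// Usage sketch (editorial addition, illustrative only). _mm_shuffle_epi8
// above is a per-byte table lookup: index byte j selects a[idx & 0x0F], and
// an index with its top bit set produces zero (PSHUFB semantics). Assumes
// _mm_setr_epi8/_mm_set1_epi8 are defined elsewhere in this header.
//
// __m128i v   = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7,
//                             8, 9, 10, 11, 12, 13, 14, 15);
// __m128i rev = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8,
//                             7, 6, 5, 4, 3, 2, 1, 0);
// __m128i r   = _mm_shuffle_epi8(v, rev);                       // bytes reversed
// __m128i z   = _mm_shuffle_epi8(v, _mm_set1_epi8((char) 0x80)); // all zero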
-// https://msdn.microsoft.com/en-us/library/56f67xbk%28v=vs.90%29.aspx -// FORCE_INLINE __m128i _mm_shuffle_epi32(__m128i a, -// __constrange(0,255) int imm) -#if __has_builtin(__builtin_shufflevector) -#define _mm_shuffle_epi32(a, imm) \ - __extension__({ \ - int32x4_t _input = vreinterpretq_s32_m128i(a); \ - int32x4_t _shuf = __builtin_shufflevector( \ - _input, _input, (imm) & (0x3), ((imm) >> 2) & 0x3, \ - ((imm) >> 4) & 0x3, ((imm) >> 6) & 0x3); \ - vreinterpretq_m128i_s32(_shuf); \ - }) -#else // generic -#define _mm_shuffle_epi32(a, imm) \ - __extension__({ \ - __m128i ret; \ - switch (imm) { \ - case _MM_SHUFFLE(1, 0, 3, 2): \ - ret = _mm_shuffle_epi_1032((a)); \ - break; \ - case _MM_SHUFFLE(2, 3, 0, 1): \ - ret = _mm_shuffle_epi_2301((a)); \ - break; \ - case _MM_SHUFFLE(0, 3, 2, 1): \ - ret = _mm_shuffle_epi_0321((a)); \ - break; \ - case _MM_SHUFFLE(2, 1, 0, 3): \ - ret = _mm_shuffle_epi_2103((a)); \ - break; \ - case _MM_SHUFFLE(1, 0, 1, 0): \ - ret = _mm_shuffle_epi_1010((a)); \ - break; \ - case _MM_SHUFFLE(1, 0, 0, 1): \ - ret = _mm_shuffle_epi_1001((a)); \ - break; \ - case _MM_SHUFFLE(0, 1, 0, 1): \ - ret = _mm_shuffle_epi_0101((a)); \ - break; \ - case _MM_SHUFFLE(2, 2, 1, 1): \ - ret = _mm_shuffle_epi_2211((a)); \ - break; \ - case _MM_SHUFFLE(0, 1, 2, 2): \ - ret = _mm_shuffle_epi_0122((a)); \ - break; \ - case _MM_SHUFFLE(3, 3, 3, 2): \ - ret = _mm_shuffle_epi_3332((a)); \ - break; \ - case _MM_SHUFFLE(0, 0, 0, 0): \ - ret = _mm_shuffle_epi32_splat((a), 0); \ - break; \ - case _MM_SHUFFLE(1, 1, 1, 1): \ - ret = _mm_shuffle_epi32_splat((a), 1); \ - break; \ - case _MM_SHUFFLE(2, 2, 2, 2): \ - ret = _mm_shuffle_epi32_splat((a), 2); \ - break; \ - case _MM_SHUFFLE(3, 3, 3, 3): \ - ret = _mm_shuffle_epi32_splat((a), 3); \ - break; \ - default: \ - ret = _mm_shuffle_epi32_default((a), (imm)); \ - break; \ - } \ - ret; \ - }) -#endif - -// Shuffles the lower 4 signed or unsigned 16-bit integers in a as specified -// by imm. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/y41dkk37(v=vs.100) -// FORCE_INLINE __m128i _mm_shufflelo_epi16_function(__m128i a, -// __constrange(0,255) int -// imm) -#define _mm_shufflelo_epi16_function(a, imm) \ - __extension__({ \ - int16x8_t ret = vreinterpretq_s16_m128i(a); \ - int16x4_t lowBits = vget_low_s16(ret); \ - ret = vsetq_lane_s16(vget_lane_s16(lowBits, (imm) & (0x3)), ret, 0); \ - ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 2) & 0x3), ret, \ - 1); \ - ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 4) & 0x3), ret, \ - 2); \ - ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 6) & 0x3), ret, \ - 3); \ - vreinterpretq_m128i_s16(ret); \ - }) - -// FORCE_INLINE __m128i _mm_shufflelo_epi16(__m128i a, -// __constrange(0,255) int imm) -#if __has_builtin(__builtin_shufflevector) -#define _mm_shufflelo_epi16(a, imm) \ - __extension__({ \ - int16x8_t _input = vreinterpretq_s16_m128i(a); \ - int16x8_t _shuf = __builtin_shufflevector( \ - _input, _input, ((imm) & (0x3)), (((imm) >> 2) & 0x3), \ - (((imm) >> 4) & 0x3), (((imm) >> 6) & 0x3), 4, 5, 6, 7); \ - vreinterpretq_m128i_s16(_shuf); \ - }) -#else // generic -#define _mm_shufflelo_epi16(a, imm) _mm_shufflelo_epi16_function((a), (imm)) -#endif - -// Shuffles the upper 4 signed or unsigned 16-bit integers in a as specified -// by imm. 
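// Usage sketch (editorial addition, illustrative only). The same _MM_SHUFFLE
// encoding applies to the integer shuffles above; _mm_shufflelo_epi16 only
// permutes the low four 16-bit lanes and passes the high four through.
// Assumes _mm_setr_epi32/_mm_setr_epi16 exist elsewhere in this header.
//
// __m128i v = _mm_setr_epi32(10, 20, 30, 40);
// __m128i s = _mm_shuffle_epi32(v, _MM_SHUFFLE(0, 0, 0, 0)); // {10,10,10,10}
// __m128i r = _mm_shuffle_epi32(v, _MM_SHUFFLE(0, 1, 2, 3)); // {40,30,20,10}
//
// __m128i w = _mm_setr_epi16(0, 1, 2, 3, 4, 5, 6, 7);
// __m128i l = _mm_shufflelo_epi16(w, _MM_SHUFFLE(0, 1, 2, 3)); // {3,2,1,0,4,5,6,7}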
-// https://msdn.microsoft.com/en-us/library/13ywktbs(v=vs.100).aspx -// FORCE_INLINE __m128i _mm_shufflehi_epi16_function(__m128i a, -// __constrange(0,255) int -// imm) -#define _mm_shufflehi_epi16_function(a, imm) \ - __extension__({ \ - int16x8_t ret = vreinterpretq_s16_m128i(a); \ - int16x4_t highBits = vget_high_s16(ret); \ - ret = vsetq_lane_s16(vget_lane_s16(highBits, (imm) & (0x3)), ret, 4); \ - ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 2) & 0x3), ret, \ - 5); \ - ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 4) & 0x3), ret, \ - 6); \ - ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 6) & 0x3), ret, \ - 7); \ - vreinterpretq_m128i_s16(ret); \ - }) - -// FORCE_INLINE __m128i _mm_shufflehi_epi16(__m128i a, -// __constrange(0,255) int imm) -#if __has_builtin(__builtin_shufflevector) -#define _mm_shufflehi_epi16(a, imm) \ - __extension__({ \ - int16x8_t _input = vreinterpretq_s16_m128i(a); \ - int16x8_t _shuf = __builtin_shufflevector( \ - _input, _input, 0, 1, 2, 3, ((imm) & (0x3)) + 4, \ - (((imm) >> 2) & 0x3) + 4, (((imm) >> 4) & 0x3) + 4, \ - (((imm) >> 6) & 0x3) + 4); \ - vreinterpretq_m128i_s16(_shuf); \ - }) -#else // generic -#define _mm_shufflehi_epi16(a, imm) _mm_shufflehi_epi16_function((a), (imm)) -#endif - -// Blend packed 16-bit integers from a and b using control mask imm8, and store -// the results in dst. -// -// FOR j := 0 to 7 -// i := j*16 -// IF imm8[j] -// dst[i+15:i] := b[i+15:i] -// ELSE -// dst[i+15:i] := a[i+15:i] -// FI -// ENDFOR -// FORCE_INLINE __m128i _mm_blend_epi16(__m128i a, __m128i b, -// __constrange(0,255) int imm) -#define _mm_blend_epi16(a, b, imm) \ - __extension__({ \ - const uint16_t _mask[8] = {((imm) & (1 << 0)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 1)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 2)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 3)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 4)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 5)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 6)) ? 0xFFFF : 0x0000, \ - ((imm) & (1 << 7)) ? 0xFFFF : 0x0000}; \ - uint16x8_t _mask_vec = vld1q_u16(_mask); \ - uint16x8_t _a = vreinterpretq_u16_m128i(a); \ - uint16x8_t _b = vreinterpretq_u16_m128i(b); \ - vreinterpretq_m128i_u16(vbslq_u16(_mask_vec, _b, _a)); \ - }) - -// Blend packed 8-bit integers from a and b using mask, and store the results in -// dst. -// -// FOR j := 0 to 15 -// i := j*8 -// IF mask[i+7] -// dst[i+7:i] := b[i+7:i] -// ELSE -// dst[i+7:i] := a[i+7:i] -// FI -// ENDFOR -FORCE_INLINE __m128i _mm_blendv_epi8(__m128i _a, __m128i _b, __m128i _mask) -{ - // Use a signed shift right to create a mask with the sign bit - uint8x16_t mask = - vreinterpretq_u8_s8(vshrq_n_s8(vreinterpretq_s8_m128i(_mask), 7)); - uint8x16_t a = vreinterpretq_u8_m128i(_a); - uint8x16_t b = vreinterpretq_u8_m128i(_b); - return vreinterpretq_m128i_u8(vbslq_u8(mask, b, a)); -} - -/* Shifts */ - - -// Shift packed 16-bit integers in a right by imm while shifting in sign -// bits, and store the results in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srai_epi16 -FORCE_INLINE __m128i _mm_srai_epi16(__m128i a, int imm) -{ - const int count = (imm & ~15) ? 15 : imm; - return (__m128i) vshlq_s16((int16x8_t) a, vdupq_n_s16(-count)); -} - -// Shifts the 8 signed or unsigned 16-bit integers in a left by count bits while -// shifting in zeros. -// -// r0 := a0 << count -// r1 := a1 << count -// ... 
-// r7 := a7 << count -// -// https://msdn.microsoft.com/en-us/library/es73bcsy(v=vs.90).aspx -#define _mm_slli_epi16(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) <= 0) { \ - ret = a; \ - } else if ((imm) > 15) { \ - ret = _mm_setzero_si128(); \ - } else { \ - ret = vreinterpretq_m128i_s16( \ - vshlq_n_s16(vreinterpretq_s16_m128i(a), (imm))); \ - } \ - ret; \ - }) - -// Shifts the 4 signed or unsigned 32-bit integers in a left by count bits while -// shifting in zeros. : -// https://msdn.microsoft.com/en-us/library/z2k3bbtb%28v=vs.90%29.aspx -// FORCE_INLINE __m128i _mm_slli_epi32(__m128i a, __constrange(0,255) int imm) -FORCE_INLINE __m128i _mm_slli_epi32(__m128i a, int imm) -{ - if (imm <= 0) /* TODO: add constant range macro: [0, 255] */ - return a; - if (imm > 31) /* TODO: add unlikely macro */ - return _mm_setzero_si128(); - return vreinterpretq_m128i_s32( - vshlq_s32(vreinterpretq_s32_m128i(a), vdupq_n_s32(imm))); -} - -// Shift packed 64-bit integers in a left by imm8 while shifting in zeros, and -// store the results in dst. -FORCE_INLINE __m128i _mm_slli_epi64(__m128i a, int imm) -{ - if (imm <= 0) /* TODO: add constant range macro: [0, 255] */ - return a; - if (imm > 63) /* TODO: add unlikely macro */ - return _mm_setzero_si128(); - return vreinterpretq_m128i_s64( - vshlq_s64(vreinterpretq_s64_m128i(a), vdupq_n_s64(imm))); -} - -// Shift packed 16-bit integers in a right by imm8 while shifting in zeros, and -// store the results in dst. -// -// FOR j := 0 to 7 -// i := j*16 -// IF imm8[7:0] > 15 -// dst[i+15:i] := 0 -// ELSE -// dst[i+15:i] := ZeroExtend16(a[i+15:i] >> imm8[7:0]) -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi16 -#define _mm_srli_epi16(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) == 0) { \ - ret = a; \ - } else if (0 < (imm) && (imm) < 16) { \ - ret = vreinterpretq_m128i_u16( \ - vshlq_u16(vreinterpretq_u16_m128i(a), vdupq_n_s16(-imm))); \ - } else { \ - ret = _mm_setzero_si128(); \ - } \ - ret; \ - }) - -// Shift packed 32-bit integers in a right by imm8 while shifting in zeros, and -// store the results in dst. -// -// FOR j := 0 to 3 -// i := j*32 -// IF imm8[7:0] > 31 -// dst[i+31:i] := 0 -// ELSE -// dst[i+31:i] := ZeroExtend32(a[i+31:i] >> imm8[7:0]) -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi32 -// FORCE_INLINE __m128i _mm_srli_epi32(__m128i a, __constrange(0,255) int imm) -#define _mm_srli_epi32(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) == 0) { \ - ret = a; \ - } else if (0 < (imm) && (imm) < 32) { \ - ret = vreinterpretq_m128i_u32( \ - vshlq_u32(vreinterpretq_u32_m128i(a), vdupq_n_s32(-imm))); \ - } else { \ - ret = _mm_setzero_si128(); \ - } \ - ret; \ - }) - -// Shift packed 64-bit integers in a right by imm8 while shifting in zeros, and -// store the results in dst. 
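// Usage sketch (editorial addition, illustrative only). The immediate-count
// shifts above follow x86 semantics: logical shifts produce zero once the
// count reaches the element width, and _mm_srai_epi32 falls back to the sign
// mask. Assumes _mm_set1_epi16/_mm_set1_epi32 exist elsewhere in this header.
//
// __m128i v = _mm_set1_epi16(0x0101);
// __m128i s = _mm_slli_epi16(v, 4);                    // each word -> 0x1010
// __m128i u = _mm_srli_epi32(_mm_set1_epi32(-1), 24);  // each dword -> 0x000000FF
// __m128i a = _mm_srai_epi32(_mm_set1_epi32(-256), 4); // arithmetic -> -16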
-// -// FOR j := 0 to 1 -// i := j*64 -// IF imm8[7:0] > 63 -// dst[i+63:i] := 0 -// ELSE -// dst[i+63:i] := ZeroExtend64(a[i+63:i] >> imm8[7:0]) -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srli_epi64 -#define _mm_srli_epi64(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) == 0) { \ - ret = a; \ - } else if (0 < (imm) && (imm) < 64) { \ - ret = vreinterpretq_m128i_u64( \ - vshlq_u64(vreinterpretq_u64_m128i(a), vdupq_n_s64(-imm))); \ - } else { \ - ret = _mm_setzero_si128(); \ - } \ - ret; \ - }) - -// Shift packed 32-bit integers in a right by imm8 while shifting in sign bits, -// and store the results in dst. -// -// FOR j := 0 to 3 -// i := j*32 -// IF imm8[7:0] > 31 -// dst[i+31:i] := (a[i+31] ? 0xFFFFFFFF : 0x0) -// ELSE -// dst[i+31:i] := SignExtend32(a[i+31:i] >> imm8[7:0]) -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_srai_epi32 -// FORCE_INLINE __m128i _mm_srai_epi32(__m128i a, __constrange(0,255) int imm) -#define _mm_srai_epi32(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) == 0) { \ - ret = a; \ - } else if (0 < (imm) && (imm) < 32) { \ - ret = vreinterpretq_m128i_s32( \ - vshlq_s32(vreinterpretq_s32_m128i(a), vdupq_n_s32(-imm))); \ - } else { \ - ret = vreinterpretq_m128i_s32( \ - vshrq_n_s32(vreinterpretq_s32_m128i(a), 31)); \ - } \ - ret; \ - }) - -// Shifts the 128 - bit value in a right by imm bytes while shifting in -// zeros.imm must be an immediate. -// -// r := srl(a, imm*8) -// -// https://msdn.microsoft.com/en-us/library/305w28yz(v=vs.100).aspx -// FORCE_INLINE _mm_srli_si128(__m128i a, __constrange(0,255) int imm) -#define _mm_srli_si128(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) <= 0) { \ - ret = a; \ - } else if ((imm) > 15) { \ - ret = _mm_setzero_si128(); \ - } else { \ - ret = vreinterpretq_m128i_s8( \ - vextq_s8(vreinterpretq_s8_m128i(a), vdupq_n_s8(0), (imm))); \ - } \ - ret; \ - }) - -// Shifts the 128-bit value in a left by imm bytes while shifting in zeros. imm -// must be an immediate. -// -// r := a << (imm * 8) -// -// https://msdn.microsoft.com/en-us/library/34d3k2kt(v=vs.100).aspx -// FORCE_INLINE __m128i _mm_slli_si128(__m128i a, __constrange(0,255) int imm) -#define _mm_slli_si128(a, imm) \ - __extension__({ \ - __m128i ret; \ - if ((imm) <= 0) { \ - ret = a; \ - } else if ((imm) > 15) { \ - ret = _mm_setzero_si128(); \ - } else { \ - ret = vreinterpretq_m128i_s8(vextq_s8( \ - vdupq_n_s8(0), vreinterpretq_s8_m128i(a), 16 - (imm))); \ - } \ - ret; \ - }) - -// Shifts the 8 signed or unsigned 16-bit integers in a left by count bits while -// shifting in zeros. -// -// r0 := a0 << count -// r1 := a1 << count -// ... -// r7 := a7 << count -// -// https://msdn.microsoft.com/en-us/library/c79w388h(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_sll_epi16(__m128i a, __m128i count) -{ - uint64_t c = vreinterpretq_nth_u64_m128i(count, 0); - if (c > 15) - return _mm_setzero_si128(); - - int16x8_t vc = vdupq_n_s16((int16_t) c); - return vreinterpretq_m128i_s16(vshlq_s16(vreinterpretq_s16_m128i(a), vc)); -} - -// Shifts the 4 signed or unsigned 32-bit integers in a left by count bits while -// shifting in zeros. 
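// Usage sketch (editorial addition, illustrative only). _mm_slli_si128 and
// _mm_srli_si128 above shift the whole 128-bit register by a byte count, not
// by bits. Assumes _mm_setr_epi8 exists elsewhere in this header.
//
// __m128i v = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7,
//                           8, 9, 10, 11, 12, 13, 14, 15);
// __m128i l = _mm_slli_si128(v, 4); // bytes {0,0,0,0, 0,1,2,...,11}
// __m128i r = _mm_srli_si128(v, 4); // bytes {4,5,...,15, 0,0,0,0}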
-// -// r0 := a0 << count -// r1 := a1 << count -// r2 := a2 << count -// r3 := a3 << count -// -// https://msdn.microsoft.com/en-us/library/6fe5a6s9(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_sll_epi32(__m128i a, __m128i count) -{ - uint64_t c = vreinterpretq_nth_u64_m128i(count, 0); - if (c > 31) - return _mm_setzero_si128(); - - int32x4_t vc = vdupq_n_s32((int32_t) c); - return vreinterpretq_m128i_s32(vshlq_s32(vreinterpretq_s32_m128i(a), vc)); -} - -// Shifts the 2 signed or unsigned 64-bit integers in a left by count bits while -// shifting in zeros. -// -// r0 := a0 << count -// r1 := a1 << count -// -// https://msdn.microsoft.com/en-us/library/6ta9dffd(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_sll_epi64(__m128i a, __m128i count) -{ - uint64_t c = vreinterpretq_nth_u64_m128i(count, 0); - if (c > 63) - return _mm_setzero_si128(); - - int64x2_t vc = vdupq_n_s64((int64_t) c); - return vreinterpretq_m128i_s64(vshlq_s64(vreinterpretq_s64_m128i(a), vc)); -} - -// Shifts the 8 signed or unsigned 16-bit integers in a right by count bits -// while shifting in zeros. -// -// r0 := srl(a0, count) -// r1 := srl(a1, count) -// ... -// r7 := srl(a7, count) -// -// https://msdn.microsoft.com/en-us/library/wd5ax830(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_srl_epi16(__m128i a, __m128i count) -{ - uint64_t c = vreinterpretq_nth_u64_m128i(count, 0); - if (c > 15) - return _mm_setzero_si128(); - - int16x8_t vc = vdupq_n_s16(-(int16_t) c); - return vreinterpretq_m128i_u16(vshlq_u16(vreinterpretq_u16_m128i(a), vc)); -} - -// Shifts the 4 signed or unsigned 32-bit integers in a right by count bits -// while shifting in zeros. -// -// r0 := srl(a0, count) -// r1 := srl(a1, count) -// r2 := srl(a2, count) -// r3 := srl(a3, count) -// -// https://msdn.microsoft.com/en-us/library/a9cbttf4(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_srl_epi32(__m128i a, __m128i count) -{ - uint64_t c = vreinterpretq_nth_u64_m128i(count, 0); - if (c > 31) - return _mm_setzero_si128(); - - int32x4_t vc = vdupq_n_s32(-(int32_t) c); - return vreinterpretq_m128i_u32(vshlq_u32(vreinterpretq_u32_m128i(a), vc)); -} - -// Shifts the 2 signed or unsigned 64-bit integers in a right by count bits -// while shifting in zeros. -// -// r0 := srl(a0, count) -// r1 := srl(a1, count) -// -// https://msdn.microsoft.com/en-us/library/yf6cf9k8(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_srl_epi64(__m128i a, __m128i count) -{ - uint64_t c = vreinterpretq_nth_u64_m128i(count, 0); - if (c > 63) - return _mm_setzero_si128(); - - int64x2_t vc = vdupq_n_s64(-(int64_t) c); - return vreinterpretq_m128i_u64(vshlq_u64(vreinterpretq_u64_m128i(a), vc)); -} - -// NEON does not provide a version of this function. -// Creates a 16-bit mask from the most significant bits of the 16 signed or -// unsigned 8-bit integers in a and zero extends the upper bits. -// https://msdn.microsoft.com/en-us/library/vstudio/s090c8fk(v=vs.100).aspx -FORCE_INLINE int _mm_movemask_epi8(__m128i a) -{ -#if defined(__aarch64__) - uint8x16_t input = vreinterpretq_u8_m128i(a); - const int8_t ALIGN_STRUCT(16) - xr[16] = {-7, -6, -5, -4, -3, -2, -1, 0, -7, -6, -5, -4, -3, -2, -1, 0}; - const uint8x16_t mask_and = vdupq_n_u8(0x80); - const int8x16_t mask_shift = vld1q_s8(xr); - const uint8x16_t mask_result = - vshlq_u8(vandq_u8(input, mask_and), mask_shift); - uint8x8_t lo = vget_low_u8(mask_result); - uint8x8_t hi = vget_high_u8(mask_result); - - return vaddv_u8(lo) + (vaddv_u8(hi) << 8); -#else - // Use increasingly wide shifts+adds to collect the sign bits - // together. 
- // Since the widening shifts would be rather confusing to follow in little - // endian, everything will be illustrated in big endian order instead. This - // has a different result - the bits would actually be reversed on a big - // endian machine. - - // Starting input (only half the elements are shown): - // 89 ff 1d c0 00 10 99 33 - uint8x16_t input = vreinterpretq_u8_m128i(a); - - // Shift out everything but the sign bits with an unsigned shift right. - // - // Bytes of the vector:: - // 89 ff 1d c0 00 10 99 33 - // \ \ \ \ \ \ \ \ high_bits = (uint16x4_t)(input >> 7) - // | | | | | | | | - // 01 01 00 01 00 00 01 00 - // - // Bits of first important lane(s): - // 10001001 (89) - // \______ - // | - // 00000001 (01) - uint16x8_t high_bits = vreinterpretq_u16_u8(vshrq_n_u8(input, 7)); - - // Merge the even lanes together with a 16-bit unsigned shift right + add. - // 'xx' represents garbage data which will be ignored in the final result. - // In the important bytes, the add functions like a binary OR. - // - // 01 01 00 01 00 00 01 00 - // \_ | \_ | \_ | \_ | paired16 = (uint32x4_t)(input + (input >> 7)) - // \| \| \| \| - // xx 03 xx 01 xx 00 xx 02 - // - // 00000001 00000001 (01 01) - // \_______ | - // \| - // xxxxxxxx xxxxxx11 (xx 03) - uint32x4_t paired16 = - vreinterpretq_u32_u16(vsraq_n_u16(high_bits, high_bits, 7)); - - // Repeat with a wider 32-bit shift + add. - // xx 03 xx 01 xx 00 xx 02 - // \____ | \____ | paired32 = (uint64x1_t)(paired16 + (paired16 >> - // 14)) - // \| \| - // xx xx xx 0d xx xx xx 02 - // - // 00000011 00000001 (03 01) - // \\_____ || - // '----.\|| - // xxxxxxxx xxxx1101 (xx 0d) - uint64x2_t paired32 = - vreinterpretq_u64_u32(vsraq_n_u32(paired16, paired16, 14)); - - // Last, an even wider 64-bit shift + add to get our result in the low 8 bit - // lanes. xx xx xx 0d xx xx xx 02 - // \_________ | paired64 = (uint8x8_t)(paired32 + (paired32 >> - // 28)) - // \| - // xx xx xx xx xx xx xx d2 - // - // 00001101 00000010 (0d 02) - // \ \___ | | - // '---. \| | - // xxxxxxxx 11010010 (xx d2) - uint8x16_t paired64 = - vreinterpretq_u8_u64(vsraq_n_u64(paired32, paired32, 28)); - - // Extract the low 8 bits from each 64-bit lane with 2 8-bit extracts. - // xx xx xx xx xx xx xx d2 - // || return paired64[0] - // d2 - // Note: Little endian would return the correct value 4b (01001011) instead. - return vgetq_lane_u8(paired64, 0) | ((int) vgetq_lane_u8(paired64, 8) << 8); -#endif -} - -// Copy the lower 64-bit integer in a to dst. -// -// dst[63:0] := a[63:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movepi64_pi64 -FORCE_INLINE __m64 _mm_movepi64_pi64(__m128i a) -{ - return vreinterpret_m64_s64(vget_low_s64(vreinterpretq_s64_m128i(a))); -} - -// Copy the 64-bit integer a to the lower element of dst, and zero the upper -// element. -// -// dst[63:0] := a[63:0] -// dst[127:64] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movpi64_epi64 -FORCE_INLINE __m128i _mm_movpi64_epi64(__m64 a) -{ - return vreinterpretq_m128i_s64( - vcombine_s64(vreinterpret_s64_m64(a), vdup_n_s64(0))); -} - -// NEON does not provide this method -// Creates a 4-bit mask from the most significant bits of the four -// single-precision, floating-point values. 
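// Usage sketch (editorial addition, illustrative only). A common use of
// _mm_movemask_epi8 above: locate the first matching byte in a 16-byte block.
// Assumes _mm_loadu_si128/_mm_cmpeq_epi8/_mm_set1_epi8 exist elsewhere in
// this header; __builtin_ctz is a GCC/Clang builtin.
//
// const char *p = ...;                 // at least 16 readable bytes
// __m128i blk = _mm_loadu_si128((const __m128i *) p);
// __m128i eq  = _mm_cmpeq_epi8(blk, _mm_set1_epi8('\n'));
// int mask    = _mm_movemask_epi8(eq); // bit j set if p[j] == '\n'
// int pos     = mask ? __builtin_ctz(mask) : -1; // first '\n', or -1 if none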
-// https://msdn.microsoft.com/en-us/library/vstudio/4490ys29(v=vs.100).aspx -FORCE_INLINE int _mm_movemask_ps(__m128 a) -{ - uint32x4_t input = vreinterpretq_u32_m128(a); -#if defined(__aarch64__) - static const int32x4_t shift = {0, 1, 2, 3}; - uint32x4_t tmp = vshrq_n_u32(input, 31); - return vaddvq_u32(vshlq_u32(tmp, shift)); -#else - // Uses the exact same method as _mm_movemask_epi8, see that for details. - // Shift out everything but the sign bits with a 32-bit unsigned shift - // right. - uint64x2_t high_bits = vreinterpretq_u64_u32(vshrq_n_u32(input, 31)); - // Merge the two pairs together with a 64-bit unsigned shift right + add. - uint8x16_t paired = - vreinterpretq_u8_u64(vsraq_n_u64(high_bits, high_bits, 31)); - // Extract the result. - return vgetq_lane_u8(paired, 0) | (vgetq_lane_u8(paired, 8) << 2); -#endif -} - -// Compute the bitwise NOT of a and then AND with a 128-bit vector containing -// all 1's, and return 1 if the result is zero, otherwise return 0. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_ones -FORCE_INLINE int _mm_test_all_ones(__m128i a) -{ - return (uint64_t)(vgetq_lane_s64(a, 0) & vgetq_lane_s64(a, 1)) == - ~(uint64_t) 0; -} - -// Compute the bitwise AND of 128 bits (representing integer data) in a and -// mask, and return 1 if the result is zero, otherwise return 0. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_zeros -FORCE_INLINE int _mm_test_all_zeros(__m128i a, __m128i mask) -{ - int64x2_t a_and_mask = - vandq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(mask)); - return (vgetq_lane_s64(a_and_mask, 0) | vgetq_lane_s64(a_and_mask, 1)) ? 0 - : 1; -} - -/* Math operations */ - -// Subtracts the four single-precision, floating-point values of a and b. -// -// r0 := a0 - b0 -// r1 := a1 - b1 -// r2 := a2 - b2 -// r3 := a3 - b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/1zad2k61(v=vs.100).aspx -FORCE_INLINE __m128 _mm_sub_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_f32( - vsubq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Subtract the lower single-precision (32-bit) floating-point element in b from -// the lower single-precision (32-bit) floating-point element in a, store the -// result in the lower element of dst, and copy the upper 3 packed elements from -// a to the upper elements of dst. -// -// dst[31:0] := a[31:0] - b[31:0] -// dst[127:32] := a[127:32] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_ss -FORCE_INLINE __m128 _mm_sub_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_sub_ps(a, b)); -} - -// Subtract 2 packed 64-bit integers in b from 2 packed 64-bit integers in a, -// and store the results in dst. -// r0 := a0 - b0 -// r1 := a1 - b1 -FORCE_INLINE __m128i _mm_sub_epi64(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s64( - vsubq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b))); -} - -// Subtracts the 4 signed or unsigned 32-bit integers of b from the 4 signed or -// unsigned 32-bit integers of a. 
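// Usage sketch (editorial addition, illustrative only). _mm_movemask_ps packs
// the four sign bits into the low 4 bits of an int, and _mm_test_all_zeros
// reports whether (a & mask) is zero. Assumes _mm_setr_ps/_mm_set1_epi32
// exist elsewhere in this header.
//
// __m128 v = _mm_setr_ps(-1.0f, 2.0f, -3.0f, 4.0f);
// int sign = _mm_movemask_ps(v);                     // 0b0101 == 5
// int zero = _mm_test_all_zeros(_mm_setzero_si128(),
//                               _mm_set1_epi32(-1)); // 1: 0 & ~0 == 0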
-// -// r0 := a0 - b0 -// r1 := a1 - b1 -// r2 := a2 - b2 -// r3 := a3 - b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/fhh866h0(v=vs.100).aspx -FORCE_INLINE __m128i _mm_sub_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - vsubq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -FORCE_INLINE __m128i _mm_sub_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -FORCE_INLINE __m128i _mm_sub_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s8( - vsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Subtract 64-bit integer b from 64-bit integer a, and store the result in dst. -// -// dst[63:0] := a[63:0] - b[63:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_si64 -FORCE_INLINE __m64 _mm_sub_si64(__m64 a, __m64 b) -{ - return vreinterpret_m64_s64( - vsub_s64(vreinterpret_s64_m64(a), vreinterpret_s64_m64(b))); -} - -// Subtracts the 8 unsigned 16-bit integers of bfrom the 8 unsigned 16-bit -// integers of a and saturates.. -// https://technet.microsoft.com/en-us/subscriptions/index/f44y0s19(v=vs.90).aspx -FORCE_INLINE __m128i _mm_subs_epu16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u16( - vqsubq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b))); -} - -// Subtracts the 16 unsigned 8-bit integers of b from the 16 unsigned 8-bit -// integers of a and saturates. -// -// r0 := UnsignedSaturate(a0 - b0) -// r1 := UnsignedSaturate(a1 - b1) -// ... -// r15 := UnsignedSaturate(a15 - b15) -// -// https://technet.microsoft.com/en-us/subscriptions/yadkxc18(v=vs.90) -FORCE_INLINE __m128i _mm_subs_epu8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vqsubq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b))); -} - -// Subtracts the 16 signed 8-bit integers of b from the 16 signed 8-bit integers -// of a and saturates. -// -// r0 := SignedSaturate(a0 - b0) -// r1 := SignedSaturate(a1 - b1) -// ... -// r15 := SignedSaturate(a15 - b15) -// -// https://technet.microsoft.com/en-us/subscriptions/by7kzks1(v=vs.90) -FORCE_INLINE __m128i _mm_subs_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s8( - vqsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Subtracts the 8 signed 16-bit integers of b from the 8 signed 16-bit integers -// of a and saturates. -// -// r0 := SignedSaturate(a0 - b0) -// r1 := SignedSaturate(a1 - b1) -// ... -// r7 := SignedSaturate(a7 - b7) -// -// https://technet.microsoft.com/en-us/subscriptions/3247z5b8(v=vs.90) -FORCE_INLINE __m128i _mm_subs_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vqsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -FORCE_INLINE __m128i _mm_adds_epu16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u16( - vqaddq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b))); -} - -// Negate packed 8-bit integers in a when the corresponding signed -// 8-bit integer in b is negative, and store the results in dst. -// Element in dst are zeroed out when the corresponding element -// in b is zero. -// -// for i in 0..15 -// if b[i] < 0 -// r[i] := -a[i] -// else if b[i] == 0 -// r[i] := 0 -// else -// r[i] := a[i] -// fi -// done -FORCE_INLINE __m128i _mm_sign_epi8(__m128i _a, __m128i _b) -{ - int8x16_t a = vreinterpretq_s8_m128i(_a); - int8x16_t b = vreinterpretq_s8_m128i(_b); - - // signed shift right: faster than vclt - // (b < 0) ? 
0xFF : 0 - uint8x16_t ltMask = vreinterpretq_u8_s8(vshrq_n_s8(b, 7)); - - // (b == 0) ? 0xFF : 0 -#if defined(__aarch64__) - int8x16_t zeroMask = vreinterpretq_s8_u8(vceqzq_s8(b)); -#else - int8x16_t zeroMask = vreinterpretq_s8_u8(vceqq_s8(b, vdupq_n_s8(0))); -#endif - - // bitwise select either a or nagative 'a' (vnegq_s8(a) return nagative 'a') - // based on ltMask - int8x16_t masked = vbslq_s8(ltMask, vnegq_s8(a), a); - // res = masked & (~zeroMask) - int8x16_t res = vbicq_s8(masked, zeroMask); - - return vreinterpretq_m128i_s8(res); -} - -// Negate packed 16-bit integers in a when the corresponding signed -// 16-bit integer in b is negative, and store the results in dst. -// Element in dst are zeroed out when the corresponding element -// in b is zero. -// -// for i in 0..7 -// if b[i] < 0 -// r[i] := -a[i] -// else if b[i] == 0 -// r[i] := 0 -// else -// r[i] := a[i] -// fi -// done -FORCE_INLINE __m128i _mm_sign_epi16(__m128i _a, __m128i _b) -{ - int16x8_t a = vreinterpretq_s16_m128i(_a); - int16x8_t b = vreinterpretq_s16_m128i(_b); - - // signed shift right: faster than vclt - // (b < 0) ? 0xFFFF : 0 - uint16x8_t ltMask = vreinterpretq_u16_s16(vshrq_n_s16(b, 15)); - // (b == 0) ? 0xFFFF : 0 -#if defined(__aarch64__) - int16x8_t zeroMask = vreinterpretq_s16_u16(vceqzq_s16(b)); -#else - int16x8_t zeroMask = vreinterpretq_s16_u16(vceqq_s16(b, vdupq_n_s16(0))); -#endif - - // bitwise select either a or negative 'a' (vnegq_s16(a) equals to negative - // 'a') based on ltMask - int16x8_t masked = vbslq_s16(ltMask, vnegq_s16(a), a); - // res = masked & (~zeroMask) - int16x8_t res = vbicq_s16(masked, zeroMask); - return vreinterpretq_m128i_s16(res); -} - -// Negate packed 32-bit integers in a when the corresponding signed -// 32-bit integer in b is negative, and store the results in dst. -// Element in dst are zeroed out when the corresponding element -// in b is zero. -// -// for i in 0..3 -// if b[i] < 0 -// r[i] := -a[i] -// else if b[i] == 0 -// r[i] := 0 -// else -// r[i] := a[i] -// fi -// done -FORCE_INLINE __m128i _mm_sign_epi32(__m128i _a, __m128i _b) -{ - int32x4_t a = vreinterpretq_s32_m128i(_a); - int32x4_t b = vreinterpretq_s32_m128i(_b); - - // signed shift right: faster than vclt - // (b < 0) ? 0xFFFFFFFF : 0 - uint32x4_t ltMask = vreinterpretq_u32_s32(vshrq_n_s32(b, 31)); - - // (b == 0) ? 0xFFFFFFFF : 0 -#if defined(__aarch64__) - int32x4_t zeroMask = vreinterpretq_s32_u32(vceqzq_s32(b)); -#else - int32x4_t zeroMask = vreinterpretq_s32_u32(vceqq_s32(b, vdupq_n_s32(0))); -#endif - - // bitwise select either a or negative 'a' (vnegq_s32(a) equals to negative - // 'a') based on ltMask - int32x4_t masked = vbslq_s32(ltMask, vnegq_s32(a), a); - // res = masked & (~zeroMask) - int32x4_t res = vbicq_s32(masked, zeroMask); - return vreinterpretq_m128i_s32(res); -} - -// Negate packed 16-bit integers in a when the corresponding signed 16-bit -// integer in b is negative, and store the results in dst. Element in dst are -// zeroed out when the corresponding element in b is zero. -// -// FOR j := 0 to 3 -// i := j*16 -// IF b[i+15:i] < 0 -// dst[i+15:i] := -(a[i+15:i]) -// ELSE IF b[i+15:i] == 0 -// dst[i+15:i] := 0 -// ELSE -// dst[i+15:i] := a[i+15:i] -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi16 -FORCE_INLINE __m64 _mm_sign_pi16(__m64 _a, __m64 _b) -{ - int16x4_t a = vreinterpret_s16_m64(_a); - int16x4_t b = vreinterpret_s16_m64(_b); - - // signed shift right: faster than vclt - // (b < 0) ? 
0xFFFF : 0 - uint16x4_t ltMask = vreinterpret_u16_s16(vshr_n_s16(b, 15)); - - // (b == 0) ? 0xFFFF : 0 -#if defined(__aarch64__) - int16x4_t zeroMask = vreinterpret_s16_u16(vceqz_s16(b)); -#else - int16x4_t zeroMask = vreinterpret_s16_u16(vceq_s16(b, vdup_n_s16(0))); -#endif - - // bitwise select either a or nagative 'a' (vneg_s16(a) return nagative 'a') - // based on ltMask - int16x4_t masked = vbsl_s16(ltMask, vneg_s16(a), a); - // res = masked & (~zeroMask) - int16x4_t res = vbic_s16(masked, zeroMask); - - return vreinterpret_m64_s16(res); -} - -// Negate packed 32-bit integers in a when the corresponding signed 32-bit -// integer in b is negative, and store the results in dst. Element in dst are -// zeroed out when the corresponding element in b is zero. -// -// FOR j := 0 to 1 -// i := j*32 -// IF b[i+31:i] < 0 -// dst[i+31:i] := -(a[i+31:i]) -// ELSE IF b[i+31:i] == 0 -// dst[i+31:i] := 0 -// ELSE -// dst[i+31:i] := a[i+31:i] -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi32 -FORCE_INLINE __m64 _mm_sign_pi32(__m64 _a, __m64 _b) -{ - int32x2_t a = vreinterpret_s32_m64(_a); - int32x2_t b = vreinterpret_s32_m64(_b); - - // signed shift right: faster than vclt - // (b < 0) ? 0xFFFFFFFF : 0 - uint32x2_t ltMask = vreinterpret_u32_s32(vshr_n_s32(b, 31)); - - // (b == 0) ? 0xFFFFFFFF : 0 -#if defined(__aarch64__) - int32x2_t zeroMask = vreinterpret_s32_u32(vceqz_s32(b)); -#else - int32x2_t zeroMask = vreinterpret_s32_u32(vceq_s32(b, vdup_n_s32(0))); -#endif - - // bitwise select either a or nagative 'a' (vneg_s32(a) return nagative 'a') - // based on ltMask - int32x2_t masked = vbsl_s32(ltMask, vneg_s32(a), a); - // res = masked & (~zeroMask) - int32x2_t res = vbic_s32(masked, zeroMask); - - return vreinterpret_m64_s32(res); -} - -// Negate packed 8-bit integers in a when the corresponding signed 8-bit integer -// in b is negative, and store the results in dst. Element in dst are zeroed out -// when the corresponding element in b is zero. -// -// FOR j := 0 to 7 -// i := j*8 -// IF b[i+7:i] < 0 -// dst[i+7:i] := -(a[i+7:i]) -// ELSE IF b[i+7:i] == 0 -// dst[i+7:i] := 0 -// ELSE -// dst[i+7:i] := a[i+7:i] -// FI -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sign_pi8 -FORCE_INLINE __m64 _mm_sign_pi8(__m64 _a, __m64 _b) -{ - int8x8_t a = vreinterpret_s8_m64(_a); - int8x8_t b = vreinterpret_s8_m64(_b); - - // signed shift right: faster than vclt - // (b < 0) ? 0xFF : 0 - uint8x8_t ltMask = vreinterpret_u8_s8(vshr_n_s8(b, 7)); - - // (b == 0) ? 0xFF : 0 -#if defined(__aarch64__) - int8x8_t zeroMask = vreinterpret_s8_u8(vceqz_s8(b)); -#else - int8x8_t zeroMask = vreinterpret_s8_u8(vceq_s8(b, vdup_n_s8(0))); -#endif - - // bitwise select either a or nagative 'a' (vneg_s8(a) return nagative 'a') - // based on ltMask - int8x8_t masked = vbsl_s8(ltMask, vneg_s8(a), a); - // res = masked & (~zeroMask) - int8x8_t res = vbic_s8(masked, zeroMask); - - return vreinterpret_m64_s8(res); -} - -// Average packed unsigned 16-bit integers in a and b, and store the results in -// dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := (a[i+15:i] + b[i+15:i] + 1) >> 1 -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_avg_pu16 -FORCE_INLINE __m64 _mm_avg_pu16(__m64 a, __m64 b) -{ - return vreinterpret_m64_u16( - vrhadd_u16(vreinterpret_u16_m64(a), vreinterpret_u16_m64(b))); -} - -// Average packed unsigned 8-bit integers in a and b, and store the results in -// dst. 
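// Usage sketch (editorial addition, illustrative only). The _mm_sign_* family
// above conditionally negates or zeroes each element of a based on the sign
// of the matching element of b. Assumes _mm_setr_epi16 exists elsewhere in
// this header.
//
// __m128i a = _mm_setr_epi16( 1, 2, 3,  4, 5, 6,  7, 8);
// __m128i b = _mm_setr_epi16(-1, 0, 2, -2, 0, 1, -5, 9);
// __m128i r = _mm_sign_epi16(a, b); // {-1, 0, 3, -4, 0, 6, -7, 8}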
-// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := (a[i+7:i] + b[i+7:i] + 1) >> 1 -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_avg_pu8 -FORCE_INLINE __m64 _mm_avg_pu8(__m64 a, __m64 b) -{ - return vreinterpret_m64_u8( - vrhadd_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b))); -} - -// Average packed unsigned 8-bit integers in a and b, and store the results in -// dst. -// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := (a[i+7:i] + b[i+7:i] + 1) >> 1 -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pavgb -#define _m_pavgb(a, b) _mm_avg_pu8(a, b) - -// Average packed unsigned 16-bit integers in a and b, and store the results in -// dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := (a[i+15:i] + b[i+15:i] + 1) >> 1 -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pavgw -#define _m_pavgw(a, b) _mm_avg_pu16(a, b) - -// Computes the average of the 16 unsigned 8-bit integers in a and the 16 -// unsigned 8-bit integers in b and rounds. -// -// r0 := (a0 + b0) / 2 -// r1 := (a1 + b1) / 2 -// ... -// r15 := (a15 + b15) / 2 -// -// https://msdn.microsoft.com/en-us/library/vstudio/8zwh554a(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_avg_epu8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vrhaddq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b))); -} - -// Computes the average of the 8 unsigned 16-bit integers in a and the 8 -// unsigned 16-bit integers in b and rounds. -// -// r0 := (a0 + b0) / 2 -// r1 := (a1 + b1) / 2 -// ... -// r7 := (a7 + b7) / 2 -// -// https://msdn.microsoft.com/en-us/library/vstudio/y13ca3c8(v=vs.90).aspx -FORCE_INLINE __m128i _mm_avg_epu16(__m128i a, __m128i b) -{ - return (__m128i) vrhaddq_u16(vreinterpretq_u16_m128i(a), - vreinterpretq_u16_m128i(b)); -} - -// Adds the four single-precision, floating-point values of a and b. -// -// r0 := a0 + b0 -// r1 := a1 + b1 -// r2 := a2 + b2 -// r3 := a3 + b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/c9848chc(v=vs.100).aspx -FORCE_INLINE __m128 _mm_add_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_f32( - vaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Add packed double-precision (64-bit) floating-point elements in a and b, and -// store the results in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_pd -FORCE_INLINE __m128d _mm_add_pd(__m128d a, __m128d b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64( - vaddq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b))); -#else - double *da = (double *) &a; - double *db = (double *) &b; - double c[2]; - c[0] = da[0] + db[0]; - c[1] = da[1] + db[1]; - return vld1q_f32((float32_t *) c); -#endif -} - -// Add 64-bit integers a and b, and store the result in dst. -// -// dst[63:0] := a[63:0] + b[63:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_si64 -FORCE_INLINE __m64 _mm_add_si64(__m64 a, __m64 b) -{ - return vreinterpret_m64_s64( - vadd_s64(vreinterpret_s64_m64(a), vreinterpret_s64_m64(b))); -} - -// adds the scalar single-precision floating point values of a and b. -// https://msdn.microsoft.com/en-us/library/be94x2y6(v=vs.100).aspx -FORCE_INLINE __m128 _mm_add_ss(__m128 a, __m128 b) -{ - float32_t b0 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 0); - float32x4_t value = vsetq_lane_f32(b0, vdupq_n_f32(0), 0); - // the upper values in the result must be the remnants of . 
- return vreinterpretq_m128_f32(vaddq_f32(a, value)); -} - -// Adds the 4 signed or unsigned 64-bit integers in a to the 4 signed or -// unsigned 32-bit integers in b. -// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx -FORCE_INLINE __m128i _mm_add_epi64(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s64( - vaddq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b))); -} - -// Adds the 4 signed or unsigned 32-bit integers in a to the 4 signed or -// unsigned 32-bit integers in b. -// -// r0 := a0 + b0 -// r1 := a1 + b1 -// r2 := a2 + b2 -// r3 := a3 + b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx -FORCE_INLINE __m128i _mm_add_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - vaddq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Adds the 8 signed or unsigned 16-bit integers in a to the 8 signed or -// unsigned 16-bit integers in b. -// https://msdn.microsoft.com/en-us/library/fceha5k4(v=vs.100).aspx -FORCE_INLINE __m128i _mm_add_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vaddq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -// Adds the 16 signed or unsigned 8-bit integers in a to the 16 signed or -// unsigned 8-bit integers in b. -// https://technet.microsoft.com/en-us/subscriptions/yc7tcyzs(v=vs.90) -FORCE_INLINE __m128i _mm_add_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s8( - vaddq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Adds the 8 signed 16-bit integers in a to the 8 signed 16-bit integers in b -// and saturates. -// -// r0 := SignedSaturate(a0 + b0) -// r1 := SignedSaturate(a1 + b1) -// ... -// r7 := SignedSaturate(a7 + b7) -// -// https://msdn.microsoft.com/en-us/library/1a306ef8(v=vs.100).aspx -FORCE_INLINE __m128i _mm_adds_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vqaddq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -// Add packed signed 8-bit integers in a and b using saturation, and store the -// results in dst. -// -// FOR j := 0 to 15 -// i := j*8 -// dst[i+7:i] := Saturate8( a[i+7:i] + b[i+7:i] ) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_adds_epi8 -FORCE_INLINE __m128i _mm_adds_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s8( - vqaddq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Adds the 16 unsigned 8-bit integers in a to the 16 unsigned 8-bit integers in -// b and saturates.. -// https://msdn.microsoft.com/en-us/library/9hahyddy(v=vs.100).aspx -FORCE_INLINE __m128i _mm_adds_epu8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vqaddq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b))); -} - -// Multiplies the 8 signed or unsigned 16-bit integers from a by the 8 signed or -// unsigned 16-bit integers from b. -// -// r0 := (a0 * b0)[15:0] -// r1 := (a1 * b1)[15:0] -// ... -// r7 := (a7 * b7)[15:0] -// -// https://msdn.microsoft.com/en-us/library/vstudio/9ks1472s(v=vs.100).aspx -FORCE_INLINE __m128i _mm_mullo_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vmulq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -// Multiplies the 4 signed or unsigned 32-bit integers from a by the 4 signed or -// unsigned 32-bit integers from b. 
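// Usage sketch (editorial addition, illustrative only). The plain adds above
// wrap modulo 2^16 while the adds_* variants saturate at the type limits.
// Assumes _mm_set1_epi16 exists elsewhere in this header.
//
// __m128i a = _mm_set1_epi16(30000);
// __m128i b = _mm_set1_epi16(10000);
// __m128i w = _mm_add_epi16(a, b);  // wraps:     40000 - 65536 == -25536
// __m128i s = _mm_adds_epi16(a, b); // saturates: 32767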
-// https://msdn.microsoft.com/en-us/library/vstudio/bb531409(v=vs.100).aspx -FORCE_INLINE __m128i _mm_mullo_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s32( - vmulq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Multiply the packed unsigned 16-bit integers in a and b, producing -// intermediate 32-bit integers, and store the high 16 bits of the intermediate -// integers in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// tmp[31:0] := a[i+15:i] * b[i+15:i] -// dst[i+15:i] := tmp[31:16] -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_pmulhuw -#define _m_pmulhuw(a, b) _mm_mulhi_pu16(a, b) - -// Multiplies the four single-precision, floating-point values of a and b. -// -// r0 := a0 * b0 -// r1 := a1 * b1 -// r2 := a2 * b2 -// r3 := a3 * b3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/22kbk6t9(v=vs.100).aspx -FORCE_INLINE __m128 _mm_mul_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_f32( - vmulq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Multiply packed double-precision (64-bit) floating-point elements in a and b, -// and store the results in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_pd -FORCE_INLINE __m128d _mm_mul_pd(__m128d a, __m128d b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64( - vmulq_f64(vreinterpretq_f64_m128d(a), vreinterpretq_f64_m128d(b))); -#else - double *da = (double *) &a; - double *db = (double *) &b; - double c[2]; - c[0] = da[0] * db[0]; - c[1] = da[1] * db[1]; - return vld1q_f32((float32_t *) c); -#endif -} - -// Multiply the lower single-precision (32-bit) floating-point element in a and -// b, store the result in the lower element of dst, and copy the upper 3 packed -// elements from a to the upper elements of dst. -// -// dst[31:0] := a[31:0] * b[31:0] -// dst[127:32] := a[127:32] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_ss -FORCE_INLINE __m128 _mm_mul_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_mul_ps(a, b)); -} - -// Multiply the low unsigned 32-bit integers from each packed 64-bit element in -// a and b, and store the unsigned 64-bit results in dst. -// -// r0 := (a0 & 0xFFFFFFFF) * (b0 & 0xFFFFFFFF) -// r1 := (a2 & 0xFFFFFFFF) * (b2 & 0xFFFFFFFF) -FORCE_INLINE __m128i _mm_mul_epu32(__m128i a, __m128i b) -{ - // vmull_u32 upcasts instead of masking, so we downcast. - uint32x2_t a_lo = vmovn_u64(vreinterpretq_u64_m128i(a)); - uint32x2_t b_lo = vmovn_u64(vreinterpretq_u64_m128i(b)); - return vreinterpretq_m128i_u64(vmull_u32(a_lo, b_lo)); -} - -// Multiply the low unsigned 32-bit integers from a and b, and store the -// unsigned 64-bit result in dst. -// -// dst[63:0] := a[31:0] * b[31:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_su32 -FORCE_INLINE __m64 _mm_mul_su32(__m64 a, __m64 b) -{ - return vreinterpret_m64_u64(vget_low_u64( - vmull_u32(vreinterpret_u32_m64(a), vreinterpret_u32_m64(b)))); -} - -// Multiply the low signed 32-bit integers from each packed 64-bit element in -// a and b, and store the signed 64-bit results in dst. -// -// r0 := (int64_t)(int32_t)a0 * (int64_t)(int32_t)b0 -// r1 := (int64_t)(int32_t)a2 * (int64_t)(int32_t)b2 -FORCE_INLINE __m128i _mm_mul_epi32(__m128i a, __m128i b) -{ - // vmull_s32 upcasts instead of masking, so we downcast. 
- int32x2_t a_lo = vmovn_s64(vreinterpretq_s64_m128i(a)); - int32x2_t b_lo = vmovn_s64(vreinterpretq_s64_m128i(b)); - return vreinterpretq_m128i_s64(vmull_s32(a_lo, b_lo)); -} - -// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit -// integers from b. -// -// r0 := (a0 * b0) + (a1 * b1) -// r1 := (a2 * b2) + (a3 * b3) -// r2 := (a4 * b4) + (a5 * b5) -// r3 := (a6 * b6) + (a7 * b7) -// https://msdn.microsoft.com/en-us/library/yht36sa6(v=vs.90).aspx -FORCE_INLINE __m128i _mm_madd_epi16(__m128i a, __m128i b) -{ - int32x4_t low = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)), - vget_low_s16(vreinterpretq_s16_m128i(b))); - int32x4_t high = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)), - vget_high_s16(vreinterpretq_s16_m128i(b))); - - int32x2_t low_sum = vpadd_s32(vget_low_s32(low), vget_high_s32(low)); - int32x2_t high_sum = vpadd_s32(vget_low_s32(high), vget_high_s32(high)); - - return vreinterpretq_m128i_s32(vcombine_s32(low_sum, high_sum)); -} - -// Multiply packed signed 16-bit integers in a and b, producing intermediate -// signed 32-bit integers. Shift right by 15 bits while rounding up, and store -// the packed 16-bit integers in dst. -// -// r0 := Round(((int32_t)a0 * (int32_t)b0) >> 15) -// r1 := Round(((int32_t)a1 * (int32_t)b1) >> 15) -// r2 := Round(((int32_t)a2 * (int32_t)b2) >> 15) -// ... -// r7 := Round(((int32_t)a7 * (int32_t)b7) >> 15) -FORCE_INLINE __m128i _mm_mulhrs_epi16(__m128i a, __m128i b) -{ - // Has issues due to saturation - // return vreinterpretq_m128i_s16(vqrdmulhq_s16(a, b)); - - // Multiply - int32x4_t mul_lo = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)), - vget_low_s16(vreinterpretq_s16_m128i(b))); - int32x4_t mul_hi = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)), - vget_high_s16(vreinterpretq_s16_m128i(b))); - - // Rounding narrowing shift right - // narrow = (int16_t)((mul + 16384) >> 15); - int16x4_t narrow_lo = vrshrn_n_s32(mul_lo, 15); - int16x4_t narrow_hi = vrshrn_n_s32(mul_hi, 15); - - // Join together - return vreinterpretq_m128i_s16(vcombine_s16(narrow_lo, narrow_hi)); -} - -// Vertically multiply each unsigned 8-bit integer from a with the corresponding -// signed 8-bit integer from b, producing intermediate signed 16-bit integers. -// Horizontally add adjacent pairs of intermediate signed 16-bit integers, -// and pack the saturated results in dst. -// -// FOR j := 0 to 7 -// i := j*16 -// dst[i+15:i] := Saturate_To_Int16( a[i+15:i+8]*b[i+15:i+8] + -// a[i+7:i]*b[i+7:i] ) -// ENDFOR -FORCE_INLINE __m128i _mm_maddubs_epi16(__m128i _a, __m128i _b) -{ -#if defined(__aarch64__) - uint8x16_t a = vreinterpretq_u8_m128i(_a); - int8x16_t b = vreinterpretq_s8_m128i(_b); - int16x8_t tl = vmulq_s16(vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(a))), - vmovl_s8(vget_low_s8(b))); - int16x8_t th = vmulq_s16(vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(a))), - vmovl_s8(vget_high_s8(b))); - return vreinterpretq_m128i_s16( - vqaddq_s16(vuzp1q_s16(tl, th), vuzp2q_s16(tl, th))); -#else - // This would be much simpler if x86 would choose to zero extend OR sign - // extend, not both. This could probably be optimized better. - uint16x8_t a = vreinterpretq_u16_m128i(_a); - int16x8_t b = vreinterpretq_s16_m128i(_b); - - // Zero extend a - int16x8_t a_odd = vreinterpretq_s16_u16(vshrq_n_u16(a, 8)); - int16x8_t a_even = vreinterpretq_s16_u16(vbicq_u16(a, vdupq_n_u16(0xff00))); - - // Sign extend by shifting left then shifting right. 
- int16x8_t b_even = vshrq_n_s16(vshlq_n_s16(b, 8), 8); - int16x8_t b_odd = vshrq_n_s16(b, 8); - - // multiply - int16x8_t prod1 = vmulq_s16(a_even, b_even); - int16x8_t prod2 = vmulq_s16(a_odd, b_odd); - - // saturated add - return vreinterpretq_m128i_s16(vqaddq_s16(prod1, prod2)); -#endif -} - -// Computes the fused multiple add product of 32-bit floating point numbers. -// -// Return Value -// Multiplies A and B, and adds C to the temporary result before returning it. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_fmadd -FORCE_INLINE __m128 _mm_fmadd_ps(__m128 a, __m128 b, __m128 c) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32(vfmaq_f32(vreinterpretq_f32_m128(c), - vreinterpretq_f32_m128(b), - vreinterpretq_f32_m128(a))); -#else - return _mm_add_ps(_mm_mul_ps(a, b), c); -#endif -} - -// Alternatively add and subtract packed single-precision (32-bit) -// floating-point elements in a to/from packed elements in b, and store the -// results in dst. -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=addsub_ps -FORCE_INLINE __m128 _mm_addsub_ps(__m128 a, __m128 b) -{ - __m128 mask = {-1.0f, 1.0f, -1.0f, 1.0f}; - return _mm_fmadd_ps(b, mask, a); -} - -// Compute the absolute differences of packed unsigned 8-bit integers in a and -// b, then horizontally sum each consecutive 8 differences to produce two -// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low -// 16 bits of 64-bit elements in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sad_epu8 -FORCE_INLINE __m128i _mm_sad_epu8(__m128i a, __m128i b) -{ - uint16x8_t t = vpaddlq_u8(vabdq_u8((uint8x16_t) a, (uint8x16_t) b)); - uint16_t r0 = t[0] + t[1] + t[2] + t[3]; - uint16_t r4 = t[4] + t[5] + t[6] + t[7]; - uint16x8_t r = vsetq_lane_u16(r0, vdupq_n_u16(0), 0); - return (__m128i) vsetq_lane_u16(r4, r, 4); -} - -// Compute the absolute differences of packed unsigned 8-bit integers in a and -// b, then horizontally sum each consecutive 8 differences to produce four -// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low -// 16 bits of dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sad_pu8 -FORCE_INLINE __m64 _mm_sad_pu8(__m64 a, __m64 b) -{ - uint16x4_t t = - vpaddl_u8(vabd_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b))); - uint16_t r0 = t[0] + t[1] + t[2] + t[3]; - return vreinterpret_m64_u16(vset_lane_u16(r0, vdup_n_u16(0), 0)); -} - -// Compute the absolute differences of packed unsigned 8-bit integers in a and -// b, then horizontally sum each consecutive 8 differences to produce four -// unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low -// 16 bits of dst. -// -// FOR j := 0 to 7 -// i := j*8 -// tmp[i+7:i] := ABS(a[i+7:i] - b[i+7:i]) -// ENDFOR -// dst[15:0] := tmp[7:0] + tmp[15:8] + tmp[23:16] + tmp[31:24] + tmp[39:32] + -// tmp[47:40] + tmp[55:48] + tmp[63:56] dst[63:16] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_m_psadbw -#define _m_psadbw(a, b) _mm_sad_pu8(a, b) - -// Divides the four single-precision, floating-point values of a and b. 
-// -// r0 := a0 / b0 -// r1 := a1 / b1 -// r2 := a2 / b2 -// r3 := a3 / b3 -// -// https://msdn.microsoft.com/en-us/library/edaw8147(v=vs.100).aspx -FORCE_INLINE __m128 _mm_div_ps(__m128 a, __m128 b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32( - vdivq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -#else - float32x4_t recip0 = vrecpeq_f32(vreinterpretq_f32_m128(b)); - float32x4_t recip1 = - vmulq_f32(recip0, vrecpsq_f32(recip0, vreinterpretq_f32_m128(b))); - return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(a), recip1)); -#endif -} - -// Divides the scalar single-precision floating point value of a by b. -// https://msdn.microsoft.com/en-us/library/4y73xa49(v=vs.100).aspx -FORCE_INLINE __m128 _mm_div_ss(__m128 a, __m128 b) -{ - float32_t value = - vgetq_lane_f32(vreinterpretq_f32_m128(_mm_div_ps(a, b)), 0); - return vreinterpretq_m128_f32( - vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0)); -} - -// Compute the approximate reciprocal of packed single-precision (32-bit) -// floating-point elements in a, and store the results in dst. The maximum -// relative error for this approximation is less than 1.5*2^-12. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ps -FORCE_INLINE __m128 _mm_rcp_ps(__m128 in) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32( - vdivq_f32(vdupq_n_f32(1.0f), vreinterpretq_f32_m128(in))); -#else - float32x4_t recip = vrecpeq_f32(vreinterpretq_f32_m128(in)); - recip = vmulq_f32(recip, vrecpsq_f32(recip, vreinterpretq_f32_m128(in))); - return vreinterpretq_m128_f32(recip); -#endif -} - -// Compute the approximate reciprocal of the lower single-precision (32-bit) -// floating-point element in a, store the result in the lower element of dst, -// and copy the upper 3 packed elements from a to the upper elements of dst. The -// maximum relative error for this approximation is less than 1.5*2^-12. -// -// dst[31:0] := (1.0 / a[31:0]) -// dst[127:32] := a[127:32] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ss -FORCE_INLINE __m128 _mm_rcp_ss(__m128 a) -{ - return _mm_move_ss(a, _mm_rcp_ps(a)); -} - -// Computes the approximations of square roots of the four single-precision, -// floating-point values of a. First computes reciprocal square roots and then -// reciprocals of the four values. -// -// r0 := sqrt(a0) -// r1 := sqrt(a1) -// r2 := sqrt(a2) -// r3 := sqrt(a3) -// -// https://msdn.microsoft.com/en-us/library/vstudio/8z67bwwk(v=vs.100).aspx -FORCE_INLINE __m128 _mm_sqrt_ps(__m128 in) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32(vsqrtq_f32(vreinterpretq_f32_m128(in))); -#else - float32x4_t recipsq = vrsqrteq_f32(vreinterpretq_f32_m128(in)); - float32x4_t sq = vrecpeq_f32(recipsq); - // ??? use step versions of both sqrt and recip for better accuracy? - return vreinterpretq_m128_f32(sq); -#endif -} - -// Computes the approximation of the square root of the scalar single-precision -// floating point value of in. -// https://msdn.microsoft.com/en-us/library/ahfsc22d(v=vs.100).aspx -FORCE_INLINE __m128 _mm_sqrt_ss(__m128 in) -{ - float32_t value = - vgetq_lane_f32(vreinterpretq_f32_m128(_mm_sqrt_ps(in)), 0); - return vreinterpretq_m128_f32( - vsetq_lane_f32(value, vreinterpretq_f32_m128(in), 0)); -} - -// Computes the approximations of the reciprocal square roots of the four -// single-precision floating point values of in. 
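// Illustrative sketch (not part of the original header): the "???" note in
// _mm_sqrt_ps above hints at using the NEON step intrinsics to refine the
// estimate. One Newton-Raphson step for 1/sqrt(d) looks like this; the helper
// name is hypothetical and only plain <arm_neon.h> intrinsics are assumed.
static inline float32x4_t sse2neon_example_rsqrt_refined(float32x4_t d)
{
    float32x4_t x = vrsqrteq_f32(d); /* ~12-bit estimate of 1/sqrt(d) */
    /* x = x * (3 - d*x*x) / 2, computed via vrsqrtsq_f32 */
    x = vmulq_f32(x, vrsqrtsq_f32(vmulq_f32(d, x), x));
    return x;
}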
-// https://msdn.microsoft.com/en-us/library/22hfsh53(v=vs.100).aspx -FORCE_INLINE __m128 _mm_rsqrt_ps(__m128 in) -{ - return vreinterpretq_m128_f32(vrsqrteq_f32(vreinterpretq_f32_m128(in))); -} - -// Compute the approximate reciprocal square root of the lower single-precision -// (32-bit) floating-point element in a, store the result in the lower element -// of dst, and copy the upper 3 packed elements from a to the upper elements of -// dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rsqrt_ss -FORCE_INLINE __m128 _mm_rsqrt_ss(__m128 in) -{ - return vsetq_lane_f32(vgetq_lane_f32(_mm_rsqrt_ps(in), 0), in, 0); -} - -// Compare packed signed 16-bit integers in a and b, and store packed maximum -// values in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := MAX(a[i+15:i], b[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pi16 -FORCE_INLINE __m64 _mm_max_pi16(__m64 a, __m64 b) -{ - return vreinterpret_m64_s16( - vmax_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b))); -} - -// Compare packed signed 16-bit integers in a and b, and store packed maximum -// values in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := MAX(a[i+15:i], b[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pi16 -#define _m_pmaxsw(a, b) _mm_max_pi16(a, b) - -// Computes the maximums of the four single-precision, floating-point values of -// a and b. -// https://msdn.microsoft.com/en-us/library/vstudio/ff5d607a(v=vs.100).aspx -FORCE_INLINE __m128 _mm_max_ps(__m128 a, __m128 b) -{ -#if SSE2NEON_PRECISE_MINMAX - float32x4_t _a = vreinterpretq_f32_m128(a); - float32x4_t _b = vreinterpretq_f32_m128(b); - return vbslq_f32(vcltq_f32(_b, _a), _a, _b); -#else - return vreinterpretq_m128_f32( - vmaxq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -#endif -} - -// Compare packed unsigned 8-bit integers in a and b, and store packed maximum -// values in dst. -// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := MAX(a[i+7:i], b[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pu8 -FORCE_INLINE __m64 _mm_max_pu8(__m64 a, __m64 b) -{ - return vreinterpret_m64_u8( - vmax_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b))); -} - -// Compare packed unsigned 8-bit integers in a and b, and store packed maximum -// values in dst. -// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := MAX(a[i+7:i], b[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_pu8 -#define _m_pmaxub(a, b) _mm_max_pu8(a, b) - -// Compare packed signed 16-bit integers in a and b, and store packed minimum -// values in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := MIN(a[i+15:i], b[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pi16 -FORCE_INLINE __m64 _mm_min_pi16(__m64 a, __m64 b) -{ - return vreinterpret_m64_s16( - vmin_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b))); -} - -// Compare packed signed 16-bit integers in a and b, and store packed minimum -// values in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// dst[i+15:i] := MIN(a[i+15:i], b[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pi16 -#define _m_pminsw(a, b) _mm_min_pi16(a, b) - -// Computes the minima of the four single-precision, floating-point values of a -// and b. 
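// Illustrative sketch (not part of the original header): SSE2NEON_PRECISE_MINMAX
// above exists because x86 MAXPS/MINPS return the second operand whenever the
// comparison is unordered (NaN) or the inputs compare equal (e.g. +0.0 vs -0.0),
// while plain vmaxq_f32/vminq_f32 do not. The helper name below is hypothetical.
static inline __m128 sse2neon_example_max_with_nan(void)
{
    /* Quiet NaN in every lane, built from its bit pattern. */
    __m128 a = vreinterpretq_m128_f32(vreinterpretq_f32_u32(vdupq_n_u32(0x7fc00000)));
    __m128 b = vreinterpretq_m128_f32(vdupq_n_f32(1.0f));
    /* x86 returns b (1.0f) here; the precise path of _mm_max_ps matches that. */
    return _mm_max_ps(a, b);
}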
-// https://msdn.microsoft.com/en-us/library/vstudio/wh13kadz(v=vs.100).aspx -FORCE_INLINE __m128 _mm_min_ps(__m128 a, __m128 b) -{ -#if SSE2NEON_PRECISE_MINMAX - float32x4_t _a = vreinterpretq_f32_m128(a); - float32x4_t _b = vreinterpretq_f32_m128(b); - return vbslq_f32(vcltq_f32(_a, _b), _a, _b); -#else - return vreinterpretq_m128_f32( - vminq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -#endif -} - -// Compare packed unsigned 8-bit integers in a and b, and store packed minimum -// values in dst. -// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := MIN(a[i+7:i], b[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pu8 -FORCE_INLINE __m64 _mm_min_pu8(__m64 a, __m64 b) -{ - return vreinterpret_m64_u8( - vmin_u8(vreinterpret_u8_m64(a), vreinterpret_u8_m64(b))); -} - -// Compare packed unsigned 8-bit integers in a and b, and store packed minimum -// values in dst. -// -// FOR j := 0 to 7 -// i := j*8 -// dst[i+7:i] := MIN(a[i+7:i], b[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_pu8 -#define _m_pminub(a, b) _mm_min_pu8(a, b) - -// Computes the maximum of the two lower scalar single-precision floating point -// values of a and b. -// https://msdn.microsoft.com/en-us/library/s6db5esz(v=vs.100).aspx -FORCE_INLINE __m128 _mm_max_ss(__m128 a, __m128 b) -{ - float32_t value = vgetq_lane_f32(_mm_max_ps(a, b), 0); - return vreinterpretq_m128_f32( - vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0)); -} - -// Computes the minimum of the two lower scalar single-precision floating point -// values of a and b. -// https://msdn.microsoft.com/en-us/library/0a9y7xaa(v=vs.100).aspx -FORCE_INLINE __m128 _mm_min_ss(__m128 a, __m128 b) -{ - float32_t value = vgetq_lane_f32(_mm_min_ps(a, b), 0); - return vreinterpretq_m128_f32( - vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0)); -} - -// Computes the pairwise maxima of the 16 unsigned 8-bit integers from a and the -// 16 unsigned 8-bit integers from b. -// https://msdn.microsoft.com/en-us/library/st6634za(v=vs.100).aspx -FORCE_INLINE __m128i _mm_max_epu8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vmaxq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b))); -} - -// Computes the pairwise minima of the 16 unsigned 8-bit integers from a and the -// 16 unsigned 8-bit integers from b. -// https://msdn.microsoft.com/ko-kr/library/17k8cf58(v=vs.100).aspxx -FORCE_INLINE __m128i _mm_min_epu8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vminq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b))); -} - -// Computes the pairwise minima of the 8 signed 16-bit integers from a and the 8 -// signed 16-bit integers from b. -// https://msdn.microsoft.com/en-us/library/vstudio/6te997ew(v=vs.100).aspx -FORCE_INLINE __m128i _mm_min_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vminq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -// Compare packed signed 8-bit integers in a and b, and store packed maximum -// values in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epi8 -FORCE_INLINE __m128i _mm_max_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s8( - vmaxq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Compare packed unsigned 16-bit integers in a and b, and store packed maximum -// values in dst. 
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu16
-FORCE_INLINE __m128i _mm_max_epu16(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_u16(
-        vmaxq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
-}
-
-// Compare packed signed 8-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epi8
-FORCE_INLINE __m128i _mm_min_epi8(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_s8(
-        vminq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
-}
-
-// Compare packed unsigned 16-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epu16
-FORCE_INLINE __m128i _mm_min_epu16(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_u16(
-        vminq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
-}
-
-// Computes the pairwise maxima of the 8 signed 16-bit integers from a and the 8
-// signed 16-bit integers from b.
-// https://msdn.microsoft.com/en-us/LIBRary/3x060h7c(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_max_epi16(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_s16(
-        vmaxq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
-}
-
-// epi versions of min/max
-// Computes the pairwise maximums of the four signed 32-bit integer values of a
-// and b.
-//
-// A 128-bit parameter that can be defined with the following equations:
-//   r0 := (a0 > b0) ? a0 : b0
-//   r1 := (a1 > b1) ? a1 : b1
-//   r2 := (a2 > b2) ? a2 : b2
-//   r3 := (a3 > b3) ? a3 : b3
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/bb514055(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_max_epi32(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_s32(
-        vmaxq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
-}
-
-// Computes the pairwise minima of the four signed 32-bit integer values of a
-// and b.
-//
-// A 128-bit parameter that can be defined with the following equations:
-//   r0 := (a0 < b0) ? a0 : b0
-//   r1 := (a1 < b1) ? a1 : b1
-//   r2 := (a2 < b2) ? a2 : b2
-//   r3 := (a3 < b3) ? a3 : b3
-//
-// https://msdn.microsoft.com/en-us/library/vstudio/bb531476(v=vs.100).aspx
-FORCE_INLINE __m128i _mm_min_epi32(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_s32(
-        vminq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
-}
-
-// Compare packed unsigned 32-bit integers in a and b, and store packed maximum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_epu32
-FORCE_INLINE __m128i _mm_max_epu32(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_u32(
-        vmaxq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)));
-}
-
-// Compare packed unsigned 32-bit integers in a and b, and store packed minimum
-// values in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_epu32
-FORCE_INLINE __m128i _mm_min_epu32(__m128i a, __m128i b)
-{
-    return vreinterpretq_m128i_u32(
-        vminq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)));
-}
-
-// Multiply the packed unsigned 16-bit integers in a and b, producing
-// intermediate 32-bit integers, and store the high 16 bits of the intermediate
-// integers in dst.
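// Illustrative sketch (not part of the original header): the packed min/max
// pairs above compose into a clamp. Assumes only the _mm_max_epi16 and
// _mm_min_epi16 defined earlier; the helper name is hypothetical.
static inline __m128i sse2neon_example_clamp_epi16(__m128i v, int16_t lo, int16_t hi)
{
    __m128i vlo = vreinterpretq_m128i_s16(vdupq_n_s16(lo));
    __m128i vhi = vreinterpretq_m128i_s16(vdupq_n_s16(hi));
    /* Raise each lane to at least lo, then cap it at hi. */
    return _mm_min_epi16(_mm_max_epi16(v, vlo), vhi);
}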
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mulhi_pu16 -FORCE_INLINE __m64 _mm_mulhi_pu16(__m64 a, __m64 b) -{ - return vreinterpret_m64_u16(vshrn_n_u32( - vmull_u16(vreinterpret_u16_m64(a), vreinterpret_u16_m64(b)), 16)); -} - -// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit -// integers from b. -// -// r0 := (a0 * b0)[31:16] -// r1 := (a1 * b1)[31:16] -// ... -// r7 := (a7 * b7)[31:16] -// -// https://msdn.microsoft.com/en-us/library/vstudio/59hddw1d(v=vs.100).aspx -FORCE_INLINE __m128i _mm_mulhi_epi16(__m128i a, __m128i b) -{ - /* FIXME: issue with large values because of result saturation */ - // int16x8_t ret = vqdmulhq_s16(vreinterpretq_s16_m128i(a), - // vreinterpretq_s16_m128i(b)); /* =2*a*b */ return - // vreinterpretq_m128i_s16(vshrq_n_s16(ret, 1)); - int16x4_t a3210 = vget_low_s16(vreinterpretq_s16_m128i(a)); - int16x4_t b3210 = vget_low_s16(vreinterpretq_s16_m128i(b)); - int32x4_t ab3210 = vmull_s16(a3210, b3210); /* 3333222211110000 */ - int16x4_t a7654 = vget_high_s16(vreinterpretq_s16_m128i(a)); - int16x4_t b7654 = vget_high_s16(vreinterpretq_s16_m128i(b)); - int32x4_t ab7654 = vmull_s16(a7654, b7654); /* 7777666655554444 */ - uint16x8x2_t r = - vuzpq_u16(vreinterpretq_u16_s32(ab3210), vreinterpretq_u16_s32(ab7654)); - return vreinterpretq_m128i_u16(r.val[1]); -} - -// Multiply the packed unsigned 16-bit integers in a and b, producing -// intermediate 32-bit integers, and store the high 16 bits of the intermediate -// integers in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mulhi_epu16 -FORCE_INLINE __m128i _mm_mulhi_epu16(__m128i a, __m128i b) -{ - uint16x4_t a3210 = vget_low_u16(vreinterpretq_u16_m128i(a)); - uint16x4_t b3210 = vget_low_u16(vreinterpretq_u16_m128i(b)); - uint32x4_t ab3210 = vmull_u16(a3210, b3210); -#if defined(__aarch64__) - uint32x4_t ab7654 = - vmull_high_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)); - uint16x8_t r = vuzp2q_u16(vreinterpretq_u16_u32(ab3210), - vreinterpretq_u16_u32(ab7654)); - return vreinterpretq_m128i_u16(r); -#else - uint16x4_t a7654 = vget_high_u16(vreinterpretq_u16_m128i(a)); - uint16x4_t b7654 = vget_high_u16(vreinterpretq_u16_m128i(b)); - uint32x4_t ab7654 = vmull_u16(a7654, b7654); - uint16x8x2_t r = - vuzpq_u16(vreinterpretq_u16_u32(ab3210), vreinterpretq_u16_u32(ab7654)); - return vreinterpretq_m128i_u16(r.val[1]); -#endif -} - -// Computes pairwise add of each argument as single-precision, floating-point -// values a and b. -// https://msdn.microsoft.com/en-us/library/yd9wecaa.aspx -FORCE_INLINE __m128 _mm_hadd_ps(__m128 a, __m128 b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32( - vpaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -#else - float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); - float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a)); - float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); - float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_f32( - vcombine_f32(vpadd_f32(a10, a32), vpadd_f32(b10, b32))); -#endif -} - -// Computes pairwise add of each argument as a 16-bit signed or unsigned integer -// values a and b. 
-FORCE_INLINE __m128i _mm_hadd_epi16(__m128i _a, __m128i _b)
-{
-    int16x8_t a = vreinterpretq_s16_m128i(_a);
-    int16x8_t b = vreinterpretq_s16_m128i(_b);
-#if defined(__aarch64__)
-    return vreinterpretq_m128i_s16(vpaddq_s16(a, b));
-#else
-    return vreinterpretq_m128i_s16(
-        vcombine_s16(vpadd_s16(vget_low_s16(a), vget_high_s16(a)),
-                     vpadd_s16(vget_low_s16(b), vget_high_s16(b))));
-#endif
-}
-
-// Horizontally subtract adjacent pairs of single-precision (32-bit)
-// floating-point elements in a and b, and pack the results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsub_ps
-FORCE_INLINE __m128 _mm_hsub_ps(__m128 _a, __m128 _b)
-{
-#if defined(__aarch64__)
-    return vreinterpretq_m128_f32(vsubq_f32(
-        vuzp1q_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b)),
-        vuzp2q_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b))));
-#else
-    float32x4x2_t c =
-        vuzpq_f32(vreinterpretq_f32_m128(_a), vreinterpretq_f32_m128(_b));
-    return vreinterpretq_m128_f32(vsubq_f32(c.val[0], c.val[1]));
-#endif
-}
-
-// Horizontally add adjacent pairs of 16-bit integers in a and b, and pack the
-// signed 16-bit results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pi16
-FORCE_INLINE __m64 _mm_hadd_pi16(__m64 a, __m64 b)
-{
-    return vreinterpret_m64_s16(
-        vpadd_s16(vreinterpret_s16_m64(a), vreinterpret_s16_m64(b)));
-}
-
-// Horizontally add adjacent pairs of 32-bit integers in a and b, and pack the
-// signed 32-bit results in dst.
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hadd_pi32
-FORCE_INLINE __m64 _mm_hadd_pi32(__m64 a, __m64 b)
-{
-    return vreinterpret_m64_s32(
-        vpadd_s32(vreinterpret_s32_m64(a), vreinterpret_s32_m64(b)));
-}
-
-// Computes pairwise difference of each argument as a 16-bit signed or unsigned
-// integer values a and b.
-FORCE_INLINE __m128i _mm_hsub_epi16(__m128i _a, __m128i _b)
-{
-    int32x4_t a = vreinterpretq_s32_m128i(_a);
-    int32x4_t b = vreinterpretq_s32_m128i(_b);
-    // Interleave using vshrn/vmovn
-    // [a0|a2|a4|a6|b0|b2|b4|b6]
-    // [a1|a3|a5|a7|b1|b3|b5|b7]
-    int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
-    int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
-    // Subtract
-    return vreinterpretq_m128i_s16(vsubq_s16(ab0246, ab1357));
-}
-
-// Computes saturated pairwise add of each argument as a 16-bit signed
-// integer values a and b.
-FORCE_INLINE __m128i _mm_hadds_epi16(__m128i _a, __m128i _b)
-{
-#if defined(__aarch64__)
-    int16x8_t a = vreinterpretq_s16_m128i(_a);
-    int16x8_t b = vreinterpretq_s16_m128i(_b);
-    return vreinterpretq_s64_s16(
-        vqaddq_s16(vuzp1q_s16(a, b), vuzp2q_s16(a, b)));
-#else
-    int32x4_t a = vreinterpretq_s32_m128i(_a);
-    int32x4_t b = vreinterpretq_s32_m128i(_b);
-    // Interleave using vshrn/vmovn
-    // [a0|a2|a4|a6|b0|b2|b4|b6]
-    // [a1|a3|a5|a7|b1|b3|b5|b7]
-    int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b));
-    int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16));
-    // Saturated add
-    return vreinterpretq_m128i_s16(vqaddq_s16(ab0246, ab1357));
-#endif
-}
-
-// Computes saturated pairwise difference of each argument as a 16-bit signed
-// integer values a and b.
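// Illustrative sketch (not part of the original header): a common use of the
// horizontal adds above is reducing a vector to a single sum. Two rounds of
// _mm_hadd_ps leave the total in every lane; the helper name is hypothetical.
static inline float sse2neon_example_hsum_ps(__m128 v)
{
    __m128 t = _mm_hadd_ps(v, v); /* [v0+v1, v2+v3, v0+v1, v2+v3] */
    t = _mm_hadd_ps(t, t);        /* total in every lane */
    return vgetq_lane_f32(vreinterpretq_f32_m128(t), 0);
}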
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_hsubs_epi16 -FORCE_INLINE __m128i _mm_hsubs_epi16(__m128i _a, __m128i _b) -{ -#if defined(__aarch64__) - int16x8_t a = vreinterpretq_s16_m128i(_a); - int16x8_t b = vreinterpretq_s16_m128i(_b); - return vreinterpretq_s64_s16( - vqsubq_s16(vuzp1q_s16(a, b), vuzp2q_s16(a, b))); -#else - int32x4_t a = vreinterpretq_s32_m128i(_a); - int32x4_t b = vreinterpretq_s32_m128i(_b); - // Interleave using vshrn/vmovn - // [a0|a2|a4|a6|b0|b2|b4|b6] - // [a1|a3|a5|a7|b1|b3|b5|b7] - int16x8_t ab0246 = vcombine_s16(vmovn_s32(a), vmovn_s32(b)); - int16x8_t ab1357 = vcombine_s16(vshrn_n_s32(a, 16), vshrn_n_s32(b, 16)); - // Saturated subtract - return vreinterpretq_m128i_s16(vqsubq_s16(ab0246, ab1357)); -#endif -} - -// Computes pairwise add of each argument as a 32-bit signed or unsigned integer -// values a and b. -FORCE_INLINE __m128i _mm_hadd_epi32(__m128i _a, __m128i _b) -{ - int32x4_t a = vreinterpretq_s32_m128i(_a); - int32x4_t b = vreinterpretq_s32_m128i(_b); - return vreinterpretq_m128i_s32( - vcombine_s32(vpadd_s32(vget_low_s32(a), vget_high_s32(a)), - vpadd_s32(vget_low_s32(b), vget_high_s32(b)))); -} - -// Computes pairwise difference of each argument as a 32-bit signed or unsigned -// integer values a and b. -FORCE_INLINE __m128i _mm_hsub_epi32(__m128i _a, __m128i _b) -{ - int64x2_t a = vreinterpretq_s64_m128i(_a); - int64x2_t b = vreinterpretq_s64_m128i(_b); - // Interleave using vshrn/vmovn - // [a0|a2|b0|b2] - // [a1|a2|b1|b3] - int32x4_t ab02 = vcombine_s32(vmovn_s64(a), vmovn_s64(b)); - int32x4_t ab13 = vcombine_s32(vshrn_n_s64(a, 32), vshrn_n_s64(b, 32)); - // Subtract - return vreinterpretq_m128i_s32(vsubq_s32(ab02, ab13)); -} - -// Kahan summation for accurate summation of floating-point numbers. -// http://blog.zachbjornson.com/2019/08/11/fast-float-summation.html -FORCE_INLINE void sse2neon_kadd_f32(float *sum, float *c, float y) -{ - y -= *c; - float t = *sum + y; - *c = (t - *sum) - y; - *sum = t; -} - -// Conditionally multiply the packed single-precision (32-bit) floating-point -// elements in a and b using the high 4 bits in imm8, sum the four products, -// and conditionally store the sum in dst using the low 4 bits of imm. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_dp_ps -FORCE_INLINE __m128 _mm_dp_ps(__m128 a, __m128 b, const int imm) -{ -#if defined(__aarch64__) - /* shortcuts */ - if (imm == 0xFF) { - return _mm_set1_ps(vaddvq_f32(_mm_mul_ps(a, b))); - } - if (imm == 0x7F) { - float32x4_t m = _mm_mul_ps(a, b); - m[3] = 0; - return _mm_set1_ps(vaddvq_f32(m)); - } -#endif - - float s = 0, c = 0; - float32x4_t f32a = vreinterpretq_f32_m128(a); - float32x4_t f32b = vreinterpretq_f32_m128(b); - - /* To improve the accuracy of floating-point summation, Kahan algorithm - * is used for each operation. - */ - if (imm & (1 << 4)) - sse2neon_kadd_f32(&s, &c, f32a[0] * f32b[0]); - if (imm & (1 << 5)) - sse2neon_kadd_f32(&s, &c, f32a[1] * f32b[1]); - if (imm & (1 << 6)) - sse2neon_kadd_f32(&s, &c, f32a[2] * f32b[2]); - if (imm & (1 << 7)) - sse2neon_kadd_f32(&s, &c, f32a[3] * f32b[3]); - s += c; - - float32x4_t res = { - (imm & 0x1) ? s : 0, - (imm & 0x2) ? s : 0, - (imm & 0x4) ? s : 0, - (imm & 0x8) ? 
s : 0, - }; - return vreinterpretq_m128_f32(res); -} - -/* Compare operations */ - -// Compares for less than -// https://msdn.microsoft.com/en-us/library/vstudio/f330yhc8(v=vs.100).aspx -FORCE_INLINE __m128 _mm_cmplt_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_u32( - vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Compares for less than -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/fy94wye7(v=vs.100) -FORCE_INLINE __m128 _mm_cmplt_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmplt_ps(a, b)); -} - -// Compares for greater than. -// -// r0 := (a0 > b0) ? 0xffffffff : 0x0 -// r1 := (a1 > b1) ? 0xffffffff : 0x0 -// r2 := (a2 > b2) ? 0xffffffff : 0x0 -// r3 := (a3 > b3) ? 0xffffffff : 0x0 -// -// https://msdn.microsoft.com/en-us/library/vstudio/11dy102s(v=vs.100).aspx -FORCE_INLINE __m128 _mm_cmpgt_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_u32( - vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Compares for greater than. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/1xyyyy9e(v=vs.100) -FORCE_INLINE __m128 _mm_cmpgt_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmpgt_ps(a, b)); -} - -// Compares for greater than or equal. -// https://msdn.microsoft.com/en-us/library/vstudio/fs813y2t(v=vs.100).aspx -FORCE_INLINE __m128 _mm_cmpge_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_u32( - vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Compares for greater than or equal. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/kesh3ddc(v=vs.100) -FORCE_INLINE __m128 _mm_cmpge_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmpge_ps(a, b)); -} - -// Compares for less than or equal. -// -// r0 := (a0 <= b0) ? 0xffffffff : 0x0 -// r1 := (a1 <= b1) ? 0xffffffff : 0x0 -// r2 := (a2 <= b2) ? 0xffffffff : 0x0 -// r3 := (a3 <= b3) ? 0xffffffff : 0x0 -// -// https://msdn.microsoft.com/en-us/library/vstudio/1s75w83z(v=vs.100).aspx -FORCE_INLINE __m128 _mm_cmple_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_u32( - vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Compares for less than or equal. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/a7x0hbhw(v=vs.100) -FORCE_INLINE __m128 _mm_cmple_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmple_ps(a, b)); -} - -// Compares for equality. -// https://msdn.microsoft.com/en-us/library/vstudio/36aectz5(v=vs.100).aspx -FORCE_INLINE __m128 _mm_cmpeq_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_u32( - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -} - -// Compares for equality. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/k423z28e(v=vs.100) -FORCE_INLINE __m128 _mm_cmpeq_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmpeq_ps(a, b)); -} - -// Compares for inequality. -// https://msdn.microsoft.com/en-us/library/sf44thbx(v=vs.100).aspx -FORCE_INLINE __m128 _mm_cmpneq_ps(__m128 a, __m128 b) -{ - return vreinterpretq_m128_u32(vmvnq_u32( - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)))); -} - -// Compares for inequality. 
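// Illustrative sketch (not part of the original header): the usual way to call
// _mm_dp_ps above is a 4-element dot product. With imm8 = 0xF1 the high nibble
// selects all four products and the low nibble writes the sum to lane 0 only.
// The helper name is hypothetical.
static inline float sse2neon_example_dot4(__m128 a, __m128 b)
{
    __m128 d = _mm_dp_ps(a, b, 0xF1); /* a0*b0 + a1*b1 + a2*b2 + a3*b3 in lane 0 */
    return vgetq_lane_f32(vreinterpretq_f32_m128(d), 0);
}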
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/ekya8fh4(v=vs.100) -FORCE_INLINE __m128 _mm_cmpneq_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmpneq_ps(a, b)); -} - -// Compares for not greater than or equal. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/wsexys62(v=vs.100) -FORCE_INLINE __m128 _mm_cmpnge_ps(__m128 a, __m128 b) -{ - return _mm_cmplt_ps(a, b); -} - -// Compares for not greater than or equal. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/fk2y80s8(v=vs.100) -FORCE_INLINE __m128 _mm_cmpnge_ss(__m128 a, __m128 b) -{ - return _mm_cmplt_ss(a, b); -} - -// Compares for not greater than. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/d0xh7w0s(v=vs.100) -FORCE_INLINE __m128 _mm_cmpngt_ps(__m128 a, __m128 b) -{ - return _mm_cmple_ps(a, b); -} - -// Compares for not greater than. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/z7x9ydwh(v=vs.100) -FORCE_INLINE __m128 _mm_cmpngt_ss(__m128 a, __m128 b) -{ - return _mm_cmple_ss(a, b); -} - -// Compares for not less than or equal. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/6a330kxw(v=vs.100) -FORCE_INLINE __m128 _mm_cmpnle_ps(__m128 a, __m128 b) -{ - return _mm_cmpgt_ps(a, b); -} - -// Compares for not less than or equal. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/z7x9ydwh(v=vs.100) -FORCE_INLINE __m128 _mm_cmpnle_ss(__m128 a, __m128 b) -{ - return _mm_cmpgt_ss(a, b); -} - -// Compares for not less than. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/4686bbdw(v=vs.100) -FORCE_INLINE __m128 _mm_cmpnlt_ps(__m128 a, __m128 b) -{ - return _mm_cmpge_ps(a, b); -} - -// Compares for not less than. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/56b9z2wf(v=vs.100) -FORCE_INLINE __m128 _mm_cmpnlt_ss(__m128 a, __m128 b) -{ - return _mm_cmpge_ss(a, b); -} - -// Compares the 16 signed or unsigned 8-bit integers in a and the 16 signed or -// unsigned 8-bit integers in b for equality. -// https://msdn.microsoft.com/en-us/library/windows/desktop/bz5xk21a(v=vs.90).aspx -FORCE_INLINE __m128i _mm_cmpeq_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vceqq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Compares the 8 signed or unsigned 16-bit integers in a and the 8 signed or -// unsigned 16-bit integers in b for equality. 
-// https://msdn.microsoft.com/en-us/library/2ay060te(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cmpeq_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u16( - vceqq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -// Compare packed 32-bit integers in a and b for equality, and store the results -// in dst -FORCE_INLINE __m128i _mm_cmpeq_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u32( - vceqq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Compare packed 64-bit integers in a and b for equality, and store the results -// in dst -FORCE_INLINE __m128i _mm_cmpeq_epi64(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_u64( - vceqq_u64(vreinterpretq_u64_m128i(a), vreinterpretq_u64_m128i(b))); -#else - // ARMv7 lacks vceqq_u64 - // (a == b) -> (a_lo == b_lo) && (a_hi == b_hi) - uint32x4_t cmp = - vceqq_u32(vreinterpretq_u32_m128i(a), vreinterpretq_u32_m128i(b)); - uint32x4_t swapped = vrev64q_u32(cmp); - return vreinterpretq_m128i_u32(vandq_u32(cmp, swapped)); -#endif -} - -// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers -// in b for lesser than. -// https://msdn.microsoft.com/en-us/library/windows/desktop/9s46csht(v=vs.90).aspx -FORCE_INLINE __m128i _mm_cmplt_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vcltq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers -// in b for greater than. -// -// r0 := (a0 > b0) ? 0xff : 0x0 -// r1 := (a1 > b1) ? 0xff : 0x0 -// ... -// r15 := (a15 > b15) ? 0xff : 0x0 -// -// https://msdn.microsoft.com/zh-tw/library/wf45zt2b(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cmpgt_epi8(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vcgtq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -} - -// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers -// in b for less than. -// -// r0 := (a0 < b0) ? 0xffff : 0x0 -// r1 := (a1 < b1) ? 0xffff : 0x0 -// ... -// r7 := (a7 < b7) ? 0xffff : 0x0 -// -// https://technet.microsoft.com/en-us/library/t863edb2(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cmplt_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u16( - vcltq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - -// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers -// in b for greater than. -// -// r0 := (a0 > b0) ? 0xffff : 0x0 -// r1 := (a1 > b1) ? 0xffff : 0x0 -// ... -// r7 := (a7 > b7) ? 0xffff : 0x0 -// -// https://technet.microsoft.com/en-us/library/xd43yfsa(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cmpgt_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u16( - vcgtq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -} - - -// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers -// in b for less than. -// https://msdn.microsoft.com/en-us/library/vstudio/4ak0bf5d(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cmplt_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u32( - vcltq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers -// in b for greater than. 
-// https://msdn.microsoft.com/en-us/library/vstudio/1s9f2z0y(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cmpgt_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u32( - vcgtq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -} - -// Compares the 2 signed 64-bit integers in a and the 2 signed 64-bit integers -// in b for greater than. -FORCE_INLINE __m128i _mm_cmpgt_epi64(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_u64( - vcgtq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b))); -#else - // ARMv7 lacks vcgtq_s64. - // This is based off of Clang's SSE2 polyfill: - // (a > b) -> ((a_hi > b_hi) || (a_lo > b_lo && a_hi == b_hi)) - - // Mask the sign bit out since we need a signed AND an unsigned comparison - // and it is ugly to try and split them. - int32x4_t mask = vreinterpretq_s32_s64(vdupq_n_s64(0x80000000ull)); - int32x4_t a_mask = veorq_s32(vreinterpretq_s32_m128i(a), mask); - int32x4_t b_mask = veorq_s32(vreinterpretq_s32_m128i(b), mask); - // Check if a > b - int64x2_t greater = vreinterpretq_s64_u32(vcgtq_s32(a_mask, b_mask)); - // Copy upper mask to lower mask - // a_hi > b_hi - int64x2_t gt_hi = vshrq_n_s64(greater, 63); - // Copy lower mask to upper mask - // a_lo > b_lo - int64x2_t gt_lo = vsliq_n_s64(greater, greater, 32); - // Compare for equality - int64x2_t equal = vreinterpretq_s64_u32(vceqq_s32(a_mask, b_mask)); - // Copy upper mask to lower mask - // a_hi == b_hi - int64x2_t eq_hi = vshrq_n_s64(equal, 63); - // a_hi > b_hi || (a_lo > b_lo && a_hi == b_hi) - int64x2_t ret = vorrq_s64(gt_hi, vandq_s64(gt_lo, eq_hi)); - return vreinterpretq_m128i_s64(ret); -#endif -} - -// Compares the four 32-bit floats in a and b to check if any values are NaN. -// Ordered compare between each value returns true for "orderable" and false for -// "not orderable" (NaN). -// https://msdn.microsoft.com/en-us/library/vstudio/0h9w00fx(v=vs.100).aspx see -// also: -// http://stackoverflow.com/questions/8627331/what-does-ordered-unordered-comparison-mean -// http://stackoverflow.com/questions/29349621/neon-isnanval-intrinsics -FORCE_INLINE __m128 _mm_cmpord_ps(__m128 a, __m128 b) -{ - // Note: NEON does not have ordered compare builtin - // Need to compare a eq a and b eq b to check for NaN - // Do AND of results to get final - uint32x4_t ceqaa = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t ceqbb = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_u32(vandq_u32(ceqaa, ceqbb)); -} - -// Compares for ordered. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/343t62da(v=vs.100) -FORCE_INLINE __m128 _mm_cmpord_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmpord_ps(a, b)); -} - -// Compares for unordered. -// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/khy6fk1t(v=vs.100) -FORCE_INLINE __m128 _mm_cmpunord_ps(__m128 a, __m128 b) -{ - uint32x4_t f32a = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t f32b = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - return vreinterpretq_m128_u32(vmvnq_u32(vandq_u32(f32a, f32b))); -} - -// Compares for unordered. 
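// Illustrative sketch (not part of the original header): because NaN != NaN,
// an unordered self-compare with the _mm_cmpunord_ps defined above yields an
// all-ones mask in exactly the NaN lanes. The helper name is hypothetical.
static inline __m128 sse2neon_example_isnan_ps(__m128 v)
{
    return _mm_cmpunord_ps(v, v); /* 0xFFFFFFFF where v is NaN, 0 elsewhere */
}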
-// https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2010/2as2387b(v=vs.100) -FORCE_INLINE __m128 _mm_cmpunord_ss(__m128 a, __m128 b) -{ - return _mm_move_ss(a, _mm_cmpunord_ps(a, b)); -} - -// Compares the lower single-precision floating point scalar values of a and b -// using a less than operation. : -// https://msdn.microsoft.com/en-us/library/2kwe606b(v=vs.90).aspx Important -// note!! The documentation on MSDN is incorrect! If either of the values is a -// NAN the docs say you will get a one, but in fact, it will return a zero!! -FORCE_INLINE int _mm_comilt_ss(__m128 a, __m128 b) -{ - uint32x4_t a_not_nan = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t b_not_nan = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan); - uint32x4_t a_lt_b = - vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)); - return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_lt_b), 0) != 0) ? 1 : 0; -} - -// Compares the lower single-precision floating point scalar values of a and b -// using a greater than operation. : -// https://msdn.microsoft.com/en-us/library/b0738e0t(v=vs.100).aspx -FORCE_INLINE int _mm_comigt_ss(__m128 a, __m128 b) -{ - // return vgetq_lane_u32(vcgtq_f32(vreinterpretq_f32_m128(a), - // vreinterpretq_f32_m128(b)), 0); - uint32x4_t a_not_nan = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t b_not_nan = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan); - uint32x4_t a_gt_b = - vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)); - return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_gt_b), 0) != 0) ? 1 : 0; -} - -// Compares the lower single-precision floating point scalar values of a and b -// using a less than or equal operation. : -// https://msdn.microsoft.com/en-us/library/1w4t7c57(v=vs.90).aspx -FORCE_INLINE int _mm_comile_ss(__m128 a, __m128 b) -{ - // return vgetq_lane_u32(vcleq_f32(vreinterpretq_f32_m128(a), - // vreinterpretq_f32_m128(b)), 0); - uint32x4_t a_not_nan = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t b_not_nan = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan); - uint32x4_t a_le_b = - vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)); - return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_le_b), 0) != 0) ? 1 : 0; -} - -// Compares the lower single-precision floating point scalar values of a and b -// using a greater than or equal operation. : -// https://msdn.microsoft.com/en-us/library/8t80des6(v=vs.100).aspx -FORCE_INLINE int _mm_comige_ss(__m128 a, __m128 b) -{ - // return vgetq_lane_u32(vcgeq_f32(vreinterpretq_f32_m128(a), - // vreinterpretq_f32_m128(b)), 0); - uint32x4_t a_not_nan = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t b_not_nan = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan); - uint32x4_t a_ge_b = - vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)); - return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_ge_b), 0) != 0) ? 1 : 0; -} - -// Compares the lower single-precision floating point scalar values of a and b -// using an equality operation. 
: -// https://msdn.microsoft.com/en-us/library/93yx2h2b(v=vs.100).aspx -FORCE_INLINE int _mm_comieq_ss(__m128 a, __m128 b) -{ - // return vgetq_lane_u32(vceqq_f32(vreinterpretq_f32_m128(a), - // vreinterpretq_f32_m128(b)), 0); - uint32x4_t a_not_nan = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t b_not_nan = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan); - uint32x4_t a_eq_b = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)); - return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_eq_b), 0) != 0) ? 1 : 0; -} - -// Compares the lower single-precision floating point scalar values of a and b -// using an inequality operation. : -// https://msdn.microsoft.com/en-us/library/bafh5e0a(v=vs.90).aspx -FORCE_INLINE int _mm_comineq_ss(__m128 a, __m128 b) -{ - // return !vgetq_lane_u32(vceqq_f32(vreinterpretq_f32_m128(a), - // vreinterpretq_f32_m128(b)), 0); - uint32x4_t a_not_nan = - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a)); - uint32x4_t b_not_nan = - vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b)); - uint32x4_t a_or_b_nan = vmvnq_u32(vandq_u32(a_not_nan, b_not_nan)); - uint32x4_t a_neq_b = vmvnq_u32( - vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); - return (vgetq_lane_u32(vorrq_u32(a_or_b_nan, a_neq_b), 0) != 0) ? 1 : 0; -} - -// according to the documentation, these intrinsics behave the same as the -// non-'u' versions. We'll just alias them here. -#define _mm_ucomilt_ss _mm_comilt_ss -#define _mm_ucomile_ss _mm_comile_ss -#define _mm_ucomigt_ss _mm_comigt_ss -#define _mm_ucomige_ss _mm_comige_ss -#define _mm_ucomieq_ss _mm_comieq_ss -#define _mm_ucomineq_ss _mm_comineq_ss - -/* Conversions */ - -// Convert packed signed 32-bit integers in b to packed single-precision -// (32-bit) floating-point elements, store the results in the lower 2 elements -// of dst, and copy the upper 2 packed elements from a to the upper elements of -// dst. -// -// dst[31:0] := Convert_Int32_To_FP32(b[31:0]) -// dst[63:32] := Convert_Int32_To_FP32(b[63:32]) -// dst[95:64] := a[95:64] -// dst[127:96] := a[127:96] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_pi2ps -FORCE_INLINE __m128 _mm_cvt_pi2ps(__m128 a, __m64 b) -{ - return vreinterpretq_m128_f32( - vcombine_f32(vcvt_f32_s32(vreinterpret_s32_m64(b)), - vget_high_f32(vreinterpretq_f32_m128(a)))); -} - -// Convert the signed 32-bit integer b to a single-precision (32-bit) -// floating-point element, store the result in the lower element of dst, and -// copy the upper 3 packed elements from a to the upper elements of dst. -// -// dst[31:0] := Convert_Int32_To_FP32(b[31:0]) -// dst[127:32] := a[127:32] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_si2ss -FORCE_INLINE __m128 _mm_cvt_si2ss(__m128 a, int b) -{ - return vreinterpretq_m128_f32( - vsetq_lane_f32((float) b, vreinterpretq_f32_m128(a), 0)); -} - -// Convert the signed 32-bit integer b to a single-precision (32-bit) -// floating-point element, store the result in the lower element of dst, and -// copy the upper 3 packed elements from a to the upper elements of dst. 
-// -// dst[31:0] := Convert_Int32_To_FP32(b[31:0]) -// dst[127:32] := a[127:32] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi32_ss -#define _mm_cvtsi32_ss(a, b) _mm_cvt_si2ss(a, b) - -// Convert the signed 64-bit integer b to a single-precision (32-bit) -// floating-point element, store the result in the lower element of dst, and -// copy the upper 3 packed elements from a to the upper elements of dst. -// -// dst[31:0] := Convert_Int64_To_FP32(b[63:0]) -// dst[127:32] := a[127:32] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi64_ss -FORCE_INLINE __m128 _mm_cvtsi64_ss(__m128 a, int64_t b) -{ - return vreinterpretq_m128_f32( - vsetq_lane_f32((float) b, vreinterpretq_f32_m128(a), 0)); -} - -// Convert the lower single-precision (32-bit) floating-point element in a to a -// 32-bit integer, and store the result in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ss2si -FORCE_INLINE int _mm_cvt_ss2si(__m128 a) -{ -#if defined(__aarch64__) - return vgetq_lane_s32(vcvtnq_s32_f32(vreinterpretq_f32_m128(a)), 0); -#else - float32_t data = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); - float32_t diff = data - floor(data); - if (diff > 0.5) - return (int32_t) ceil(data); - if (diff == 0.5) { - int32_t f = (int32_t) floor(data); - int32_t c = (int32_t) ceil(data); - return c & 1 ? f : c; - } - return (int32_t) floor(data); -#endif -} - -// Convert packed 16-bit integers in a to packed single-precision (32-bit) -// floating-point elements, and store the results in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// m := j*32 -// dst[m+31:m] := Convert_Int16_To_FP32(a[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi16_ps -FORCE_INLINE __m128 _mm_cvtpi16_ps(__m64 a) -{ - return vreinterpretq_m128_f32( - vcvtq_f32_s32(vmovl_s16(vreinterpret_s16_m64(a)))); -} - -// Convert packed 32-bit integers in b to packed single-precision (32-bit) -// floating-point elements, store the results in the lower 2 elements of dst, -// and copy the upper 2 packed elements from a to the upper elements of dst. -// -// dst[31:0] := Convert_Int32_To_FP32(b[31:0]) -// dst[63:32] := Convert_Int32_To_FP32(b[63:32]) -// dst[95:64] := a[95:64] -// dst[127:96] := a[127:96] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32_ps -FORCE_INLINE __m128 _mm_cvtpi32_ps(__m128 a, __m64 b) -{ - return vreinterpretq_m128_f32( - vcombine_f32(vcvt_f32_s32(vreinterpret_s32_m64(b)), - vget_high_f32(vreinterpretq_f32_m128(a)))); -} - -// Convert packed signed 32-bit integers in a to packed single-precision -// (32-bit) floating-point elements, store the results in the lower 2 elements -// of dst, then covert the packed signed 32-bit integers in b to -// single-precision (32-bit) floating-point element, and store the results in -// the upper 2 elements of dst. 
-// -// dst[31:0] := Convert_Int32_To_FP32(a[31:0]) -// dst[63:32] := Convert_Int32_To_FP32(a[63:32]) -// dst[95:64] := Convert_Int32_To_FP32(b[31:0]) -// dst[127:96] := Convert_Int32_To_FP32(b[63:32]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi32x2_ps -FORCE_INLINE __m128 _mm_cvtpi32x2_ps(__m64 a, __m64 b) -{ - return vreinterpretq_m128_f32(vcvtq_f32_s32( - vcombine_s32(vreinterpret_s32_m64(a), vreinterpret_s32_m64(b)))); -} - -// Convert the lower packed 8-bit integers in a to packed single-precision -// (32-bit) floating-point elements, and store the results in dst. -// -// FOR j := 0 to 3 -// i := j*8 -// m := j*32 -// dst[m+31:m] := Convert_Int8_To_FP32(a[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpi8_ps -FORCE_INLINE __m128 _mm_cvtpi8_ps(__m64 a) -{ - return vreinterpretq_m128_f32(vcvtq_f32_s32( - vmovl_s16(vget_low_s16(vmovl_s8(vreinterpret_s8_m64(a)))))); -} - -// Convert packed unsigned 16-bit integers in a to packed single-precision -// (32-bit) floating-point elements, and store the results in dst. -// -// FOR j := 0 to 3 -// i := j*16 -// m := j*32 -// dst[m+31:m] := Convert_UInt16_To_FP32(a[i+15:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpu16_ps -FORCE_INLINE __m128 _mm_cvtpu16_ps(__m64 a) -{ - return vreinterpretq_m128_f32( - vcvtq_f32_u32(vmovl_u16(vreinterpret_u16_m64(a)))); -} - -// Convert the lower packed unsigned 8-bit integers in a to packed -// single-precision (32-bit) floating-point elements, and store the results in -// dst. -// -// FOR j := 0 to 3 -// i := j*8 -// m := j*32 -// dst[m+31:m] := Convert_UInt8_To_FP32(a[i+7:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpu8_ps -FORCE_INLINE __m128 _mm_cvtpu8_ps(__m64 a) -{ - return vreinterpretq_m128_f32(vcvtq_f32_u32( - vmovl_u16(vget_low_u16(vmovl_u8(vreinterpret_u8_m64(a)))))); -} - -// Converts the four single-precision, floating-point values of a to signed -// 32-bit integer values using truncate. -// https://msdn.microsoft.com/en-us/library/vstudio/1h005y6x(v=vs.100).aspx -FORCE_INLINE __m128i _mm_cvttps_epi32(__m128 a) -{ - return vreinterpretq_m128i_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a))); -} - -// Convert the lower double-precision (64-bit) floating-point element in a to a -// 64-bit integer with truncation, and store the result in dst. -// -// dst[63:0] := Convert_FP64_To_Int64_Truncate(a[63:0]) -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si64 -FORCE_INLINE int64_t _mm_cvttsd_si64(__m128d a) -{ -#if defined(__aarch64__) - return vgetq_lane_s64(vcvtq_s64_f64(vreinterpretq_f64_m128d(a)), 0); -#else - double ret = *((double *) &a); - return (int64_t) ret; -#endif -} - -// Convert the lower double-precision (64-bit) floating-point element in a to a -// 64-bit integer with truncation, and store the result in dst. 
-//
-// dst[63:0] := Convert_FP64_To_Int64_Truncate(a[63:0])
-//
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttsd_si64x
-#define _mm_cvttsd_si64x(a) _mm_cvttsd_si64(a)
-
-// Converts the four signed 32-bit integer values of a to single-precision,
-// floating-point values.
-// https://msdn.microsoft.com/en-us/library/vstudio/36bwxcx5(v=vs.100).aspx
-FORCE_INLINE __m128 _mm_cvtepi32_ps(__m128i a)
-{
-    return vreinterpretq_m128_f32(vcvtq_f32_s32(vreinterpretq_s32_m128i(a)));
-}
-
-// Converts the eight unsigned 8-bit integers in the lower 64 bits to eight
-// unsigned 16-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu8_epi16(__m128i a)
-{
-    uint8x16_t u8x16 = vreinterpretq_u8_m128i(a);    /* xxxx xxxx xxxx DCBA */
-    uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0D0C 0B0A */
-    return vreinterpretq_m128i_u16(u16x8);
-}
-
-// Converts the four unsigned 8-bit integers in the lower 32 bits to four
-// unsigned 32-bit integers.
-// https://msdn.microsoft.com/en-us/library/bb531467%28v=vs.100%29.aspx
-FORCE_INLINE __m128i _mm_cvtepu8_epi32(__m128i a)
-{
-    uint8x16_t u8x16 = vreinterpretq_u8_m128i(a);      /* xxxx xxxx xxxx DCBA */
-    uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16));   /* 0x0x 0x0x 0D0C 0B0A */
-    uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000D 000C 000B 000A */
-    return vreinterpretq_m128i_u32(u32x4);
-}
-
-// Converts the two unsigned 8-bit integers in the lower 16 bits to two
-// unsigned 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepu8_epi64(__m128i a)
-{
-    uint8x16_t u8x16 = vreinterpretq_u8_m128i(a);      /* xxxx xxxx xxxx xxBA */
-    uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16));   /* 0x0x 0x0x 0x0x 0B0A */
-    uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000x 000x 000B 000A */
-    uint64x2_t u64x2 = vmovl_u32(vget_low_u32(u32x4)); /* 0000 000B 0000 000A */
-    return vreinterpretq_m128i_u64(u64x2);
-}
-
-// Converts the eight signed 8-bit integers in the lower 64 bits to eight
-// signed 16-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi8_epi16(__m128i a)
-{
-    int8x16_t s8x16 = vreinterpretq_s8_m128i(a);    /* xxxx xxxx xxxx DCBA */
-    int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16)); /* 0x0x 0x0x 0D0C 0B0A */
-    return vreinterpretq_m128i_s16(s16x8);
-}
-
-// Converts the four signed 8-bit integers in the lower 32 bits to four
-// signed 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi8_epi32(__m128i a)
-{
-    int8x16_t s8x16 = vreinterpretq_s8_m128i(a);      /* xxxx xxxx xxxx DCBA */
-    int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16));   /* 0x0x 0x0x 0D0C 0B0A */
-    int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000D 000C 000B 000A */
-    return vreinterpretq_m128i_s32(s32x4);
-}
-
-// Converts the two signed 8-bit integers in the lower 16 bits to two
-// signed 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi8_epi64(__m128i a)
-{
-    int8x16_t s8x16 = vreinterpretq_s8_m128i(a);      /* xxxx xxxx xxxx xxBA */
-    int16x8_t s16x8 = vmovl_s8(vget_low_s8(s8x16));   /* 0x0x 0x0x 0x0x 0B0A */
-    int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000x 000x 000B 000A */
-    int64x2_t s64x2 = vmovl_s32(vget_low_s32(s32x4)); /* 0000 000B 0000 000A */
-    return vreinterpretq_m128i_s64(s64x2);
-}
-
-// Converts the four signed 16-bit integers in the lower 64 bits to four signed
-// 32-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi16_epi32(__m128i a)
-{
-    return vreinterpretq_m128i_s32(
-        vmovl_s16(vget_low_s16(vreinterpretq_s16_m128i(a))));
-}
-
-// Converts the two signed 16-bit integers in the lower 32 bits to two signed
-// 64-bit integers.
-FORCE_INLINE __m128i _mm_cvtepi16_epi64(__m128i a) -{ - int16x8_t s16x8 = vreinterpretq_s16_m128i(a); /* xxxx xxxx xxxx 0B0A */ - int32x4_t s32x4 = vmovl_s16(vget_low_s16(s16x8)); /* 000x 000x 000B 000A */ - int64x2_t s64x2 = vmovl_s32(vget_low_s32(s32x4)); /* 0000 000B 0000 000A */ - return vreinterpretq_m128i_s64(s64x2); -} - -// Converts the four unsigned 16-bit integers in the lower 64 bits to four -// unsigned 32-bit integers. -FORCE_INLINE __m128i _mm_cvtepu16_epi32(__m128i a) -{ - return vreinterpretq_m128i_u32( - vmovl_u16(vget_low_u16(vreinterpretq_u16_m128i(a)))); -} - -// Converts the two unsigned 16-bit integers in the lower 32 bits to two -// unsigned 64-bit integers. -FORCE_INLINE __m128i _mm_cvtepu16_epi64(__m128i a) -{ - uint16x8_t u16x8 = vreinterpretq_u16_m128i(a); /* xxxx xxxx xxxx 0B0A */ - uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000x 000x 000B 000A */ - uint64x2_t u64x2 = vmovl_u32(vget_low_u32(u32x4)); /* 0000 000B 0000 000A */ - return vreinterpretq_m128i_u64(u64x2); -} - -// Converts the two unsigned 32-bit integers in the lower 64 bits to two -// unsigned 64-bit integers. -FORCE_INLINE __m128i _mm_cvtepu32_epi64(__m128i a) -{ - return vreinterpretq_m128i_u64( - vmovl_u32(vget_low_u32(vreinterpretq_u32_m128i(a)))); -} - -// Converts the two signed 32-bit integers in the lower 64 bits to two signed -// 64-bit integers. -FORCE_INLINE __m128i _mm_cvtepi32_epi64(__m128i a) -{ - return vreinterpretq_m128i_s64( - vmovl_s32(vget_low_s32(vreinterpretq_s32_m128i(a)))); -} - -// Converts the four single-precision, floating-point values of a to signed -// 32-bit integer values. -// -// r0 := (int) a0 -// r1 := (int) a1 -// r2 := (int) a2 -// r3 := (int) a3 -// -// https://msdn.microsoft.com/en-us/library/vstudio/xdc42k5e(v=vs.100).aspx -// *NOTE*. The default rounding mode on SSE is 'round to even', which ARMv7-A -// does not support! It is supported on ARMv8-A however. -FORCE_INLINE __m128i _mm_cvtps_epi32(__m128 a) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s32(vcvtnq_s32_f32(a)); -#else - uint32x4_t signmask = vdupq_n_u32(0x80000000); - float32x4_t half = vbslq_f32(signmask, vreinterpretq_f32_m128(a), - vdupq_n_f32(0.5f)); /* +/- 0.5 */ - int32x4_t r_normal = vcvtq_s32_f32(vaddq_f32( - vreinterpretq_f32_m128(a), half)); /* round to integer: [a + 0.5]*/ - int32x4_t r_trunc = - vcvtq_s32_f32(vreinterpretq_f32_m128(a)); /* truncate to integer: [a] */ - int32x4_t plusone = vreinterpretq_s32_u32(vshrq_n_u32( - vreinterpretq_u32_s32(vnegq_s32(r_trunc)), 31)); /* 1 or 0 */ - int32x4_t r_even = vbicq_s32(vaddq_s32(r_trunc, plusone), - vdupq_n_s32(1)); /* ([a] + {0,1}) & ~1 */ - float32x4_t delta = vsubq_f32( - vreinterpretq_f32_m128(a), - vcvtq_f32_s32(r_trunc)); /* compute delta: delta = (a - [a]) */ - uint32x4_t is_delta_half = vceqq_f32(delta, half); /* delta == +/- 0.5 */ - return vreinterpretq_m128i_s32(vbslq_s32(is_delta_half, r_even, r_normal)); -#endif -} - -// Copy the lower 32-bit integer in a to dst. -// -// dst[31:0] := a[31:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si32 -FORCE_INLINE int _mm_cvtsi128_si32(__m128i a) -{ - return vgetq_lane_s32(vreinterpretq_s32_m128i(a), 0); -} - -// Copy the lower 64-bit integer in a to dst. 
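// Illustrative sketch (not part of the original header): the round-to-nearest-
// even behaviour documented for _mm_cvtps_epi32 above is easiest to see on
// half-way values, which round to the nearest even integer. The helper name is
// hypothetical.
static inline void sse2neon_example_round_to_even(int32_t out[4])
{
    const float in[4] = {0.5f, 1.5f, 2.5f, -0.5f}; /* expect {0, 2, 2, 0} */
    __m128i r = _mm_cvtps_epi32(vreinterpretq_m128_f32(vld1q_f32(in)));
    vst1q_s32(out, vreinterpretq_s32_m128i(r));
}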
-// -// dst[63:0] := a[63:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64 -FORCE_INLINE int64_t _mm_cvtsi128_si64(__m128i a) -{ - return vgetq_lane_s64(vreinterpretq_s64_m128i(a), 0); -} - -// Copy the lower 64-bit integer in a to dst. -// -// dst[63:0] := a[63:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi128_si64x -#define _mm_cvtsi128_si64x(a) _mm_cvtsi128_si64(a) - -// Moves 32-bit integer a to the least significant 32 bits of an __m128 object, -// zero extending the upper bits. -// -// r0 := a -// r1 := 0x0 -// r2 := 0x0 -// r3 := 0x0 -// -// https://msdn.microsoft.com/en-us/library/ct3539ha%28v=vs.90%29.aspx -FORCE_INLINE __m128i _mm_cvtsi32_si128(int a) -{ - return vreinterpretq_m128i_s32(vsetq_lane_s32(a, vdupq_n_s32(0), 0)); -} - -// Moves 64-bit integer a to the least significant 64 bits of an __m128 object, -// zero extending the upper bits. -// -// r0 := a -// r1 := 0x0 -FORCE_INLINE __m128i _mm_cvtsi64_si128(int64_t a) -{ - return vreinterpretq_m128i_s64(vsetq_lane_s64(a, vdupq_n_s64(0), 0)); -} - -// Cast vector of type __m128 to type __m128d. This intrinsic is only used for -// compilation and does not generate any instructions, thus it has zero latency. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castps_pd -FORCE_INLINE __m128d _mm_castps_pd(__m128 a) -{ - return vreinterpretq_m128d_s32(vreinterpretq_s32_m128(a)); -} - -// Applies a type cast to reinterpret four 32-bit floating point values passed -// in as a 128-bit parameter as packed 32-bit integers. -// https://msdn.microsoft.com/en-us/library/bb514099.aspx -FORCE_INLINE __m128i _mm_castps_si128(__m128 a) -{ - return vreinterpretq_m128i_s32(vreinterpretq_s32_m128(a)); -} - -// Applies a type cast to reinterpret four 32-bit integers passed in as a -// 128-bit parameter as packed 32-bit floating point values. -// https://msdn.microsoft.com/en-us/library/bb514029.aspx -FORCE_INLINE __m128 _mm_castsi128_ps(__m128i a) -{ - return vreinterpretq_m128_s32(vreinterpretq_s32_m128i(a)); -} - -// Loads 128-bit value. : -// https://msdn.microsoft.com/en-us/library/atzzad1h(v=vs.80).aspx -FORCE_INLINE __m128i _mm_load_si128(const __m128i *p) -{ - return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p)); -} - -// Load a double-precision (64-bit) floating-point element from memory into both -// elements of dst. -// -// dst[63:0] := MEM[mem_addr+63:mem_addr] -// dst[127:64] := MEM[mem_addr+63:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load1_pd -FORCE_INLINE __m128d _mm_load1_pd(const double *p) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64(vld1q_dup_f64(p)); -#else - return vreinterpretq_m128d_s64(vdupq_n_s64(*(const int64_t *) p)); -#endif -} - -// Load a double-precision (64-bit) floating-point element from memory into the -// upper element of dst, and copy the lower element from a to dst. mem_addr does -// not need to be aligned on any particular boundary. 
-// -// dst[63:0] := a[63:0] -// dst[127:64] := MEM[mem_addr+63:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadh_pd -FORCE_INLINE __m128d _mm_loadh_pd(__m128d a, const double *p) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64( - vcombine_f64(vget_low_f64(vreinterpretq_f64_m128d(a)), vld1_f64(p))); -#else - return vreinterpretq_m128d_f32(vcombine_f32( - vget_low_f32(vreinterpretq_f32_m128d(a)), vld1_f32((const float *) p))); -#endif -} - -// Load a double-precision (64-bit) floating-point element from memory into both -// elements of dst. -// -// dst[63:0] := MEM[mem_addr+63:mem_addr] -// dst[127:64] := MEM[mem_addr+63:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_pd1 -#define _mm_load_pd1 _mm_load1_pd - -// Load a double-precision (64-bit) floating-point element from memory into both -// elements of dst. -// -// dst[63:0] := MEM[mem_addr+63:mem_addr] -// dst[127:64] := MEM[mem_addr+63:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loaddup_pd -#define _mm_loaddup_pd _mm_load1_pd - -// Loads 128-bit value. : -// https://msdn.microsoft.com/zh-cn/library/f4k12ae8(v=vs.90).aspx -FORCE_INLINE __m128i _mm_loadu_si128(const __m128i *p) -{ - return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p)); -} - -// Load unaligned 32-bit integer from memory into the first element of dst. -// -// dst[31:0] := MEM[mem_addr+31:mem_addr] -// dst[MAX:32] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si32 -FORCE_INLINE __m128i _mm_loadu_si32(const void *p) -{ - return vreinterpretq_m128i_s32( - vsetq_lane_s32(*(const int32_t *) p, vdupq_n_s32(0), 0)); -} - -// Convert packed double-precision (64-bit) floating-point elements in a to -// packed single-precision (32-bit) floating-point elements, and store the -// results in dst. -// -// FOR j := 0 to 1 -// i := 32*j -// k := 64*j -// dst[i+31:i] := Convert_FP64_To_FP32(a[k+64:k]) -// ENDFOR -// dst[127:64] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtpd_ps -FORCE_INLINE __m128 _mm_cvtpd_ps(__m128d a) -{ -#if defined(__aarch64__) - float32x2_t tmp = vcvt_f32_f64(vreinterpretq_f64_m128d(a)); - return vreinterpretq_m128_f32(vcombine_f32(tmp, vdup_n_f32(0))); -#else - float a0 = (float) ((double *) &a)[0]; - float a1 = (float) ((double *) &a)[1]; - return _mm_set_ps(0, 0, a1, a0); -#endif -} - -// Copy the lower double-precision (64-bit) floating-point element of a to dst. -// -// dst[63:0] := a[63:0] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsd_f64 -FORCE_INLINE double _mm_cvtsd_f64(__m128d a) -{ -#if defined(__aarch64__) - return (double) vgetq_lane_f64(vreinterpretq_f64_m128d(a), 0); -#else - return ((double *) &a)[0]; -#endif -} - -// Convert packed single-precision (32-bit) floating-point elements in a to -// packed double-precision (64-bit) floating-point elements, and store the -// results in dst. 
-// -// FOR j := 0 to 1 -// i := 64*j -// k := 32*j -// dst[i+63:i] := Convert_FP32_To_FP64(a[k+31:k]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtps_pd -FORCE_INLINE __m128d _mm_cvtps_pd(__m128 a) -{ -#if defined(__aarch64__) - return vreinterpretq_m128d_f64( - vcvt_f64_f32(vget_low_f32(vreinterpretq_f32_m128(a)))); -#else - double a0 = (double) vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); - double a1 = (double) vgetq_lane_f32(vreinterpretq_f32_m128(a), 1); - return _mm_set_pd(a1, a0); -#endif -} - -// Cast vector of type __m128d to type __m128i. This intrinsic is only used for -// compilation and does not generate any instructions, thus it has zero latency. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_castpd_si128 -FORCE_INLINE __m128i _mm_castpd_si128(__m128d a) -{ - return vreinterpretq_m128i_s64(vreinterpretq_s64_m128d(a)); -} - -// Blend packed single-precision (32-bit) floating-point elements from a and b -// using mask, and store the results in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_blendv_ps -FORCE_INLINE __m128 _mm_blendv_ps(__m128 a, __m128 b, __m128 mask) -{ - return vreinterpretq_m128_f32(vbslq_f32(vreinterpretq_u32_m128(mask), - vreinterpretq_f32_m128(b), - vreinterpretq_f32_m128(a))); -} - -// Round the packed single-precision (32-bit) floating-point elements in a using -// the rounding parameter, and store the results as packed single-precision -// floating-point elements in dst. -// software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_round_ps -FORCE_INLINE __m128 _mm_round_ps(__m128 a, int rounding) -{ -#if defined(__aarch64__) - switch (rounding) { - case (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC): - return vreinterpretq_m128_f32(vrndnq_f32(vreinterpretq_f32_m128(a))); - case (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC): - return vreinterpretq_m128_f32(vrndmq_f32(vreinterpretq_f32_m128(a))); - case (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC): - return vreinterpretq_m128_f32(vrndpq_f32(vreinterpretq_f32_m128(a))); - case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC): - return vreinterpretq_m128_f32(vrndq_f32(vreinterpretq_f32_m128(a))); - default: //_MM_FROUND_CUR_DIRECTION - return vreinterpretq_m128_f32(vrndiq_f32(vreinterpretq_f32_m128(a))); - } -#else - float *v_float = (float *) &a; - __m128 zero, neg_inf, pos_inf; - - switch (rounding) { - case (_MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC): - return _mm_cvtepi32_ps(_mm_cvtps_epi32(a)); - case (_MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC): - return (__m128){floorf(v_float[0]), floorf(v_float[1]), - floorf(v_float[2]), floorf(v_float[3])}; - case (_MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC): - return (__m128){ceilf(v_float[0]), ceilf(v_float[1]), ceilf(v_float[2]), - ceilf(v_float[3])}; - case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC): - zero = _mm_set_ps(0.0f, 0.0f, 0.0f, 0.0f); - neg_inf = _mm_set_ps(floorf(v_float[0]), floorf(v_float[1]), - floorf(v_float[2]), floorf(v_float[3])); - pos_inf = _mm_set_ps(ceilf(v_float[0]), ceilf(v_float[1]), - ceilf(v_float[2]), ceilf(v_float[3])); - return _mm_blendv_ps(pos_inf, neg_inf, _mm_cmple_ps(a, zero)); - default: //_MM_FROUND_CUR_DIRECTION - return (__m128){roundf(v_float[0]), roundf(v_float[1]), - roundf(v_float[2]), roundf(v_float[3])}; - } -#endif -} - -// Convert packed single-precision (32-bit) floating-point elements in a to -// packed 32-bit integers, and store the results in dst. 
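To make the rounding behaviour above concrete, a small sketch with illustrative values (hypothetical helper; assumes this header is included) contrasting the _mm_round_ps modes with the round-to-even conversion performed by _mm_cvtps_epi32:

static inline void example_rounding(void)
{
    __m128 a = _mm_set_ps(-2.5f, 2.5f, 1.5f, 0.5f);  /* lanes 0..3 = 0.5, 1.5, 2.5, -2.5 */
    __m128 down = _mm_round_ps(a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);  /* 0, 1, 2, -3 */
    __m128 up   = _mm_round_ps(a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC);  /* 1, 2, 3, -2 */
    __m128i nearest = _mm_cvtps_epi32(a);  /* 0, 2, 2, -2: halfway cases round to even */
    (void) down;
    (void) up;
    (void) nearest;
}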
-// -// FOR j := 0 to 1 -// i := 32*j -// dst[i+31:i] := Convert_FP32_To_Int32(a[i+31:i]) -// ENDFOR -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ps2pi -FORCE_INLINE __m64 _mm_cvt_ps2pi(__m128 a) -{ -#if defined(__aarch64__) - return vreinterpret_m64_s32( - vget_low_s32(vcvtnq_s32_f32(vreinterpretq_f32_m128(a)))); -#else - return vreinterpret_m64_s32( - vcvt_s32_f32(vget_low_f32(vreinterpretq_f32_m128( - _mm_round_ps(a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC))))); -#endif -} - -// Round the packed single-precision (32-bit) floating-point elements in a up to -// an integer value, and store the results as packed single-precision -// floating-point elements in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ceil_ps -FORCE_INLINE __m128 _mm_ceil_ps(__m128 a) -{ - return _mm_round_ps(a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); -} - -// Round the packed single-precision (32-bit) floating-point elements in a down -// to an integer value, and store the results as packed single-precision -// floating-point elements in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_floor_ps -FORCE_INLINE __m128 _mm_floor_ps(__m128 a) -{ - return _mm_round_ps(a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); -} - - -// Load 128-bits of integer data from unaligned memory into dst. This intrinsic -// may perform better than _mm_loadu_si128 when the data crosses a cache line -// boundary. -// -// dst[127:0] := MEM[mem_addr+127:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_lddqu_si128 -#define _mm_lddqu_si128 _mm_loadu_si128 - -/* Miscellaneous Operations */ - -// Shifts the 8 signed 16-bit integers in a right by count bits while shifting -// in the sign bit. -// -// r0 := a0 >> count -// r1 := a1 >> count -// ... -// r7 := a7 >> count -// -// https://msdn.microsoft.com/en-us/library/3c9997dk(v%3dvs.90).aspx -FORCE_INLINE __m128i _mm_sra_epi16(__m128i a, __m128i count) -{ - int64_t c = (int64_t) vget_low_s64((int64x2_t) count); - if (c > 15) - return _mm_cmplt_epi16(a, _mm_setzero_si128()); - return vreinterpretq_m128i_s16(vshlq_s16((int16x8_t) a, vdupq_n_s16(-c))); -} - -// Shifts the 4 signed 32-bit integers in a right by count bits while shifting -// in the sign bit. -// -// r0 := a0 >> count -// r1 := a1 >> count -// r2 := a2 >> count -// r3 := a3 >> count -// -// https://msdn.microsoft.com/en-us/library/ce40009e(v%3dvs.100).aspx -FORCE_INLINE __m128i _mm_sra_epi32(__m128i a, __m128i count) -{ - int64_t c = (int64_t) vget_low_s64((int64x2_t) count); - if (c > 31) - return _mm_cmplt_epi32(a, _mm_setzero_si128()); - return vreinterpretq_m128i_s32(vshlq_s32((int32x4_t) a, vdupq_n_s32(-c))); -} - -// Packs the 16 signed 16-bit integers from a and b into 8-bit integers and -// saturates. -// https://msdn.microsoft.com/en-us/library/k4y4f7w5%28v=vs.90%29.aspx -FORCE_INLINE __m128i _mm_packs_epi16(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s8( - vcombine_s8(vqmovn_s16(vreinterpretq_s16_m128i(a)), - vqmovn_s16(vreinterpretq_s16_m128i(b)))); -} - -// Packs the 16 signed 16 - bit integers from a and b into 8 - bit unsigned -// integers and saturates. -// -// r0 := UnsignedSaturate(a0) -// r1 := UnsignedSaturate(a1) -// ... -// r7 := UnsignedSaturate(a7) -// r8 := UnsignedSaturate(b0) -// r9 := UnsignedSaturate(b1) -// ... 
-// r15 := UnsignedSaturate(b7) -// -// https://msdn.microsoft.com/en-us/library/07ad1wx4(v=vs.100).aspx -FORCE_INLINE __m128i _mm_packus_epi16(const __m128i a, const __m128i b) -{ - return vreinterpretq_m128i_u8( - vcombine_u8(vqmovun_s16(vreinterpretq_s16_m128i(a)), - vqmovun_s16(vreinterpretq_s16_m128i(b)))); -} - -// Packs the 8 signed 32-bit integers from a and b into signed 16-bit integers -// and saturates. -// -// r0 := SignedSaturate(a0) -// r1 := SignedSaturate(a1) -// r2 := SignedSaturate(a2) -// r3 := SignedSaturate(a3) -// r4 := SignedSaturate(b0) -// r5 := SignedSaturate(b1) -// r6 := SignedSaturate(b2) -// r7 := SignedSaturate(b3) -// -// https://msdn.microsoft.com/en-us/library/393t56f9%28v=vs.90%29.aspx -FORCE_INLINE __m128i _mm_packs_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_s16( - vcombine_s16(vqmovn_s32(vreinterpretq_s32_m128i(a)), - vqmovn_s32(vreinterpretq_s32_m128i(b)))); -} - -// Packs the 8 unsigned 32-bit integers from a and b into unsigned 16-bit -// integers and saturates. -// -// r0 := UnsignedSaturate(a0) -// r1 := UnsignedSaturate(a1) -// r2 := UnsignedSaturate(a2) -// r3 := UnsignedSaturate(a3) -// r4 := UnsignedSaturate(b0) -// r5 := UnsignedSaturate(b1) -// r6 := UnsignedSaturate(b2) -// r7 := UnsignedSaturate(b3) -FORCE_INLINE __m128i _mm_packus_epi32(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u16( - vcombine_u16(vqmovun_s32(vreinterpretq_s32_m128i(a)), - vqmovun_s32(vreinterpretq_s32_m128i(b)))); -} - -// Interleaves the lower 8 signed or unsigned 8-bit integers in a with the lower -// 8 signed or unsigned 8-bit integers in b. -// -// r0 := a0 -// r1 := b0 -// r2 := a1 -// r3 := b1 -// ... -// r14 := a7 -// r15 := b7 -// -// https://msdn.microsoft.com/en-us/library/xf7k860c%28v=vs.90%29.aspx -FORCE_INLINE __m128i _mm_unpacklo_epi8(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s8( - vzip1q_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -#else - int8x8_t a1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(a))); - int8x8_t b1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(b))); - int8x8x2_t result = vzip_s8(a1, b1); - return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1])); -#endif -} - -// Interleaves the lower 4 signed or unsigned 16-bit integers in a with the -// lower 4 signed or unsigned 16-bit integers in b. -// -// r0 := a0 -// r1 := b0 -// r2 := a1 -// r3 := b1 -// r4 := a2 -// r5 := b2 -// r6 := a3 -// r7 := b3 -// -// https://msdn.microsoft.com/en-us/library/btxb17bw%28v=vs.90%29.aspx -FORCE_INLINE __m128i _mm_unpacklo_epi16(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s16( - vzip1q_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -#else - int16x4_t a1 = vget_low_s16(vreinterpretq_s16_m128i(a)); - int16x4_t b1 = vget_low_s16(vreinterpretq_s16_m128i(b)); - int16x4x2_t result = vzip_s16(a1, b1); - return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1])); -#endif -} - -// Interleaves the lower 2 signed or unsigned 32 - bit integers in a with the -// lower 2 signed or unsigned 32 - bit integers in b. 
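A short sketch of the saturating behaviour of the pack operations above (hypothetical helper, arbitrary values; assumes this header is included):

static inline void example_pack_saturate(void)
{
    __m128i a = _mm_set_epi16(300, -7, 128, 127, 1, 0, -1, -200);
    __m128i s = _mm_packs_epi16(a, a);   /* signed saturation to int8    */
    __m128i u = _mm_packus_epi16(a, a);  /* unsigned saturation to uint8 */
    /* The lane holding 300 becomes 127 in s and 255 in u; the lane
       holding -200 becomes -128 in s and 0 in u. */
    (void) s;
    (void) u;
}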
-// -// r0 := a0 -// r1 := b0 -// r2 := a1 -// r3 := b1 -// -// https://msdn.microsoft.com/en-us/library/x8atst9d(v=vs.100).aspx -FORCE_INLINE __m128i _mm_unpacklo_epi32(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s32( - vzip1q_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -#else - int32x2_t a1 = vget_low_s32(vreinterpretq_s32_m128i(a)); - int32x2_t b1 = vget_low_s32(vreinterpretq_s32_m128i(b)); - int32x2x2_t result = vzip_s32(a1, b1); - return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1])); -#endif -} - -FORCE_INLINE __m128i _mm_unpacklo_epi64(__m128i a, __m128i b) -{ - int64x1_t a_l = vget_low_s64(vreinterpretq_s64_m128i(a)); - int64x1_t b_l = vget_low_s64(vreinterpretq_s64_m128i(b)); - return vreinterpretq_m128i_s64(vcombine_s64(a_l, b_l)); -} - -// Selects and interleaves the lower two single-precision, floating-point values -// from a and b. -// -// r0 := a0 -// r1 := b0 -// r2 := a1 -// r3 := b1 -// -// https://msdn.microsoft.com/en-us/library/25st103b%28v=vs.90%29.aspx -FORCE_INLINE __m128 _mm_unpacklo_ps(__m128 a, __m128 b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32( - vzip1q_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -#else - float32x2_t a1 = vget_low_f32(vreinterpretq_f32_m128(a)); - float32x2_t b1 = vget_low_f32(vreinterpretq_f32_m128(b)); - float32x2x2_t result = vzip_f32(a1, b1); - return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1])); -#endif -} - -// Selects and interleaves the upper two single-precision, floating-point values -// from a and b. -// -// r0 := a2 -// r1 := b2 -// r2 := a3 -// r3 := b3 -// -// https://msdn.microsoft.com/en-us/library/skccxx7d%28v=vs.90%29.aspx -FORCE_INLINE __m128 _mm_unpackhi_ps(__m128 a, __m128 b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128_f32( - vzip2q_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b))); -#else - float32x2_t a1 = vget_high_f32(vreinterpretq_f32_m128(a)); - float32x2_t b1 = vget_high_f32(vreinterpretq_f32_m128(b)); - float32x2x2_t result = vzip_f32(a1, b1); - return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1])); -#endif -} - -// Interleaves the upper 8 signed or unsigned 8-bit integers in a with the upper -// 8 signed or unsigned 8-bit integers in b. -// -// r0 := a8 -// r1 := b8 -// r2 := a9 -// r3 := b9 -// ... -// r14 := a15 -// r15 := b15 -// -// https://msdn.microsoft.com/en-us/library/t5h7783k(v=vs.100).aspx -FORCE_INLINE __m128i _mm_unpackhi_epi8(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s8( - vzip2q_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b))); -#else - int8x8_t a1 = - vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(a))); - int8x8_t b1 = - vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(b))); - int8x8x2_t result = vzip_s8(a1, b1); - return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1])); -#endif -} - -// Interleaves the upper 4 signed or unsigned 16-bit integers in a with the -// upper 4 signed or unsigned 16-bit integers in b. 
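The interleaving pattern of the unpack operations above, as a tiny worked example (hypothetical helper; values are arbitrary):

static inline void example_unpacklo(void)
{
    __m128i a = _mm_set_epi32(3, 2, 1, 0);  /* lanes 0..3 = 0, 1, 2, 3 */
    __m128i b = _mm_set_epi32(7, 6, 5, 4);  /* lanes 0..3 = 4, 5, 6, 7 */
    __m128i lo = _mm_unpacklo_epi32(a, b);  /* lanes 0..3 = 0, 4, 1, 5 */
    /* The corresponding unpackhi form would yield 2, 6, 3, 7. */
    (void) lo;
}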
-// -// r0 := a4 -// r1 := b4 -// r2 := a5 -// r3 := b5 -// r4 := a6 -// r5 := b6 -// r6 := a7 -// r7 := b7 -// -// https://msdn.microsoft.com/en-us/library/03196cz7(v=vs.100).aspx -FORCE_INLINE __m128i _mm_unpackhi_epi16(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s16( - vzip2q_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b))); -#else - int16x4_t a1 = vget_high_s16(vreinterpretq_s16_m128i(a)); - int16x4_t b1 = vget_high_s16(vreinterpretq_s16_m128i(b)); - int16x4x2_t result = vzip_s16(a1, b1); - return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1])); -#endif -} - -// Interleaves the upper 2 signed or unsigned 32-bit integers in a with the -// upper 2 signed or unsigned 32-bit integers in b. -// https://msdn.microsoft.com/en-us/library/65sa7cbs(v=vs.100).aspx -FORCE_INLINE __m128i _mm_unpackhi_epi32(__m128i a, __m128i b) -{ -#if defined(__aarch64__) - return vreinterpretq_m128i_s32( - vzip2q_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); -#else - int32x2_t a1 = vget_high_s32(vreinterpretq_s32_m128i(a)); - int32x2_t b1 = vget_high_s32(vreinterpretq_s32_m128i(b)); - int32x2x2_t result = vzip_s32(a1, b1); - return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1])); -#endif -} - -// Interleaves the upper signed or unsigned 64-bit integer in a with the -// upper signed or unsigned 64-bit integer in b. -// -// r0 := a1 -// r1 := b1 -FORCE_INLINE __m128i _mm_unpackhi_epi64(__m128i a, __m128i b) -{ - int64x1_t a_h = vget_high_s64(vreinterpretq_s64_m128i(a)); - int64x1_t b_h = vget_high_s64(vreinterpretq_s64_m128i(b)); - return vreinterpretq_m128i_s64(vcombine_s64(a_h, b_h)); -} - -// Horizontally compute the minimum amongst the packed unsigned 16-bit integers -// in a, store the minimum and index in dst, and zero the remaining bits in dst. -// -// index[2:0] := 0 -// min[15:0] := a[15:0] -// FOR j := 0 to 7 -// i := j*16 -// IF a[i+15:i] < min[15:0] -// index[2:0] := j -// min[15:0] := a[i+15:i] -// FI -// ENDFOR -// dst[15:0] := min[15:0] -// dst[18:16] := index[2:0] -// dst[127:19] := 0 -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_minpos_epu16 -FORCE_INLINE __m128i _mm_minpos_epu16(__m128i a) -{ - __m128i dst; - uint16_t min, idx = 0; - // Find the minimum value -#if defined(__aarch64__) - min = vminvq_u16(vreinterpretq_u16_m128i(a)); -#else - __m64 tmp; - tmp = vreinterpret_m64_u16( - vmin_u16(vget_low_u16(vreinterpretq_u16_m128i(a)), - vget_high_u16(vreinterpretq_u16_m128i(a)))); - tmp = vreinterpret_m64_u16( - vpmin_u16(vreinterpret_u16_m64(tmp), vreinterpret_u16_m64(tmp))); - tmp = vreinterpret_m64_u16( - vpmin_u16(vreinterpret_u16_m64(tmp), vreinterpret_u16_m64(tmp))); - min = vget_lane_u16(vreinterpret_u16_m64(tmp), 0); -#endif - // Get the index of the minimum value - int i; - for (i = 0; i < 8; i++) { - if (min == vgetq_lane_u16(vreinterpretq_u16_m128i(a), 0)) { - idx = (uint16_t) i; - break; - } - a = _mm_srli_si128(a, 2); - } - // Generate result - dst = _mm_setzero_si128(); - dst = vreinterpretq_m128i_u16( - vsetq_lane_u16(min, vreinterpretq_u16_m128i(dst), 0)); - dst = vreinterpretq_m128i_u16( - vsetq_lane_u16(idx, vreinterpretq_u16_m128i(dst), 1)); - return dst; -} - -// shift to right -// https://msdn.microsoft.com/en-us/library/bb514041(v=vs.120).aspx -// http://blog.csdn.net/hemmingway/article/details/44828303 -// Clang requires a macro here, as it is extremely picky about c being a -// literal. 
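A quick sketch of what _mm_minpos_epu16 above produces (hypothetical helper, arbitrary values; assumes this header is included):

static inline void example_minpos(void)
{
    __m128i a = _mm_set_epi16(9, 8, 7, 2, 7, 6, 5, 4);  /* lanes 0..7 = 4, 5, 6, 7, 2, 7, 8, 9 */
    __m128i r = _mm_minpos_epu16(a);
    /* Lane 0 of r holds the minimum (2), lane 1 holds its index (4),
       and all remaining lanes are zero. */
    (void) r;
}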
-#define _mm_alignr_epi8(a, b, c) \ - ((__m128i) vextq_s8((int8x16_t)(b), (int8x16_t)(a), (c))) - -// Compute the bitwise AND of 128 bits (representing integer data) in a and b, -// and set ZF to 1 if the result is zero, otherwise set ZF to 0. Compute the -// bitwise NOT of a and then AND with b, and set CF to 1 if the result is zero, -// otherwise set CF to 0. Return the CF value. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_testc_si128 -FORCE_INLINE int _mm_testc_si128(__m128i a, __m128i b) -{ - int64x2_t s64 = - vandq_s64(vreinterpretq_s64_s32(vmvnq_s32(vreinterpretq_s32_m128i(a))), - vreinterpretq_s64_m128i(b)); - return !(vgetq_lane_s64(s64, 0) | vgetq_lane_s64(s64, 1)); -} - -// Compute the bitwise AND of 128 bits (representing integer data) in a and b, -// and set ZF to 1 if the result is zero, otherwise set ZF to 0. Compute the -// bitwise NOT of a and then AND with b, and set CF to 1 if the result is zero, -// otherwise set CF to 0. Return the ZF value. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_testz_si128 -FORCE_INLINE int _mm_testz_si128(__m128i a, __m128i b) -{ - int64x2_t s64 = - vandq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)); - return !(vgetq_lane_s64(s64, 0) | vgetq_lane_s64(s64, 1)); -} - -// Extracts the selected signed or unsigned 8-bit integer from a and zero -// extends. -// FORCE_INLINE int _mm_extract_epi8(__m128i a, __constrange(0,16) int imm) -#define _mm_extract_epi8(a, imm) vgetq_lane_u8(vreinterpretq_u8_m128i(a), (imm)) - -// Inserts the least significant 8 bits of b into the selected 8-bit integer -// of a. -// FORCE_INLINE __m128i _mm_insert_epi8(__m128i a, int b, -// __constrange(0,16) int imm) -#define _mm_insert_epi8(a, b, imm) \ - __extension__({ \ - vreinterpretq_m128i_s8( \ - vsetq_lane_s8((b), vreinterpretq_s8_m128i(a), (imm))); \ - }) - -// Extracts the selected signed or unsigned 16-bit integer from a and zero -// extends. -// https://msdn.microsoft.com/en-us/library/6dceta0c(v=vs.100).aspx -// FORCE_INLINE int _mm_extract_epi16(__m128i a, __constrange(0,8) int imm) -#define _mm_extract_epi16(a, imm) \ - vgetq_lane_u16(vreinterpretq_u16_m128i(a), (imm)) - -// Inserts the least significant 16 bits of b into the selected 16-bit integer -// of a. -// https://msdn.microsoft.com/en-us/library/kaze8hz1%28v=vs.100%29.aspx -// FORCE_INLINE __m128i _mm_insert_epi16(__m128i a, int b, -// __constrange(0,8) int imm) -#define _mm_insert_epi16(a, b, imm) \ - __extension__({ \ - vreinterpretq_m128i_s16( \ - vsetq_lane_s16((b), vreinterpretq_s16_m128i(a), (imm))); \ - }) - -// Extracts the selected signed or unsigned 32-bit integer from a and zero -// extends. -// FORCE_INLINE int _mm_extract_epi32(__m128i a, __constrange(0,4) int imm) -#define _mm_extract_epi32(a, imm) \ - vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm)) - -// Extracts the selected single-precision (32-bit) floating-point from a. -// FORCE_INLINE int _mm_extract_ps(__m128 a, __constrange(0,4) int imm) -#define _mm_extract_ps(a, imm) vgetq_lane_s32(vreinterpretq_s32_m128(a), (imm)) - -// Inserts the least significant 32 bits of b into the selected 32-bit integer -// of a. -// FORCE_INLINE __m128i _mm_insert_epi32(__m128i a, int b, -// __constrange(0,4) int imm) -#define _mm_insert_epi32(a, b, imm) \ - __extension__({ \ - vreinterpretq_m128i_s32( \ - vsetq_lane_s32((b), vreinterpretq_s32_m128i(a), (imm))); \ - }) - -// Extracts the selected signed or unsigned 64-bit integer from a and zero -// extends. 
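A small sketch of the ZF/CF semantics implemented by _mm_testz_si128 and _mm_testc_si128 above (hypothetical helper; the masks are arbitrary):

static inline void example_test_bits(void)
{
    __m128i mask = _mm_set_epi32(0, 0, 0, 0x0f);
    __m128i data = _mm_set_epi32(0, 0, 0, 0xf0);
    int zf = _mm_testz_si128(mask, data);  /* 1: (mask & data) == 0       */
    int cf = _mm_testc_si128(mask, data);  /* 0: (~mask & data) is not 0  */
    (void) zf;
    (void) cf;
}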
-// FORCE_INLINE __int64 _mm_extract_epi64(__m128i a, __constrange(0,2) int imm) -#define _mm_extract_epi64(a, imm) \ - vgetq_lane_s64(vreinterpretq_s64_m128i(a), (imm)) - -// Inserts the least significant 64 bits of b into the selected 64-bit integer -// of a. -// FORCE_INLINE __m128i _mm_insert_epi64(__m128i a, __int64 b, -// __constrange(0,2) int imm) -#define _mm_insert_epi64(a, b, imm) \ - __extension__({ \ - vreinterpretq_m128i_s64( \ - vsetq_lane_s64((b), vreinterpretq_s64_m128i(a), (imm))); \ - }) - -// Count the number of bits set to 1 in unsigned 32-bit integer a, and -// return that count in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_popcnt_u32 -FORCE_INLINE int _mm_popcnt_u32(unsigned int a) -{ -#if defined(__aarch64__) -#if __has_builtin(__builtin_popcount) - return __builtin_popcount(a); -#else - return (int) vaddlv_u8(vcnt_u8(vcreate_u8((uint64_t) a))); -#endif -#else - uint32_t count = 0; - uint8x8_t input_val, count8x8_val; - uint16x4_t count16x4_val; - uint32x2_t count32x2_val; - - input_val = vld1_u8((uint8_t *) &a); - count8x8_val = vcnt_u8(input_val); - count16x4_val = vpaddl_u8(count8x8_val); - count32x2_val = vpaddl_u16(count16x4_val); - - vst1_u32(&count, count32x2_val); - return count; -#endif -} - -// Count the number of bits set to 1 in unsigned 64-bit integer a, and -// return that count in dst. -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_popcnt_u64 -FORCE_INLINE int64_t _mm_popcnt_u64(uint64_t a) -{ -#if defined(__aarch64__) -#if __has_builtin(__builtin_popcountll) - return __builtin_popcountll(a); -#else - return (int64_t) vaddlv_u8(vcnt_u8(vcreate_u8(a))); -#endif -#else - uint64_t count = 0; - uint8x8_t input_val, count8x8_val; - uint16x4_t count16x4_val; - uint32x2_t count32x2_val; - uint64x1_t count64x1_val; - - input_val = vld1_u8((uint8_t *) &a); - count8x8_val = vcnt_u8(input_val); - count16x4_val = vpaddl_u8(count8x8_val); - count32x2_val = vpaddl_u16(count16x4_val); - count64x1_val = vpaddl_u32(count32x2_val); - vst1_u64(&count, count64x1_val); - return count; -#endif -} - -// Macro: Transpose the 4x4 matrix formed by the 4 rows of single-precision -// (32-bit) floating-point elements in row0, row1, row2, and row3, and store the -// transposed matrix in these vectors (row0 now contains column 0, etc.). -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=MM_TRANSPOSE4_PS -#define _MM_TRANSPOSE4_PS(row0, row1, row2, row3) \ - do { \ - float32x4x2_t ROW01 = vtrnq_f32(row0, row1); \ - float32x4x2_t ROW23 = vtrnq_f32(row2, row3); \ - row0 = vcombine_f32(vget_low_f32(ROW01.val[0]), \ - vget_low_f32(ROW23.val[0])); \ - row1 = vcombine_f32(vget_low_f32(ROW01.val[1]), \ - vget_low_f32(ROW23.val[1])); \ - row2 = vcombine_f32(vget_high_f32(ROW01.val[0]), \ - vget_high_f32(ROW23.val[0])); \ - row3 = vcombine_f32(vget_high_f32(ROW01.val[1]), \ - vget_high_f32(ROW23.val[1])); \ - } while (0) - -/* Crypto Extensions */ - -#if defined(__ARM_FEATURE_CRYPTO) -// Wraps vmull_p64 -FORCE_INLINE uint64x2_t _sse2neon_vmull_p64(uint64x1_t _a, uint64x1_t _b) -{ - poly64_t a = vget_lane_p64(vreinterpret_p64_u64(_a), 0); - poly64_t b = vget_lane_p64(vreinterpret_p64_u64(_b), 0); - return vreinterpretq_u64_p128(vmull_p64(a, b)); -} -#else // ARMv7 polyfill -// ARMv7/some A64 lacks vmull_p64, but it has vmull_p8. -// -// vmull_p8 calculates 8 8-bit->16-bit polynomial multiplies, but we need a -// 64-bit->128-bit polynomial multiply. 
-// -// It needs some work and is somewhat slow, but it is still faster than all -// known scalar methods. -// -// Algorithm adapted to C from -// https://www.workofard.com/2017/07/ghash-for-low-end-cores/, which is adapted -// from "Fast Software Polynomial Multiplication on ARM Processors Using the -// NEON Engine" by Danilo Camara, Conrado Gouvea, Julio Lopez and Ricardo Dahab -// (https://hal.inria.fr/hal-01506572) -static uint64x2_t _sse2neon_vmull_p64(uint64x1_t _a, uint64x1_t _b) -{ - poly8x8_t a = vreinterpret_p8_u64(_a); - poly8x8_t b = vreinterpret_p8_u64(_b); - - // Masks - uint8x16_t k48_32 = vcombine_u8(vcreate_u8(0x0000ffffffffffff), - vcreate_u8(0x00000000ffffffff)); - uint8x16_t k16_00 = vcombine_u8(vcreate_u8(0x000000000000ffff), - vcreate_u8(0x0000000000000000)); - - // Do the multiplies, rotating with vext to get all combinations - uint8x16_t d = vreinterpretq_u8_p16(vmull_p8(a, b)); // D = A0 * B0 - uint8x16_t e = - vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 1))); // E = A0 * B1 - uint8x16_t f = - vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 1), b)); // F = A1 * B0 - uint8x16_t g = - vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 2))); // G = A0 * B2 - uint8x16_t h = - vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 2), b)); // H = A2 * B0 - uint8x16_t i = - vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 3))); // I = A0 * B3 - uint8x16_t j = - vreinterpretq_u8_p16(vmull_p8(vext_p8(a, a, 3), b)); // J = A3 * B0 - uint8x16_t k = - vreinterpretq_u8_p16(vmull_p8(a, vext_p8(b, b, 4))); // L = A0 * B4 - - // Add cross products - uint8x16_t l = veorq_u8(e, f); // L = E + F - uint8x16_t m = veorq_u8(g, h); // M = G + H - uint8x16_t n = veorq_u8(i, j); // N = I + J - - // Interleave. Using vzip1 and vzip2 prevents Clang from emitting TBL - // instructions. 
-#if defined(__aarch64__) - uint8x16_t lm_p0 = vreinterpretq_u8_u64( - vzip1q_u64(vreinterpretq_u64_u8(l), vreinterpretq_u64_u8(m))); - uint8x16_t lm_p1 = vreinterpretq_u8_u64( - vzip2q_u64(vreinterpretq_u64_u8(l), vreinterpretq_u64_u8(m))); - uint8x16_t nk_p0 = vreinterpretq_u8_u64( - vzip1q_u64(vreinterpretq_u64_u8(n), vreinterpretq_u64_u8(k))); - uint8x16_t nk_p1 = vreinterpretq_u8_u64( - vzip2q_u64(vreinterpretq_u64_u8(n), vreinterpretq_u64_u8(k))); -#else - uint8x16_t lm_p0 = vcombine_u8(vget_low_u8(l), vget_low_u8(m)); - uint8x16_t lm_p1 = vcombine_u8(vget_high_u8(l), vget_high_u8(m)); - uint8x16_t nk_p0 = vcombine_u8(vget_low_u8(n), vget_low_u8(k)); - uint8x16_t nk_p1 = vcombine_u8(vget_high_u8(n), vget_high_u8(k)); -#endif - // t0 = (L) (P0 + P1) << 8 - // t1 = (M) (P2 + P3) << 16 - uint8x16_t t0t1_tmp = veorq_u8(lm_p0, lm_p1); - uint8x16_t t0t1_h = vandq_u8(lm_p1, k48_32); - uint8x16_t t0t1_l = veorq_u8(t0t1_tmp, t0t1_h); - - // t2 = (N) (P4 + P5) << 24 - // t3 = (K) (P6 + P7) << 32 - uint8x16_t t2t3_tmp = veorq_u8(nk_p0, nk_p1); - uint8x16_t t2t3_h = vandq_u8(nk_p1, k16_00); - uint8x16_t t2t3_l = veorq_u8(t2t3_tmp, t2t3_h); - - // De-interleave -#if defined(__aarch64__) - uint8x16_t t0 = vreinterpretq_u8_u64( - vuzp1q_u64(vreinterpretq_u64_u8(t0t1_l), vreinterpretq_u64_u8(t0t1_h))); - uint8x16_t t1 = vreinterpretq_u8_u64( - vuzp2q_u64(vreinterpretq_u64_u8(t0t1_l), vreinterpretq_u64_u8(t0t1_h))); - uint8x16_t t2 = vreinterpretq_u8_u64( - vuzp1q_u64(vreinterpretq_u64_u8(t2t3_l), vreinterpretq_u64_u8(t2t3_h))); - uint8x16_t t3 = vreinterpretq_u8_u64( - vuzp2q_u64(vreinterpretq_u64_u8(t2t3_l), vreinterpretq_u64_u8(t2t3_h))); -#else - uint8x16_t t1 = vcombine_u8(vget_high_u8(t0t1_l), vget_high_u8(t0t1_h)); - uint8x16_t t0 = vcombine_u8(vget_low_u8(t0t1_l), vget_low_u8(t0t1_h)); - uint8x16_t t3 = vcombine_u8(vget_high_u8(t2t3_l), vget_high_u8(t2t3_h)); - uint8x16_t t2 = vcombine_u8(vget_low_u8(t2t3_l), vget_low_u8(t2t3_h)); -#endif - // Shift the cross products - uint8x16_t t0_shift = vextq_u8(t0, t0, 15); // t0 << 8 - uint8x16_t t1_shift = vextq_u8(t1, t1, 14); // t1 << 16 - uint8x16_t t2_shift = vextq_u8(t2, t2, 13); // t2 << 24 - uint8x16_t t3_shift = vextq_u8(t3, t3, 12); // t3 << 32 - - // Accumulate the products - uint8x16_t cross1 = veorq_u8(t0_shift, t1_shift); - uint8x16_t cross2 = veorq_u8(t2_shift, t3_shift); - uint8x16_t mix = veorq_u8(d, cross1); - uint8x16_t r = veorq_u8(mix, cross2); - return vreinterpretq_u64_u8(r); -} -#endif // ARMv7 polyfill - -FORCE_INLINE __m128i _mm_clmulepi64_si128(__m128i _a, __m128i _b, const int imm) -{ - uint64x2_t a = vreinterpretq_u64_m128i(_a); - uint64x2_t b = vreinterpretq_u64_m128i(_b); - switch (imm & 0x11) { - case 0x00: - return vreinterpretq_m128i_u64( - _sse2neon_vmull_p64(vget_low_u64(a), vget_low_u64(b))); - case 0x01: - return vreinterpretq_m128i_u64( - _sse2neon_vmull_p64(vget_high_u64(a), vget_low_u64(b))); - case 0x10: - return vreinterpretq_m128i_u64( - _sse2neon_vmull_p64(vget_low_u64(a), vget_high_u64(b))); - case 0x11: - return vreinterpretq_m128i_u64( - _sse2neon_vmull_p64(vget_high_u64(a), vget_high_u64(b))); - default: - abort(); - } -} - -#if !defined(__ARM_FEATURE_CRYPTO) -/* clang-format off */ -#define SSE2NEON_AES_DATA(w) \ - { \ - w(0x63), w(0x7c), w(0x77), w(0x7b), w(0xf2), w(0x6b), w(0x6f), \ - w(0xc5), w(0x30), w(0x01), w(0x67), w(0x2b), w(0xfe), w(0xd7), \ - w(0xab), w(0x76), w(0xca), w(0x82), w(0xc9), w(0x7d), w(0xfa), \ - w(0x59), w(0x47), w(0xf0), w(0xad), w(0xd4), w(0xa2), w(0xaf), \ - w(0x9c), w(0xa4), 
w(0x72), w(0xc0), w(0xb7), w(0xfd), w(0x93), \ - w(0x26), w(0x36), w(0x3f), w(0xf7), w(0xcc), w(0x34), w(0xa5), \ - w(0xe5), w(0xf1), w(0x71), w(0xd8), w(0x31), w(0x15), w(0x04), \ - w(0xc7), w(0x23), w(0xc3), w(0x18), w(0x96), w(0x05), w(0x9a), \ - w(0x07), w(0x12), w(0x80), w(0xe2), w(0xeb), w(0x27), w(0xb2), \ - w(0x75), w(0x09), w(0x83), w(0x2c), w(0x1a), w(0x1b), w(0x6e), \ - w(0x5a), w(0xa0), w(0x52), w(0x3b), w(0xd6), w(0xb3), w(0x29), \ - w(0xe3), w(0x2f), w(0x84), w(0x53), w(0xd1), w(0x00), w(0xed), \ - w(0x20), w(0xfc), w(0xb1), w(0x5b), w(0x6a), w(0xcb), w(0xbe), \ - w(0x39), w(0x4a), w(0x4c), w(0x58), w(0xcf), w(0xd0), w(0xef), \ - w(0xaa), w(0xfb), w(0x43), w(0x4d), w(0x33), w(0x85), w(0x45), \ - w(0xf9), w(0x02), w(0x7f), w(0x50), w(0x3c), w(0x9f), w(0xa8), \ - w(0x51), w(0xa3), w(0x40), w(0x8f), w(0x92), w(0x9d), w(0x38), \ - w(0xf5), w(0xbc), w(0xb6), w(0xda), w(0x21), w(0x10), w(0xff), \ - w(0xf3), w(0xd2), w(0xcd), w(0x0c), w(0x13), w(0xec), w(0x5f), \ - w(0x97), w(0x44), w(0x17), w(0xc4), w(0xa7), w(0x7e), w(0x3d), \ - w(0x64), w(0x5d), w(0x19), w(0x73), w(0x60), w(0x81), w(0x4f), \ - w(0xdc), w(0x22), w(0x2a), w(0x90), w(0x88), w(0x46), w(0xee), \ - w(0xb8), w(0x14), w(0xde), w(0x5e), w(0x0b), w(0xdb), w(0xe0), \ - w(0x32), w(0x3a), w(0x0a), w(0x49), w(0x06), w(0x24), w(0x5c), \ - w(0xc2), w(0xd3), w(0xac), w(0x62), w(0x91), w(0x95), w(0xe4), \ - w(0x79), w(0xe7), w(0xc8), w(0x37), w(0x6d), w(0x8d), w(0xd5), \ - w(0x4e), w(0xa9), w(0x6c), w(0x56), w(0xf4), w(0xea), w(0x65), \ - w(0x7a), w(0xae), w(0x08), w(0xba), w(0x78), w(0x25), w(0x2e), \ - w(0x1c), w(0xa6), w(0xb4), w(0xc6), w(0xe8), w(0xdd), w(0x74), \ - w(0x1f), w(0x4b), w(0xbd), w(0x8b), w(0x8a), w(0x70), w(0x3e), \ - w(0xb5), w(0x66), w(0x48), w(0x03), w(0xf6), w(0x0e), w(0x61), \ - w(0x35), w(0x57), w(0xb9), w(0x86), w(0xc1), w(0x1d), w(0x9e), \ - w(0xe1), w(0xf8), w(0x98), w(0x11), w(0x69), w(0xd9), w(0x8e), \ - w(0x94), w(0x9b), w(0x1e), w(0x87), w(0xe9), w(0xce), w(0x55), \ - w(0x28), w(0xdf), w(0x8c), w(0xa1), w(0x89), w(0x0d), w(0xbf), \ - w(0xe6), w(0x42), w(0x68), w(0x41), w(0x99), w(0x2d), w(0x0f), \ - w(0xb0), w(0x54), w(0xbb), w(0x16) \ - } -/* clang-format on */ - -/* X Macro trick. See https://en.wikipedia.org/wiki/X_Macro */ -#define SSE2NEON_AES_H0(x) (x) -static const uint8_t SSE2NEON_sbox[256] = SSE2NEON_AES_DATA(SSE2NEON_AES_H0); -#undef SSE2NEON_AES_H0 - -// In the absence of crypto extensions, implement aesenc using regular neon -// intrinsics instead. See: -// https://www.workofard.com/2017/01/accelerated-aes-for-the-arm64-linux-kernel/ -// https://www.workofard.com/2017/07/ghash-for-low-end-cores/ and -// https://github.com/ColinIanKing/linux-next-mirror/blob/b5f466091e130caaf0735976648f72bd5e09aa84/crypto/aegis128-neon-inner.c#L52 -// for more information Reproduced with permission of the author. 
-FORCE_INLINE __m128i _mm_aesenc_si128(__m128i EncBlock, __m128i RoundKey) -{ -#if defined(__aarch64__) - static const uint8_t shift_rows[] = {0x0, 0x5, 0xa, 0xf, 0x4, 0x9, - 0xe, 0x3, 0x8, 0xd, 0x2, 0x7, - 0xc, 0x1, 0x6, 0xb}; - static const uint8_t ror32by8[] = {0x1, 0x2, 0x3, 0x0, 0x5, 0x6, 0x7, 0x4, - 0x9, 0xa, 0xb, 0x8, 0xd, 0xe, 0xf, 0xc}; - - uint8x16_t v; - uint8x16_t w = vreinterpretq_u8_m128i(EncBlock); - - // shift rows - w = vqtbl1q_u8(w, vld1q_u8(shift_rows)); - - // sub bytes - v = vqtbl4q_u8(vld1q_u8_x4(SSE2NEON_sbox), w); - v = vqtbx4q_u8(v, vld1q_u8_x4(SSE2NEON_sbox + 0x40), w - 0x40); - v = vqtbx4q_u8(v, vld1q_u8_x4(SSE2NEON_sbox + 0x80), w - 0x80); - v = vqtbx4q_u8(v, vld1q_u8_x4(SSE2NEON_sbox + 0xc0), w - 0xc0); - - // mix columns - w = (v << 1) ^ (uint8x16_t)(((int8x16_t) v >> 7) & 0x1b); - w ^= (uint8x16_t) vrev32q_u16((uint16x8_t) v); - w ^= vqtbl1q_u8(v ^ w, vld1q_u8(ror32by8)); - - // add round key - return vreinterpretq_m128i_u8(w) ^ RoundKey; - -#else /* ARMv7-A NEON implementation */ -#define SSE2NEON_AES_B2W(b0, b1, b2, b3) \ - (((uint32_t)(b3) << 24) | ((uint32_t)(b2) << 16) | ((uint32_t)(b1) << 8) | \ - (b0)) -#define SSE2NEON_AES_F2(x) ((x << 1) ^ (((x >> 7) & 1) * 0x011b /* WPOLY */)) -#define SSE2NEON_AES_F3(x) (SSE2NEON_AES_F2(x) ^ x) -#define SSE2NEON_AES_U0(p) \ - SSE2NEON_AES_B2W(SSE2NEON_AES_F2(p), p, p, SSE2NEON_AES_F3(p)) -#define SSE2NEON_AES_U1(p) \ - SSE2NEON_AES_B2W(SSE2NEON_AES_F3(p), SSE2NEON_AES_F2(p), p, p) -#define SSE2NEON_AES_U2(p) \ - SSE2NEON_AES_B2W(p, SSE2NEON_AES_F3(p), SSE2NEON_AES_F2(p), p) -#define SSE2NEON_AES_U3(p) \ - SSE2NEON_AES_B2W(p, p, SSE2NEON_AES_F3(p), SSE2NEON_AES_F2(p)) - static const uint32_t ALIGN_STRUCT(16) aes_table[4][256] = { - SSE2NEON_AES_DATA(SSE2NEON_AES_U0), - SSE2NEON_AES_DATA(SSE2NEON_AES_U1), - SSE2NEON_AES_DATA(SSE2NEON_AES_U2), - SSE2NEON_AES_DATA(SSE2NEON_AES_U3), - }; -#undef SSE2NEON_AES_B2W -#undef SSE2NEON_AES_F2 -#undef SSE2NEON_AES_F3 -#undef SSE2NEON_AES_U0 -#undef SSE2NEON_AES_U1 -#undef SSE2NEON_AES_U2 -#undef SSE2NEON_AES_U3 - - uint32_t x0 = _mm_cvtsi128_si32(EncBlock); - uint32_t x1 = _mm_cvtsi128_si32(_mm_shuffle_epi32(EncBlock, 0x55)); - uint32_t x2 = _mm_cvtsi128_si32(_mm_shuffle_epi32(EncBlock, 0xAA)); - uint32_t x3 = _mm_cvtsi128_si32(_mm_shuffle_epi32(EncBlock, 0xFF)); - - __m128i out = _mm_set_epi32( - (aes_table[0][x3 & 0xff] ^ aes_table[1][(x0 >> 8) & 0xff] ^ - aes_table[2][(x1 >> 16) & 0xff] ^ aes_table[3][x2 >> 24]), - (aes_table[0][x2 & 0xff] ^ aes_table[1][(x3 >> 8) & 0xff] ^ - aes_table[2][(x0 >> 16) & 0xff] ^ aes_table[3][x1 >> 24]), - (aes_table[0][x1 & 0xff] ^ aes_table[1][(x2 >> 8) & 0xff] ^ - aes_table[2][(x3 >> 16) & 0xff] ^ aes_table[3][x0 >> 24]), - (aes_table[0][x0 & 0xff] ^ aes_table[1][(x1 >> 8) & 0xff] ^ - aes_table[2][(x2 >> 16) & 0xff] ^ aes_table[3][x3 >> 24])); - - return _mm_xor_si128(out, RoundKey); -#endif -} - -FORCE_INLINE __m128i _mm_aesenclast_si128(__m128i a, __m128i RoundKey) -{ - /* FIXME: optimized for NEON */ - uint8_t v[4][4] = { - [0] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 0)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 5)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 10)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 15)]}, - [1] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 4)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 9)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 14)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 3)]}, - [2] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 8)], - 
SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 13)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 2)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 7)]}, - [3] = {SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 12)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 1)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 6)], - SSE2NEON_sbox[vreinterpretq_nth_u8_m128i(a, 11)]}, - }; - for (int i = 0; i < 16; i++) - vreinterpretq_nth_u8_m128i(a, i) = - v[i / 4][i % 4] ^ vreinterpretq_nth_u8_m128i(RoundKey, i); - return a; -} - -// Emits the Advanced Encryption Standard (AES) instruction aeskeygenassist. -// This instruction generates a round key for AES encryption. See -// https://kazakov.life/2017/11/01/cryptocurrency-mining-on-ios-devices/ -// for details. -// -// https://msdn.microsoft.com/en-us/library/cc714138(v=vs.120).aspx -FORCE_INLINE __m128i _mm_aeskeygenassist_si128(__m128i key, const int rcon) -{ - uint32_t X1 = _mm_cvtsi128_si32(_mm_shuffle_epi32(key, 0x55)); - uint32_t X3 = _mm_cvtsi128_si32(_mm_shuffle_epi32(key, 0xFF)); - for (int i = 0; i < 4; ++i) { - ((uint8_t *) &X1)[i] = SSE2NEON_sbox[((uint8_t *) &X1)[i]]; - ((uint8_t *) &X3)[i] = SSE2NEON_sbox[((uint8_t *) &X3)[i]]; - } - return _mm_set_epi32(((X3 >> 8) | (X3 << 24)) ^ rcon, X3, - ((X1 >> 8) | (X1 << 24)) ^ rcon, X1); -} -#undef SSE2NEON_AES_DATA - -#else /* __ARM_FEATURE_CRYPTO */ -// Implements equivalent of 'aesenc' by combining AESE (with an empty key) and -// AESMC and then manually applying the real key as an xor operation. This -// unfortunately means an additional xor op; the compiler should be able to -// optimize this away for repeated calls however. See -// https://blog.michaelbrase.com/2018/05/08/emulating-x86-aes-intrinsics-on-armv8-a -// for more details. -FORCE_INLINE __m128i _mm_aesenc_si128(__m128i a, __m128i b) -{ - return vreinterpretq_m128i_u8( - vaesmcq_u8(vaeseq_u8(vreinterpretq_u8_m128i(a), vdupq_n_u8(0))) ^ - vreinterpretq_u8_m128i(b)); -} - -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_aesenclast_si128 -FORCE_INLINE __m128i _mm_aesenclast_si128(__m128i a, __m128i RoundKey) -{ - return _mm_xor_si128(vreinterpretq_m128i_u8(vaeseq_u8( - vreinterpretq_u8_m128i(a), vdupq_n_u8(0))), - RoundKey); -} - -FORCE_INLINE __m128i _mm_aeskeygenassist_si128(__m128i a, const int rcon) -{ - // AESE does ShiftRows and SubBytes on A - uint8x16_t u8 = vaeseq_u8(vreinterpretq_u8_m128i(a), vdupq_n_u8(0)); - - uint8x16_t dest = { - // Undo ShiftRows step from AESE and extract X1 and X3 - u8[0x4], u8[0x1], u8[0xE], u8[0xB], // SubBytes(X1) - u8[0x1], u8[0xE], u8[0xB], u8[0x4], // ROT(SubBytes(X1)) - u8[0xC], u8[0x9], u8[0x6], u8[0x3], // SubBytes(X3) - u8[0x9], u8[0x6], u8[0x3], u8[0xC], // ROT(SubBytes(X3)) - }; - uint32x4_t r = {0, (unsigned) rcon, 0, (unsigned) rcon}; - return vreinterpretq_m128i_u8(dest) ^ vreinterpretq_m128i_u32(r); -} -#endif - -/* Streaming Extensions */ - -// Guarantees that every preceding store is globally visible before any -// subsequent store. -// https://msdn.microsoft.com/en-us/library/5h2w73d1%28v=vs.90%29.aspx -FORCE_INLINE void _mm_sfence(void) -{ - __sync_synchronize(); -} - -// Store 128-bits (composed of 4 packed single-precision (32-bit) floating- -// point elements) from a into memory using a non-temporal memory hint. 
-// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_ps -FORCE_INLINE void _mm_stream_ps(float *p, __m128 a) -{ -#if __has_builtin(__builtin_nontemporal_store) - __builtin_nontemporal_store(a, (float32x4_t *) p); -#else - vst1q_f32(p, vreinterpretq_f32_m128(a)); -#endif -} - -// Stores the data in a to the address p without polluting the caches. If the -// cache line containing address p is already in the cache, the cache will be -// updated. -// https://msdn.microsoft.com/en-us/library/ba08y07y%28v=vs.90%29.aspx -FORCE_INLINE void _mm_stream_si128(__m128i *p, __m128i a) -{ -#if __has_builtin(__builtin_nontemporal_store) - __builtin_nontemporal_store(a, p); -#else - vst1q_s64((int64_t *) p, vreinterpretq_s64_m128i(a)); -#endif -} - -// Load 128-bits of integer data from memory into dst using a non-temporal -// memory hint. mem_addr must be aligned on a 16-byte boundary or a -// general-protection exception may be generated. -// -// dst[127:0] := MEM[mem_addr+127:mem_addr] -// -// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_load_si128 -FORCE_INLINE __m128i _mm_stream_load_si128(__m128i *p) -{ -#if __has_builtin(__builtin_nontemporal_store) - return __builtin_nontemporal_load(p); -#else - return vreinterpretq_m128i_s64(vld1q_s64((int64_t *) p)); -#endif -} - -// Cache line containing p is flushed and invalidated from all caches in the -// coherency domain. : -// https://msdn.microsoft.com/en-us/library/ba08y07y(v=vs.100).aspx -FORCE_INLINE void _mm_clflush(void const *p) -{ - (void) p; - // no corollary for Neon? -} - -// Allocate aligned blocks of memory. -// https://software.intel.com/en-us/ -// cpp-compiler-developer-guide-and-reference-allocating-and-freeing-aligned-memory-blocks -FORCE_INLINE void *_mm_malloc(size_t size, size_t align) -{ - void *ptr; - if (align == 1) - return malloc(size); - if (align == 2 || (sizeof(void *) == 8 && align == 4)) - align = sizeof(void *); - if (!posix_memalign(&ptr, align, size)) - return ptr; - return NULL; -} - -FORCE_INLINE void _mm_free(void *addr) -{ - free(addr); -} - -// Starting with the initial value in crc, accumulates a CRC32 value for -// unsigned 8-bit integer v. -// https://msdn.microsoft.com/en-us/library/bb514036(v=vs.100) -FORCE_INLINE uint32_t _mm_crc32_u8(uint32_t crc, uint8_t v) -{ -#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) - __asm__ __volatile__("crc32cb %w[c], %w[c], %w[v]\n\t" - : [c] "+r"(crc) - : [v] "r"(v)); -#else - crc ^= v; - for (int bit = 0; bit < 8; bit++) { - if (crc & 1) - crc = (crc >> 1) ^ UINT32_C(0x82f63b78); - else - crc = (crc >> 1); - } -#endif - return crc; -} - -// Starting with the initial value in crc, accumulates a CRC32 value for -// unsigned 16-bit integer v. -// https://msdn.microsoft.com/en-us/library/bb531411(v=vs.100) -FORCE_INLINE uint32_t _mm_crc32_u16(uint32_t crc, uint16_t v) -{ -#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) - __asm__ __volatile__("crc32ch %w[c], %w[c], %w[v]\n\t" - : [c] "+r"(crc) - : [v] "r"(v)); -#else - crc = _mm_crc32_u8(crc, v & 0xff); - crc = _mm_crc32_u8(crc, (v >> 8) & 0xff); -#endif - return crc; -} - -// Starting with the initial value in crc, accumulates a CRC32 value for -// unsigned 32-bit integer v. 
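The CRC32 helpers above are typically chained over a buffer; a minimal sketch using one common convention (all-ones seed, final inversion) to compute a CRC-32C checksum. The helper name is illustrative and the sketch assumes this header is included.

static inline uint32_t example_crc32c(const uint8_t *buf, size_t len)
{
    uint32_t crc = 0xFFFFFFFF;        /* conventional initial value   */
    for (size_t i = 0; i < len; i++)
        crc = _mm_crc32_u8(crc, buf[i]);
    return ~crc;                      /* conventional final inversion */
}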
-// https://msdn.microsoft.com/en-us/library/bb531394(v=vs.100) -FORCE_INLINE uint32_t _mm_crc32_u32(uint32_t crc, uint32_t v) -{ -#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) - __asm__ __volatile__("crc32cw %w[c], %w[c], %w[v]\n\t" - : [c] "+r"(crc) - : [v] "r"(v)); -#else - crc = _mm_crc32_u16(crc, v & 0xffff); - crc = _mm_crc32_u16(crc, (v >> 16) & 0xffff); -#endif - return crc; -} - -// Starting with the initial value in crc, accumulates a CRC32 value for -// unsigned 64-bit integer v. -// https://msdn.microsoft.com/en-us/library/bb514033(v=vs.100) -FORCE_INLINE uint64_t _mm_crc32_u64(uint64_t crc, uint64_t v) -{ -#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32) - __asm__ __volatile__("crc32cx %w[c], %w[c], %x[v]\n\t" - : [c] "+r"(crc) - : [v] "r"(v)); -#else - crc = _mm_crc32_u32((uint32_t)(crc), v & 0xffffffff); - crc = _mm_crc32_u32((uint32_t)(crc), (v >> 32) & 0xffffffff); -#endif - return crc; -} - -#if defined(__GNUC__) || defined(__clang__) -#pragma pop_macro("ALIGN_STRUCT") -#pragma pop_macro("FORCE_INLINE") -#endif - -#if defined(__GNUC__) -#pragma GCC pop_options -#endif - -#endif diff --git a/venv/Include/site/python3.9/pygame/mask.h b/venv/Include/site/python3.9/pygame/mask.h deleted file mode 100644 index 45ad8c5..0000000 --- a/venv/Include/site/python3.9/pygame/mask.h +++ /dev/null @@ -1,7 +0,0 @@ -#ifndef PGMASK_INTERNAL_H -#define PGMASK_INTERNAL_H - -#include "include/pygame_mask.h" -#define PYGAMEAPI_MASK_NUMSLOTS 1 - -#endif /* ~PGMASK_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/mixer.h b/venv/Include/site/python3.9/pygame/mixer.h deleted file mode 100644 index 97f5a0f..0000000 --- a/venv/Include/site/python3.9/pygame/mixer.h +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef MIXER_INTERNAL_H -#define MIXER_INTERNAL_H - -#include - -/* test mixer initializations */ -#define MIXER_INIT_CHECK() \ - if (!SDL_WasInit(SDL_INIT_AUDIO)) \ - return RAISE(pgExc_SDLError, "mixer not initialized") - -#define PYGAMEAPI_MIXER_NUMSLOTS 5 -#include "include/pygame_mixer.h" - -#endif /* ~MIXER_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/palette.h b/venv/Include/site/python3.9/pygame/palette.h deleted file mode 100644 index 1ae4cf6..0000000 --- a/venv/Include/site/python3.9/pygame/palette.h +++ /dev/null @@ -1,123 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -#ifndef PALETTE_H -#define PALETTE_H - -#include - -/* SDL 2 does not assign a default palette color scheme to a new 8 bit - * surface. Instead, the palette is set all white. This defines the SDL 1.2 - * default palette. 
- */ -static const SDL_Color default_palette_colors[] = { - {0, 0, 0, 255}, {0, 0, 85, 255}, {0, 0, 170, 255}, - {0, 0, 255, 255}, {0, 36, 0, 255}, {0, 36, 85, 255}, - {0, 36, 170, 255}, {0, 36, 255, 255}, {0, 73, 0, 255}, - {0, 73, 85, 255}, {0, 73, 170, 255}, {0, 73, 255, 255}, - {0, 109, 0, 255}, {0, 109, 85, 255}, {0, 109, 170, 255}, - {0, 109, 255, 255}, {0, 146, 0, 255}, {0, 146, 85, 255}, - {0, 146, 170, 255}, {0, 146, 255, 255}, {0, 182, 0, 255}, - {0, 182, 85, 255}, {0, 182, 170, 255}, {0, 182, 255, 255}, - {0, 219, 0, 255}, {0, 219, 85, 255}, {0, 219, 170, 255}, - {0, 219, 255, 255}, {0, 255, 0, 255}, {0, 255, 85, 255}, - {0, 255, 170, 255}, {0, 255, 255, 255}, {85, 0, 0, 255}, - {85, 0, 85, 255}, {85, 0, 170, 255}, {85, 0, 255, 255}, - {85, 36, 0, 255}, {85, 36, 85, 255}, {85, 36, 170, 255}, - {85, 36, 255, 255}, {85, 73, 0, 255}, {85, 73, 85, 255}, - {85, 73, 170, 255}, {85, 73, 255, 255}, {85, 109, 0, 255}, - {85, 109, 85, 255}, {85, 109, 170, 255}, {85, 109, 255, 255}, - {85, 146, 0, 255}, {85, 146, 85, 255}, {85, 146, 170, 255}, - {85, 146, 255, 255}, {85, 182, 0, 255}, {85, 182, 85, 255}, - {85, 182, 170, 255}, {85, 182, 255, 255}, {85, 219, 0, 255}, - {85, 219, 85, 255}, {85, 219, 170, 255}, {85, 219, 255, 255}, - {85, 255, 0, 255}, {85, 255, 85, 255}, {85, 255, 170, 255}, - {85, 255, 255, 255}, {170, 0, 0, 255}, {170, 0, 85, 255}, - {170, 0, 170, 255}, {170, 0, 255, 255}, {170, 36, 0, 255}, - {170, 36, 85, 255}, {170, 36, 170, 255}, {170, 36, 255, 255}, - {170, 73, 0, 255}, {170, 73, 85, 255}, {170, 73, 170, 255}, - {170, 73, 255, 255}, {170, 109, 0, 255}, {170, 109, 85, 255}, - {170, 109, 170, 255}, {170, 109, 255, 255}, {170, 146, 0, 255}, - {170, 146, 85, 255}, {170, 146, 170, 255}, {170, 146, 255, 255}, - {170, 182, 0, 255}, {170, 182, 85, 255}, {170, 182, 170, 255}, - {170, 182, 255, 255}, {170, 219, 0, 255}, {170, 219, 85, 255}, - {170, 219, 170, 255}, {170, 219, 255, 255}, {170, 255, 0, 255}, - {170, 255, 85, 255}, {170, 255, 170, 255}, {170, 255, 255, 255}, - {255, 0, 0, 255}, {255, 0, 85, 255}, {255, 0, 170, 255}, - {255, 0, 255, 255}, {255, 36, 0, 255}, {255, 36, 85, 255}, - {255, 36, 170, 255}, {255, 36, 255, 255}, {255, 73, 0, 255}, - {255, 73, 85, 255}, {255, 73, 170, 255}, {255, 73, 255, 255}, - {255, 109, 0, 255}, {255, 109, 85, 255}, {255, 109, 170, 255}, - {255, 109, 255, 255}, {255, 146, 0, 255}, {255, 146, 85, 255}, - {255, 146, 170, 255}, {255, 146, 255, 255}, {255, 182, 0, 255}, - {255, 182, 85, 255}, {255, 182, 170, 255}, {255, 182, 255, 255}, - {255, 219, 0, 255}, {255, 219, 85, 255}, {255, 219, 170, 255}, - {255, 219, 255, 255}, {255, 255, 0, 255}, {255, 255, 85, 255}, - {255, 255, 170, 255}, {255, 255, 255, 255}, {0, 0, 0, 255}, - {0, 0, 85, 255}, {0, 0, 170, 255}, {0, 0, 255, 255}, - {0, 36, 0, 255}, {0, 36, 85, 255}, {0, 36, 170, 255}, - {0, 36, 255, 255}, {0, 73, 0, 255}, {0, 73, 85, 255}, - {0, 73, 170, 255}, {0, 73, 255, 255}, {0, 109, 0, 255}, - {0, 109, 85, 255}, {0, 109, 170, 255}, {0, 109, 255, 255}, - {0, 146, 0, 255}, {0, 146, 85, 255}, {0, 146, 170, 255}, - {0, 146, 255, 255}, {0, 182, 0, 255}, {0, 182, 85, 255}, - {0, 182, 170, 255}, {0, 182, 255, 255}, {0, 219, 0, 255}, - {0, 219, 85, 255}, {0, 219, 170, 255}, {0, 219, 255, 255}, - {0, 255, 0, 255}, {0, 255, 85, 255}, {0, 255, 170, 255}, - {0, 255, 255, 255}, {85, 0, 0, 255}, {85, 0, 85, 255}, - {85, 0, 170, 255}, {85, 0, 255, 255}, {85, 36, 0, 255}, - {85, 36, 85, 255}, {85, 36, 170, 255}, {85, 36, 255, 255}, - {85, 73, 0, 255}, {85, 73, 85, 255}, {85, 73, 170, 255}, - {85, 73, 255, 
255}, {85, 109, 0, 255}, {85, 109, 85, 255}, - {85, 109, 170, 255}, {85, 109, 255, 255}, {85, 146, 0, 255}, - {85, 146, 85, 255}, {85, 146, 170, 255}, {85, 146, 255, 255}, - {85, 182, 0, 255}, {85, 182, 85, 255}, {85, 182, 170, 255}, - {85, 182, 255, 255}, {85, 219, 0, 255}, {85, 219, 85, 255}, - {85, 219, 170, 255}, {85, 219, 255, 255}, {85, 255, 0, 255}, - {85, 255, 85, 255}, {85, 255, 170, 255}, {85, 255, 255, 255}, - {170, 0, 0, 255}, {170, 0, 85, 255}, {170, 0, 170, 255}, - {170, 0, 255, 255}, {170, 36, 0, 255}, {170, 36, 85, 255}, - {170, 36, 170, 255}, {170, 36, 255, 255}, {170, 73, 0, 255}, - {170, 73, 85, 255}, {170, 73, 170, 255}, {170, 73, 255, 255}, - {170, 109, 0, 255}, {170, 109, 85, 255}, {170, 109, 170, 255}, - {170, 109, 255, 255}, {170, 146, 0, 255}, {170, 146, 85, 255}, - {170, 146, 170, 255}, {170, 146, 255, 255}, {170, 182, 0, 255}, - {170, 182, 85, 255}, {170, 182, 170, 255}, {170, 182, 255, 255}, - {170, 219, 0, 255}, {170, 219, 85, 255}, {170, 219, 170, 255}, - {170, 219, 255, 255}, {170, 255, 0, 255}, {170, 255, 85, 255}, - {170, 255, 170, 255}, {170, 255, 255, 255}, {255, 0, 0, 255}, - {255, 0, 85, 255}, {255, 0, 170, 255}, {255, 0, 255, 255}, - {255, 36, 0, 255}, {255, 36, 85, 255}, {255, 36, 170, 255}, - {255, 36, 255, 255}, {255, 73, 0, 255}, {255, 73, 85, 255}, - {255, 73, 170, 255}, {255, 73, 255, 255}, {255, 109, 0, 255}, - {255, 109, 85, 255}, {255, 109, 170, 255}, {255, 109, 255, 255}, - {255, 146, 0, 255}, {255, 146, 85, 255}, {255, 146, 170, 255}, - {255, 146, 255, 255}, {255, 182, 0, 255}, {255, 182, 85, 255}, - {255, 182, 170, 255}, {255, 182, 255, 255}, {255, 219, 0, 255}, - {255, 219, 85, 255}, {255, 219, 170, 255}, {255, 219, 255, 255}, - {255, 255, 0, 255}, {255, 255, 85, 255}, {255, 255, 170, 255}, - {255, 255, 255, 255}}; - -static const int default_palette_size = - (int)(sizeof(default_palette_colors) / sizeof(SDL_Color)); - -#endif diff --git a/venv/Include/site/python3.9/pygame/pgarrinter.h b/venv/Include/site/python3.9/pygame/pgarrinter.h deleted file mode 100644 index 5ba096b..0000000 --- a/venv/Include/site/python3.9/pygame/pgarrinter.h +++ /dev/null @@ -1,26 +0,0 @@ -/* array structure interface version 3 declarations */ - -#if !defined(PG_ARRAYINTER_HEADER) -#define PG_ARRAYINTER_HEADER - -static const int PAI_CONTIGUOUS = 0x01; -static const int PAI_FORTRAN = 0x02; -static const int PAI_ALIGNED = 0x100; -static const int PAI_NOTSWAPPED = 0x200; -static const int PAI_WRITEABLE = 0x400; -static const int PAI_ARR_HAS_DESCR = 0x800; - -typedef struct { - int two; /* contains the integer 2 -- simple sanity check */ - int nd; /* number of dimensions */ - char typekind; /* kind in array -- character code of typestr */ - int itemsize; /* size of each element */ - int flags; /* flags indicating how the data should be */ - /* interpreted */ - Py_intptr_t *shape; /* A length-nd array of shape information */ - Py_intptr_t *strides; /* A length-nd array of stride information */ - void *data; /* A pointer to the first element of the array */ - PyObject *descr; /* NULL or a data-description */ -} PyArrayInterface; - -#endif diff --git a/venv/Include/site/python3.9/pygame/pgbufferproxy.h b/venv/Include/site/python3.9/pygame/pgbufferproxy.h deleted file mode 100644 index 1507608..0000000 --- a/venv/Include/site/python3.9/pygame/pgbufferproxy.h +++ /dev/null @@ -1,7 +0,0 @@ -#ifndef PG_BUFPROXY_INTERNAL_H -#define PG_BUFPROXY_INTERNAL_H - -#include "include/pygame_bufferproxy.h" -#define PYGAMEAPI_BUFPROXY_NUMSLOTS 4 - -#endif /* ~PG_BUFPROXY_INTERNAL_H 
*/ diff --git a/venv/Include/site/python3.9/pygame/pgcompat.h b/venv/Include/site/python3.9/pygame/pgcompat.h deleted file mode 100644 index 602043d..0000000 --- a/venv/Include/site/python3.9/pygame/pgcompat.h +++ /dev/null @@ -1,57 +0,0 @@ -/* Python 2.x/3.x compatibility tools (internal) - */ -#ifndef PGCOMPAT_INTERNAL_H -#define PGCOMPAT_INTERNAL_H - -#include "include/pgcompat.h" - -/* Module init function returns new module instance. */ -#define MODINIT_DEFINE(mod_name) PyMODINIT_FUNC PyInit_##mod_name(void) - -/* Defaults for unicode file path encoding */ -#if defined(MS_WIN32) -#define UNICODE_DEF_FS_ERROR "replace" -#else -#define UNICODE_DEF_FS_ERROR "surrogateescape" -#endif - -#define RELATIVE_MODULE(m) ("." m) - -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER -#define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif - -#ifndef Py_TPFLAGS_HAVE_CLASS -#define Py_TPFLAGS_HAVE_CLASS 0 -#endif - -#ifndef Py_TPFLAGS_CHECKTYPES -#define Py_TPFLAGS_CHECKTYPES 0 -#endif - -#define Slice_GET_INDICES_EX(slice, length, start, stop, step, slicelength) \ - PySlice_GetIndicesEx(slice, length, start, stop, step, slicelength) - -#if defined(SDL_VERSION_ATLEAST) -#if !(SDL_VERSION_ATLEAST(2, 0, 5)) -/* These functions require SDL 2.0.5 or greater. - - https://wiki.libsdl.org/SDL_SetWindowResizable -*/ -void -SDL_SetWindowResizable(SDL_Window *window, SDL_bool resizable); -int -SDL_GetWindowOpacity(SDL_Window *window, float *opacity); -int -SDL_SetWindowOpacity(SDL_Window *window, float opacity); -int -SDL_SetWindowModalFor(SDL_Window *modal_window, SDL_Window *parent_window); -int -SDL_SetWindowInputFocus(SDL_Window *window); -SDL_Surface * -SDL_CreateRGBSurfaceWithFormat(Uint32 flags, int width, int height, int depth, - Uint32 format); -#endif /* !(SDL_VERSION_ATLEAST(2, 0, 5)) */ -#endif /* defined(SDL_VERSION_ATLEAST) */ - -#endif /* ~PGCOMPAT_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/pgopengl.h b/venv/Include/site/python3.9/pygame/pgopengl.h deleted file mode 100644 index a845cbf..0000000 --- a/venv/Include/site/python3.9/pygame/pgopengl.h +++ /dev/null @@ -1,20 +0,0 @@ -#if !defined(PGOPENGL_H) -#define PGOPENGL_H - -/** This header includes definitions of Opengl functions as pointer types for - ** use with the SDL function SDL_GL_GetProcAddress. - **/ - -#if defined(_WIN32) -#define GL_APIENTRY __stdcall -#else -#define GL_APIENTRY -#endif - -typedef void(GL_APIENTRY *GL_glReadPixels_Func)(int, int, int, int, - unsigned int, unsigned int, - void *); - -typedef void(GL_APIENTRY *GL_glViewport_Func)(int, int, unsigned int, - unsigned int); -#endif diff --git a/venv/Include/site/python3.9/pygame/pgplatform.h b/venv/Include/site/python3.9/pygame/pgplatform.h deleted file mode 100644 index 1c6c285..0000000 --- a/venv/Include/site/python3.9/pygame/pgplatform.h +++ /dev/null @@ -1,39 +0,0 @@ -/* platform/compiler adjustments (internal) */ -#ifndef PG_PLATFORM_INTERNAL_H -#define PG_PLATFORM_INTERNAL_H - -/* This must be before all else */ -#if defined(__SYMBIAN32__) && defined(OPENC) -#include -#if defined(__WINS__) -void * -_alloca(size_t size); -#define alloca _alloca -#endif /* __WINS__ */ -#endif /* defined(__SYMBIAN32__) && defined(OPENC) */ - -#include "include/pgplatform.h" - -#ifndef MIN -#define MIN(a, b) ((a) < (b) ? (a) : (b)) -#endif -#ifndef MAX -#define MAX(a, b) ((a) > (b) ? (a) : (b)) -#endif -#ifndef ABS -#define ABS(a) (((a) < 0) ? 
-(a) : (a)) -#endif - -#if defined(macintosh) && defined(__MWERKS__) || defined(__SYMBIAN32__) -#define PYGAME_EXPORT __declspec(export) -#else -#define PYGAME_EXPORT -#endif - -/* warnings */ -#define PG_STRINGIZE_HELPER(x) #x -#define PG_STRINGIZE(x) PG_STRINGIZE_HELPER(x) -#define PG_WARN(desc) \ - message(__FILE__ "(" PG_STRINGIZE(__LINE__) "): WARNING: " #desc) - -#endif /* ~PG_PLATFORM_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/pygame.h b/venv/Include/site/python3.9/pygame/pygame.h deleted file mode 100644 index d7eaf73..0000000 --- a/venv/Include/site/python3.9/pygame/pygame.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -/* This will use PYGAMEAPI_DEFINE_SLOTS instead - * of PYGAMEAPI_EXTERN_SLOTS for base modules. - */ -#ifndef PYGAME_INTERNAL_H -#define PYGAME_INTERNAL_H - -#define PYGAME_H -#include "_pygame.h" - -#endif /* ~PYGAME_INTERNAL_H */ diff --git a/venv/Include/site/python3.9/pygame/scrap.h b/venv/Include/site/python3.9/pygame/scrap.h deleted file mode 100644 index b3265a3..0000000 --- a/venv/Include/site/python3.9/pygame/scrap.h +++ /dev/null @@ -1,147 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2006, 2007 Rene Dudfield, Marcus von Appen - - Originally put in the public domain by Sam Lantinga. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ - -#ifndef SCRAP_H -#define SCRAP_H - -/* This is unconditionally defined in Python.h */ -#if defined(_POSIX_C_SOURCE) -#undef _POSIX_C_SOURCE -#endif - -#include - -/* Handle clipboard text and data in arbitrary formats */ - -/** - * Predefined supported pygame scrap types. - */ -#define PYGAME_SCRAP_TEXT "text/plain" -#define PYGAME_SCRAP_BMP "image/bmp" -#define PYGAME_SCRAP_PPM "image/ppm" -#define PYGAME_SCRAP_PBM "image/pbm" - -/** - * The supported scrap clipboard types. - * - * This is only relevant in a X11 environment, which supports mouse - * selections as well. For Win32 and MacOS environments the default - * clipboard is used, no matter what value is passed. 
- */ -typedef enum { - SCRAP_CLIPBOARD, - SCRAP_SELECTION /* only supported in X11 environments. */ -} ScrapClipType; - -/** - * Macro for initialization checks. - */ -#define PYGAME_SCRAP_INIT_CHECK() \ - if (!pygame_scrap_initialized()) \ - return (PyErr_SetString(pgExc_SDLError, "scrap system not initialized."), \ - NULL) - -/** - * \brief Checks, whether the pygame scrap module was initialized. - * - * \return 1 if the modules was initialized, 0 otherwise. - */ -extern int -pygame_scrap_initialized(void); - -/** - * \brief Initializes the pygame scrap module internals. Call this before any - * other method. - * - * \return 1 on successful initialization, 0 otherwise. - */ -extern int -pygame_scrap_init(void); - -/** - * \brief Checks, whether the pygame window lost the clipboard focus or not. - * - * \return 1 if the window lost the focus, 0 otherwise. - */ -extern int -pygame_scrap_lost(void); - -/** - * \brief Places content of a specific type into the clipboard. - * - * \note For X11 the following notes are important: The following types - * are reserved for internal usage and thus will throw an error on - * setting them: "TIMESTAMP", "TARGETS", "SDL_SELECTION". - * Setting PYGAME_SCRAP_TEXT ("text/plain") will also automatically - * set the X11 types "STRING" (XA_STRING), "TEXT" and "UTF8_STRING". - * - * For Win32 the following notes are important: Setting - * PYGAME_SCRAP_TEXT ("text/plain") will also automatically set - * the Win32 type "TEXT" (CF_TEXT). - * - * For QNX the following notes are important: Setting - * PYGAME_SCRAP_TEXT ("text/plain") will also automatically set - * the QNX type "TEXT" (Ph_CL_TEXT). - * - * \param type The type of the content. - * \param srclen The length of the content. - * \param src The NULL terminated content. - * \return 1, if the content could be successfully pasted into the clipboard, - * 0 otherwise. - */ -extern int -pygame_scrap_put(char *type, int srclen, char *src); - -/** - * \brief Gets the current content from the clipboard. - * - * \note The received content does not need to be the content previously - * placed in the clipboard using pygame_put_scrap(). See the - * pygame_put_scrap() notes for more details. - * - * \param type The type of the content to receive. - * \param count The size of the returned content. - * \return The content or NULL in case of an error or if no content of the - * specified type was available. - */ -extern char * -pygame_scrap_get(char *type, unsigned long *count); - -/** - * \brief Gets the currently available content types from the clipboard. - * - * \return The different available content types or NULL in case of an - * error or if no content type is available. - */ -extern char ** -pygame_scrap_get_types(void); - -/** - * \brief Checks whether content for the specified scrap type is currently - * available in the clipboard. - * - * \param type The type to check for. - * \return 1, if there is content and 0 otherwise. 
- */ -extern int -pygame_scrap_contains(char *type); - -#endif /* SCRAP_H */ diff --git a/venv/Include/site/python3.9/pygame/surface.h b/venv/Include/site/python3.9/pygame/surface.h deleted file mode 100644 index eb9bbed..0000000 --- a/venv/Include/site/python3.9/pygame/surface.h +++ /dev/null @@ -1,355 +0,0 @@ -/* - pygame - Python Game Library - Copyright (C) 2000-2001 Pete Shinners - Copyright (C) 2007 Marcus von Appen - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Library General Public - License as published by the Free Software Foundation; either - version 2 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Library General Public License for more details. - - You should have received a copy of the GNU Library General Public - License along with this library; if not, write to the Free - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Pete Shinners - pete@shinners.org -*/ - -#ifndef SURFACE_H -#define SURFACE_H - -/* This is defined in SDL.h */ -#if defined(_POSIX_C_SOURCE) -#undef _POSIX_C_SOURCE -#endif - -#include -#include "pygame.h" - -/* Blend modes */ -#define PYGAME_BLEND_ADD 0x1 -#define PYGAME_BLEND_SUB 0x2 -#define PYGAME_BLEND_MULT 0x3 -#define PYGAME_BLEND_MIN 0x4 -#define PYGAME_BLEND_MAX 0x5 - -#define PYGAME_BLEND_RGB_ADD 0x1 -#define PYGAME_BLEND_RGB_SUB 0x2 -#define PYGAME_BLEND_RGB_MULT 0x3 -#define PYGAME_BLEND_RGB_MIN 0x4 -#define PYGAME_BLEND_RGB_MAX 0x5 - -#define PYGAME_BLEND_RGBA_ADD 0x6 -#define PYGAME_BLEND_RGBA_SUB 0x7 -#define PYGAME_BLEND_RGBA_MULT 0x8 -#define PYGAME_BLEND_RGBA_MIN 0x9 -#define PYGAME_BLEND_RGBA_MAX 0x10 -#define PYGAME_BLEND_PREMULTIPLIED 0x11 -#define PYGAME_BLEND_ALPHA_SDL2 0x12 - -#if SDL_BYTEORDER == SDL_LIL_ENDIAN -#define GET_PIXEL_24(b) (b[0] + (b[1] << 8) + (b[2] << 16)) -#else -#define GET_PIXEL_24(b) (b[2] + (b[1] << 8) + (b[0] << 16)) -#endif - -#define GET_PIXEL(pxl, bpp, source) \ - switch (bpp) { \ - case 2: \ - pxl = *((Uint16 *)(source)); \ - break; \ - case 4: \ - pxl = *((Uint32 *)(source)); \ - break; \ - default: { \ - Uint8 *b = (Uint8 *)source; \ - pxl = GET_PIXEL_24(b); \ - } break; \ - } - -#define GET_PIXELVALS(_sR, _sG, _sB, _sA, px, fmt, ppa) \ - SDL_GetRGBA(px, fmt, &(_sR), &(_sG), &(_sB), &(_sA)); \ - if (!ppa) { \ - _sA = 255; \ - } - -#define GET_PIXELVALS_1(sr, sg, sb, sa, _src, _fmt) \ - sr = _fmt->palette->colors[*((Uint8 *)(_src))].r; \ - sg = _fmt->palette->colors[*((Uint8 *)(_src))].g; \ - sb = _fmt->palette->colors[*((Uint8 *)(_src))].b; \ - sa = 255; - -/* For 1 byte palette pixels */ -#define SET_PIXELVAL(px, fmt, _dR, _dG, _dB, _dA) \ - *(px) = (Uint8)SDL_MapRGBA(fmt, _dR, _dG, _dB, _dA) - -#if SDL_BYTEORDER == SDL_LIL_ENDIAN -#define SET_OFFSETS_24(or, og, ob, fmt) \ - { \ - or = (fmt->Rshift == 0 ? 0 : fmt->Rshift == 8 ? 1 : 2); \ - og = (fmt->Gshift == 0 ? 0 : fmt->Gshift == 8 ? 1 : 2); \ - ob = (fmt->Bshift == 0 ? 0 : fmt->Bshift == 8 ? 1 : 2); \ - } - -#define SET_OFFSETS_32(or, og, ob, fmt) \ - { \ - or = (fmt->Rshift == 0 ? 0 \ - : fmt->Rshift == 8 ? 1 \ - : fmt->Rshift == 16 ? 2 \ - : 3); \ - og = (fmt->Gshift == 0 ? 0 \ - : fmt->Gshift == 8 ? 1 \ - : fmt->Gshift == 16 ? 2 \ - : 3); \ - ob = (fmt->Bshift == 0 ? 0 \ - : fmt->Bshift == 8 ? 1 \ - : fmt->Bshift == 16 ? 
2 \ - : 3); \ - } -#else -#define SET_OFFSETS_24(or, og, ob, fmt) \ - { \ - or = (fmt->Rshift == 0 ? 2 : fmt->Rshift == 8 ? 1 : 0); \ - og = (fmt->Gshift == 0 ? 2 : fmt->Gshift == 8 ? 1 : 0); \ - ob = (fmt->Bshift == 0 ? 2 : fmt->Bshift == 8 ? 1 : 0); \ - } - -#define SET_OFFSETS_32(or, og, ob, fmt) \ - { \ - or = (fmt->Rshift == 0 ? 3 \ - : fmt->Rshift == 8 ? 2 \ - : fmt->Rshift == 16 ? 1 \ - : 0); \ - og = (fmt->Gshift == 0 ? 3 \ - : fmt->Gshift == 8 ? 2 \ - : fmt->Gshift == 16 ? 1 \ - : 0); \ - ob = (fmt->Bshift == 0 ? 3 \ - : fmt->Bshift == 8 ? 2 \ - : fmt->Bshift == 16 ? 1 \ - : 0); \ - } -#endif - -#define CREATE_PIXEL(buf, r, g, b, a, bp, ft) \ - switch (bp) { \ - case 2: \ - *((Uint16 *)(buf)) = ((r >> ft->Rloss) << ft->Rshift) | \ - ((g >> ft->Gloss) << ft->Gshift) | \ - ((b >> ft->Bloss) << ft->Bshift) | \ - ((a >> ft->Aloss) << ft->Ashift); \ - break; \ - case 4: \ - *((Uint32 *)(buf)) = ((r >> ft->Rloss) << ft->Rshift) | \ - ((g >> ft->Gloss) << ft->Gshift) | \ - ((b >> ft->Bloss) << ft->Bshift) | \ - ((a >> ft->Aloss) << ft->Ashift); \ - break; \ - } - -/* Pretty good idea from Tom Duff :-). */ -#define LOOP_UNROLLED4(code, n, width) \ - n = (width + 3) / 4; \ - switch (width & 3) { \ - case 0: \ - do { \ - code; \ - case 3: \ - code; \ - case 2: \ - code; \ - case 1: \ - code; \ - } while (--n > 0); \ - } - -/* Used in the srcbpp == dstbpp == 1 blend functions */ -#define REPEAT_3(code) \ - code; \ - code; \ - code; - -#define REPEAT_4(code) \ - code; \ - code; \ - code; \ - code; - -#define BLEND_ADD(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \ - tmp = dR + sR; \ - dR = (tmp <= 255 ? tmp : 255); \ - tmp = dG + sG; \ - dG = (tmp <= 255 ? tmp : 255); \ - tmp = dB + sB; \ - dB = (tmp <= 255 ? tmp : 255); - -#define BLEND_SUB(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \ - tmp = dR - sR; \ - dR = (tmp >= 0 ? tmp : 0); \ - tmp = dG - sG; \ - dG = (tmp >= 0 ? tmp : 0); \ - tmp = dB - sB; \ - dB = (tmp >= 0 ? tmp : 0); - -#define BLEND_MULT(sR, sG, sB, sA, dR, dG, dB, dA) \ - dR = (dR && sR) ? (dR * sR) >> 8 : 0; \ - dG = (dG && sG) ? (dG * sG) >> 8 : 0; \ - dB = (dB && sB) ? (dB * sB) >> 8 : 0; - -#define BLEND_MIN(sR, sG, sB, sA, dR, dG, dB, dA) \ - if (sR < dR) { \ - dR = sR; \ - } \ - if (sG < dG) { \ - dG = sG; \ - } \ - if (sB < dB) { \ - dB = sB; \ - } - -#define BLEND_MAX(sR, sG, sB, sA, dR, dG, dB, dA) \ - if (sR > dR) { \ - dR = sR; \ - } \ - if (sG > dG) { \ - dG = sG; \ - } \ - if (sB > dB) { \ - dB = sB; \ - } - -#define BLEND_RGBA_ADD(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \ - tmp = dR + sR; \ - dR = (tmp <= 255 ? tmp : 255); \ - tmp = dG + sG; \ - dG = (tmp <= 255 ? tmp : 255); \ - tmp = dB + sB; \ - dB = (tmp <= 255 ? tmp : 255); \ - tmp = dA + sA; \ - dA = (tmp <= 255 ? tmp : 255); - -#define BLEND_RGBA_SUB(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \ - tmp = dR - sR; \ - dR = (tmp >= 0 ? tmp : 0); \ - tmp = dG - sG; \ - dG = (tmp >= 0 ? tmp : 0); \ - tmp = dB - sB; \ - dB = (tmp >= 0 ? tmp : 0); \ - tmp = dA - sA; \ - dA = (tmp >= 0 ? tmp : 0); - -#define BLEND_RGBA_MULT(sR, sG, sB, sA, dR, dG, dB, dA) \ - dR = (dR && sR) ? (dR * sR) >> 8 : 0; \ - dG = (dG && sG) ? (dG * sG) >> 8 : 0; \ - dB = (dB && sB) ? (dB * sB) >> 8 : 0; \ - dA = (dA && sA) ? 
(dA * sA) >> 8 : 0; - -#define BLEND_RGBA_MIN(sR, sG, sB, sA, dR, dG, dB, dA) \ - if (sR < dR) { \ - dR = sR; \ - } \ - if (sG < dG) { \ - dG = sG; \ - } \ - if (sB < dB) { \ - dB = sB; \ - } \ - if (sA < dA) { \ - dA = sA; \ - } - -#define BLEND_RGBA_MAX(sR, sG, sB, sA, dR, dG, dB, dA) \ - if (sR > dR) { \ - dR = sR; \ - } \ - if (sG > dG) { \ - dG = sG; \ - } \ - if (sB > dB) { \ - dB = sB; \ - } \ - if (sA > dA) { \ - dA = sA; \ - } - -#if 1 -/* Choose an alpha blend equation. If the sign is preserved on a right shift - * then use a specialized, faster, equation. Otherwise a more general form, - * where all additions are done before the shift, is needed. - */ -#if (-1 >> 1) < 0 -#define ALPHA_BLEND_COMP(sC, dC, sA) ((((sC - dC) * sA + sC) >> 8) + dC) -#else -#define ALPHA_BLEND_COMP(sC, dC, sA) (((dC << 8) + (sC - dC) * sA + sC) >> 8) -#endif - -#define ALPHA_BLEND(sR, sG, sB, sA, dR, dG, dB, dA) \ - do { \ - if (dA) { \ - dR = ALPHA_BLEND_COMP(sR, dR, sA); \ - dG = ALPHA_BLEND_COMP(sG, dG, sA); \ - dB = ALPHA_BLEND_COMP(sB, dB, sA); \ - dA = sA + dA - ((sA * dA) / 255); \ - } \ - else { \ - dR = sR; \ - dG = sG; \ - dB = sB; \ - dA = sA; \ - } \ - } while (0) - -#define ALPHA_BLEND_PREMULTIPLIED_COMP(sC, dC, sA) \ - (sC + dC - ((dC + 1) * sA >> 8)) - -#define ALPHA_BLEND_PREMULTIPLIED(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \ - do { \ - dR = ALPHA_BLEND_PREMULTIPLIED_COMP(sR, dR, sA); \ - dG = ALPHA_BLEND_PREMULTIPLIED_COMP(sG, dG, sA); \ - dB = ALPHA_BLEND_PREMULTIPLIED_COMP(sB, dB, sA); \ - dA = ALPHA_BLEND_PREMULTIPLIED_COMP(sA, dA, sA); \ - } while (0) -#elif 0 - -#define ALPHA_BLEND(sR, sG, sB, sA, dR, dG, dB, dA) \ - do { \ - if (sA) { \ - if (dA && sA < 255) { \ - int dContrib = dA * (255 - sA) / 255; \ - dA = sA + dA - ((sA * dA) / 255); \ - dR = (dR * dContrib + sR * sA) / dA; \ - dG = (dG * dContrib + sG * sA) / dA; \ - dB = (dB * dContrib + sB * sA) / dA; \ - } \ - else { \ - dR = sR; \ - dG = sG; \ - dB = sB; \ - dA = sA; \ - } \ - } \ - } while (0) -#endif - -int -surface_fill_blend(SDL_Surface *surface, SDL_Rect *rect, Uint32 color, - int blendargs); - -void -surface_respect_clip_rect(SDL_Surface *surface, SDL_Rect *rect); - -int -pygame_AlphaBlit(SDL_Surface *src, SDL_Rect *srcrect, SDL_Surface *dst, - SDL_Rect *dstrect, int the_args); - -int -pygame_Blit(SDL_Surface *src, SDL_Rect *srcrect, SDL_Surface *dst, - SDL_Rect *dstrect, int the_args); - -#endif /* SURFACE_H */ diff --git a/venv/Lib/site-packages/_distutils_hack/__init__.py b/venv/Lib/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index 605a6ed..0000000 --- a/venv/Lib/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,187 +0,0 @@ -# don't import any costly modules -import sys -import os - - -is_pypy = '__pypy__' in sys.builtin_module_names - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return - import warnings - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. 
not an editable install), and/or make sure " - "that setuptools is always imported before distutils.") - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - import warnings - warnings.warn("Setuptools is replacing distutils.") - mods = [ - name for name in sys.modules - if name == "distutils" or name.startswith("distutils.") - ] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') - return which == 'local' - - -def ensure_local_distutils(): - import importlib - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - with shim(): - importlib.import_module('distutils') - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - assert 'setuptools._distutils.log' not in sys.modules - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class _TrivialRe: - def __init__(self, *patterns): - self._patterns = patterns - - def match(self, string): - return all(pat in string for pat in self._patterns) - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - if path is not None: - return - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - if self.is_cpython(): - return - - import importlib - import importlib.abc - import importlib.util - - try: - mod = importlib.import_module('setuptools._distutils') - except Exception: - # There are a couple of cases where setuptools._distutils - # may not be present: - # - An older Setuptools without a local distutils is - # taking precedence. Ref #2957. - # - Path manipulation during sitecustomize removes - # setuptools from the path but only after the hook - # has been loaded. Ref #2980. - # In either case, fall back to stdlib behavior. - return - - class DistutilsLoader(importlib.abc.Loader): - - def create_module(self, spec): - mod.__name__ = 'distutils' - return mod - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader( - 'distutils', DistutilsLoader(), origin=mod.__file__ - ) - - @staticmethod - def is_cpython(): - """ - Suppress supplying distutils for CPython (build and tests). - Ref #2965 and #3007. - """ - return os.path.isfile('pybuilddir.txt') - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. - See pypa/pip#8761 for rationale. - """ - if self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @classmethod - def pip_imported_during_build(cls): - """ - Detect if pip is being imported in a build script. Ref #2355. - """ - import traceback - return any( - cls.frame_file_is_setup(frame) - for frame, line in traceback.walk_stack(None) - ) - - @staticmethod - def frame_file_is_setup(frame): - """ - Return True if the indicated frame suggests a setup.py file. 
- """ - # some frames may not have __file__ (#2940) - return frame.f_globals.get('__file__', '').endswith('setup.py') - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - DISTUTILS_FINDER in sys.meta_path or insert_shim() - - -class shim: - def __enter__(self): - insert_shim() - - def __exit__(self, exc, value, tb): - remove_shim() - - -def insert_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass diff --git a/venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index eb021d6..0000000 Binary files a/venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc b/venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc deleted file mode 100644 index d091cd4..0000000 Binary files a/venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/_distutils_hack/override.py b/venv/Lib/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/venv/Lib/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/venv/Lib/site-packages/distutils-precedence.pth b/venv/Lib/site-packages/distutils-precedence.pth deleted file mode 100644 index 7f009fe..0000000 --- a/venv/Lib/site-packages/distutils-precedence.pth +++ /dev/null @@ -1 +0,0 @@ -import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/INSTALLER b/venv/Lib/site-packages/pip-22.0.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/LICENSE.txt b/venv/Lib/site-packages/pip-22.0.4.dist-info/LICENSE.txt deleted file mode 100644 index 8e7b65e..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008-present The pip developers (see AUTHORS.txt file) - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/METADATA b/venv/Lib/site-packages/pip-22.0.4.dist-info/METADATA deleted file mode 100644 index 4e68399..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/METADATA +++ /dev/null @@ -1,92 +0,0 @@ -Metadata-Version: 2.1 -Name: pip -Version: 22.0.4 -Summary: The PyPA recommended tool for installing Python packages. -Home-page: https://pip.pypa.io/ -Author: The pip developers -Author-email: distutils-sig@python.org -License: MIT -Project-URL: Documentation, https://pip.pypa.io -Project-URL: Source, https://github.com/pypa/pip -Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Topic :: Software Development :: Build Tools -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=3.7 -License-File: LICENSE.txt - -pip - The Python Package Installer -================================== - -.. image:: https://img.shields.io/pypi/v/pip.svg - :target: https://pypi.org/project/pip/ - -.. image:: https://readthedocs.org/projects/pip/badge/?version=latest - :target: https://pip.pypa.io/en/latest - -pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. - -Please take a look at our documentation for how to install and use pip: - -* `Installation`_ -* `Usage`_ - -We release updates regularly, with a new version every 3 months. Find more details in our documentation: - -* `Release notes`_ -* `Release process`_ - -In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right. - -**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3. - -If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: - -* `Issue tracking`_ -* `Discourse channel`_ -* `User IRC`_ - -If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: - -* `GitHub page`_ -* `Development documentation`_ -* `Development mailing list`_ -* `Development IRC`_ - -Code of Conduct ---------------- - -Everyone interacting in the pip project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. - -.. _package installer: https://packaging.python.org/guides/tool-recommendations/ -.. _Python Package Index: https://pypi.org -.. _Installation: https://pip.pypa.io/en/stable/installation/ -.. _Usage: https://pip.pypa.io/en/stable/ -.. _Release notes: https://pip.pypa.io/en/stable/news.html -.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ -.. _GitHub page: https://github.com/pypa/pip -.. 
_Development documentation: https://pip.pypa.io/en/latest/development -.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html -.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020 -.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html -.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support -.. _Issue tracking: https://github.com/pypa/pip/issues -.. _Discourse channel: https://discuss.python.org/c/packaging -.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ -.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa -.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev -.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - - diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/RECORD b/venv/Lib/site-packages/pip-22.0.4.dist-info/RECORD deleted file mode 100644 index 2ff22f8..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/RECORD +++ /dev/null @@ -1,1052 +0,0 @@ -../../Scripts/pip.exe,sha256=RfYKY3iY1pSe2G1PbhvZHVZTyzHMesphk6tWC32M2-0,106390 -../../Scripts/pip3.9.exe,sha256=RfYKY3iY1pSe2G1PbhvZHVZTyzHMesphk6tWC32M2-0,106390 -../../Scripts/pip3.exe,sha256=RfYKY3iY1pSe2G1PbhvZHVZTyzHMesphk6tWC32M2-0,106390 -pip-22.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip-22.0.4.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093 -pip-22.0.4.dist-info/METADATA,sha256=bGtDzdgW1AF93Nx32ySc78yQHtHkOrRD146Dvsz85CM,4166 -pip-22.0.4.dist-info/RECORD,, -pip-22.0.4.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -pip-22.0.4.dist-info/entry_points.txt,sha256=5ExSa1s54zSPNA_1epJn5SX06786S8k5YHwskMvVYzw,125 -pip-22.0.4.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/__init__.py,sha256=b_avKym1xgWv1_1CobpKDp59sdjm_9nwB7sencJQUEY,357 -pip/__main__.py,sha256=mXwWDftNLMKfwVqKFWGE_uuBZvGSIiUELhLkeysIuZc,1198 -pip/__pycache__/__init__.cpython-39.pyc,, -pip/__pycache__/__main__.cpython-39.pyc,, -pip/_internal/__init__.py,sha256=nnFCuxrPMgALrIDxSoy-H6Zj4W4UY60D-uL1aJyq0pc,573 -pip/_internal/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/__pycache__/build_env.cpython-39.pyc,, -pip/_internal/__pycache__/cache.cpython-39.pyc,, -pip/_internal/__pycache__/configuration.cpython-39.pyc,, -pip/_internal/__pycache__/exceptions.cpython-39.pyc,, -pip/_internal/__pycache__/main.cpython-39.pyc,, -pip/_internal/__pycache__/pyproject.cpython-39.pyc,, -pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc,, -pip/_internal/__pycache__/wheel_builder.cpython-39.pyc,, -pip/_internal/build_env.py,sha256=QAsnxJFvj74jS2cZUcxk7zXLvrtAYiRL0EkSPkpSJTo,9739 -pip/_internal/cache.py,sha256=71eaYwrls34HJ6gzbmmYiotiKhPNFTM_tqYJXD5nf3s,9441 -pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 -pip/_internal/cli/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc,, -pip/_internal/cli/__pycache__/base_command.cpython-39.pyc,, -pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc,, -pip/_internal/cli/__pycache__/command_context.cpython-39.pyc,, -pip/_internal/cli/__pycache__/main.cpython-39.pyc,, -pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc,, 
-pip/_internal/cli/__pycache__/parser.cpython-39.pyc,, -pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc,, -pip/_internal/cli/__pycache__/req_command.cpython-39.pyc,, -pip/_internal/cli/__pycache__/spinners.cpython-39.pyc,, -pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc,, -pip/_internal/cli/autocompletion.py,sha256=wY2JPZY2Eji1vhR7bVo-yCBPJ9LCy6P80iOAhZD1Vi8,6676 -pip/_internal/cli/base_command.py,sha256=zYHdQssEkCvWHYf3dtIApaVaxOwAh3maA61nVxaZF2M,8152 -pip/_internal/cli/cmdoptions.py,sha256=TTYO0nxK9YyLwrXk1NHrn7X3dbGoqdV02Yb8kdVoVhc,28526 -pip/_internal/cli/command_context.py,sha256=a1pBBvvGLDiZ1Kw64_4tT6HmRTwYDoYy8JFgG5Czn7s,760 -pip/_internal/cli/main.py,sha256=ioJ8IVlb2K1qLOxR-tXkee9lURhYV89CDM71MKag7YY,2472 -pip/_internal/cli/main_parser.py,sha256=Q9TnytfuC5Z2JSjBFWVGtEdYLFy7rukNIb04movHdAo,2614 -pip/_internal/cli/parser.py,sha256=CDXTuFr2UD8ozOlZYf1KDziQdo9-X_IaYOiUcyJQwrA,10788 -pip/_internal/cli/progress_bars.py,sha256=_52w11WoZrvDSR3oItLWvLrEZFUKAfLf4Y6I6WtOnIU,10339 -pip/_internal/cli/req_command.py,sha256=VwqonOy18QwZsRsVjHhp-6w15fG9x3Ltwoa8yJqQno8,18669 -pip/_internal/cli/spinners.py,sha256=TFhjxtOnLeNJ5YmRvQm4eKPgPbJNkZiqO8jOXuxRaYU,5076 -pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 -pip/_internal/commands/__init__.py,sha256=Vc1HjsLEtyCh7506OozPHPKXe2Hk-z9cFkFF3BMj1lM,3736 -pip/_internal/commands/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/commands/__pycache__/cache.cpython-39.pyc,, -pip/_internal/commands/__pycache__/check.cpython-39.pyc,, -pip/_internal/commands/__pycache__/completion.cpython-39.pyc,, -pip/_internal/commands/__pycache__/configuration.cpython-39.pyc,, -pip/_internal/commands/__pycache__/debug.cpython-39.pyc,, -pip/_internal/commands/__pycache__/download.cpython-39.pyc,, -pip/_internal/commands/__pycache__/freeze.cpython-39.pyc,, -pip/_internal/commands/__pycache__/hash.cpython-39.pyc,, -pip/_internal/commands/__pycache__/help.cpython-39.pyc,, -pip/_internal/commands/__pycache__/index.cpython-39.pyc,, -pip/_internal/commands/__pycache__/install.cpython-39.pyc,, -pip/_internal/commands/__pycache__/list.cpython-39.pyc,, -pip/_internal/commands/__pycache__/search.cpython-39.pyc,, -pip/_internal/commands/__pycache__/show.cpython-39.pyc,, -pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc,, -pip/_internal/commands/__pycache__/wheel.cpython-39.pyc,, -pip/_internal/commands/cache.py,sha256=p9gvc6W_xgxE2zO0o8NXqO1gGJEinEK42qEC-a7Cnuk,7524 -pip/_internal/commands/check.py,sha256=0gjXR7j36xJT5cs2heYU_dfOfpnFfzX8OoPNNoKhqdM,1685 -pip/_internal/commands/completion.py,sha256=kTG_I1VR3N5kGC4Ma9pQTSoY9Q1URCrNyseHSQ-rCL4,2958 -pip/_internal/commands/configuration.py,sha256=arE8vLstjBg-Ar1krXF-bBmT1qBtnL7Fpk-NVh38a0U,8944 -pip/_internal/commands/debug.py,sha256=krET-y45CnQzXwKR1qA3M_tJE4LE2vnQtm3yfGyDSnE,6629 -pip/_internal/commands/download.py,sha256=gVIAEOcpWolhRj9hl89Qzn52G2b_pcZ8naXhxaXobdo,4942 -pip/_internal/commands/freeze.py,sha256=gCjoD6foBZPBAAYx5t8zZLkJhsF_ZRtnb3dPuD7beO8,2951 -pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703 -pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132 -pip/_internal/commands/index.py,sha256=8pYkICUJlccjm3E83b7UuZ5DtOfLh1N7ZHXAgkajjHo,4849 -pip/_internal/commands/install.py,sha256=YVygBF6vfrNi0jmdNBCM6bcoWb7vaALEGG1--8Mmf88,27893 -pip/_internal/commands/list.py,sha256=tTjZ7u0VIh3uhnX231Q9pwt6ObT_zrDfixRQvgpJAvM,12221 
-pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697 -pip/_internal/commands/show.py,sha256=2VicM3jF0YWgn4O1jG_QF5oxOT0ln57VDu1NE6hqWcM,5859 -pip/_internal/commands/uninstall.py,sha256=DNTYAGJNljMO_YYBxrpcwj0FEl7lo_P55_98O6g2TNk,3526 -pip/_internal/commands/wheel.py,sha256=7HAjLclZxIzBrX6JmhmGBVxH5xrjaBYCtSdpQi1pWCE,6206 -pip/_internal/configuration.py,sha256=qmCX3uuVM73PQeAuWQHic22bhops8s31B8k02nFAoiQ,13171 -pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858 -pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/distributions/__pycache__/base.cpython-39.pyc,, -pip/_internal/distributions/__pycache__/installed.cpython-39.pyc,, -pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc,, -pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc,, -pip/_internal/distributions/base.py,sha256=3FUYD8Gb4YuSu3pggC_FRctZBDbpm5ZK89tPksIUjoE,1172 -pip/_internal/distributions/installed.py,sha256=HzfNRu3smoOm54m8H2iK6LHzBx6_DEnka4OPEsizbXg,680 -pip/_internal/distributions/sdist.py,sha256=0nJvU1RhZtbwaeYtLbzSwYrbGRcY6IgNsWdEhAHROK8,5499 -pip/_internal/distributions/wheel.py,sha256=-NgzdIs-w_hcer_U81yzgpVTljJRg5m79xufqvbjv0s,1115 -pip/_internal/exceptions.py,sha256=U-dV1ixkSz6NAU6Aw9dosKi2EzZ5D3BA7ilYZuTLKeU,20912 -pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 -pip/_internal/index/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/index/__pycache__/collector.cpython-39.pyc,, -pip/_internal/index/__pycache__/package_finder.cpython-39.pyc,, -pip/_internal/index/__pycache__/sources.cpython-39.pyc,, -pip/_internal/index/collector.py,sha256=E4yZHzlzPtaXg2BxaugrNg1Jwtwgs4gC-Q_0bzYrBU4,19671 -pip/_internal/index/package_finder.py,sha256=9UVg-7582nYNEWa0cIIl8otzPm4mlfyrQVuozAcssLo,36783 -pip/_internal/index/sources.py,sha256=SVyPitv08-Qalh2_Bk5diAJ9GAA_d-a93koouQodAG0,6557 -pip/_internal/locations/__init__.py,sha256=ergvPwlfNTmQYFmaRYbj--ZwTN5izgTL9KE5d0FB7-8,17362 -pip/_internal/locations/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc,, -pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc,, -pip/_internal/locations/__pycache__/base.cpython-39.pyc,, -pip/_internal/locations/_distutils.py,sha256=Sk7tw8ZP1DWMYJ8MibABsa8IME2Ejv1PKeGlYQCBTZc,5871 -pip/_internal/locations/_sysconfig.py,sha256=LQNKTJKyjVqxXaPntlBwdUqTG1xwYf6GVCKMbyRJx5M,7918 -pip/_internal/locations/base.py,sha256=x5D1ONktmPJd8nnUTh-ELsAJ7fiXA-k-0a_vhfi2_Us,1579 -pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340 -pip/_internal/metadata/__init__.py,sha256=iGoDbe_iTXQTIAEVy9f7dm-VQfZANO8kkwFr1CpqxqI,2036 -pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/metadata/__pycache__/base.cpython-39.pyc,, -pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc,, -pip/_internal/metadata/base.py,sha256=SCRPtShrtPy0lfFxuaFTgJJHsRXToGFToQUAZoBBbeA,19429 -pip/_internal/metadata/pkg_resources.py,sha256=wAnEtrcgH9YtV996MfoBjR2hGLHvi3uxk0vUOHbqBak,9456 -pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 -pip/_internal/models/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/models/__pycache__/candidate.cpython-39.pyc,, -pip/_internal/models/__pycache__/direct_url.cpython-39.pyc,, -pip/_internal/models/__pycache__/format_control.cpython-39.pyc,, -pip/_internal/models/__pycache__/index.cpython-39.pyc,, 
-pip/_internal/models/__pycache__/link.cpython-39.pyc,, -pip/_internal/models/__pycache__/scheme.cpython-39.pyc,, -pip/_internal/models/__pycache__/search_scope.cpython-39.pyc,, -pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc,, -pip/_internal/models/__pycache__/target_python.cpython-39.pyc,, -pip/_internal/models/__pycache__/wheel.cpython-39.pyc,, -pip/_internal/models/candidate.py,sha256=6pcABsaR7CfIHlbJbr2_kMkVJFL_yrYjTx6SVWUnCPQ,990 -pip/_internal/models/direct_url.py,sha256=7XtGQSLLDQb5ZywI2EMnnLcddtf5CJLx44lMtTHPxFw,6350 -pip/_internal/models/format_control.py,sha256=DJpMYjxeYKKQdwNcML2_F0vtAh-qnKTYe-CpTxQe-4g,2520 -pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030 -pip/_internal/models/link.py,sha256=hoT_qsOBAgLBm9GKqpBrNF_mrEXeGXQE-aH_RX2cGgg,9817 -pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738 -pip/_internal/models/search_scope.py,sha256=LwloG0PJAmtI1hFXIypsD95kWE9xfR5hf_a2v1Vw7sk,4520 -pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907 -pip/_internal/models/target_python.py,sha256=qKpZox7J8NAaPmDs5C_aniwfPDxzvpkrCKqfwndG87k,3858 -pip/_internal/models/wheel.py,sha256=hN9Ub-m-cAJCajCcQHyQNsqpcDCbPPDlEzBDwaBMc14,3500 -pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 -pip/_internal/network/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/network/__pycache__/auth.cpython-39.pyc,, -pip/_internal/network/__pycache__/cache.cpython-39.pyc,, -pip/_internal/network/__pycache__/download.cpython-39.pyc,, -pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc,, -pip/_internal/network/__pycache__/session.cpython-39.pyc,, -pip/_internal/network/__pycache__/utils.cpython-39.pyc,, -pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc,, -pip/_internal/network/auth.py,sha256=a3C7Xaa8kTJjXkdi_wrUjqaySc8Z9Yz7U6QIbXfzMyc,12190 -pip/_internal/network/cache.py,sha256=FJ3uTUo3wgf2KHmeZ3ltN9x3tQoy_0X6qNsRtNXsuL0,2131 -pip/_internal/network/download.py,sha256=12Ef_L7MlhNUN_0-n_3DggozWJER8c9J0us16cbvkKA,6062 -pip/_internal/network/lazy_wheel.py,sha256=1b8ZJ1w4bSBzpGzGwJR_CL2yQ6AFIwWQkS1vbPPw2XU,7627 -pip/_internal/network/session.py,sha256=38IKGKC64MTVUIH5XOR1hr2pOCzp39RccykdmGAvqRU,16729 -pip/_internal/network/utils.py,sha256=igLlTu_-q0LmL8FdJKq-Uj7AT_owrQ-T9FfyarkhK5U,4059 -pip/_internal/network/xmlrpc.py,sha256=AzQgG4GgS152_cqmGr_Oz2MIXsCal-xfsis7fA7nmU0,1791 -pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/operations/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/operations/__pycache__/check.cpython-39.pyc,, -pip/_internal/operations/__pycache__/freeze.cpython-39.pyc,, -pip/_internal/operations/__pycache__/prepare.cpython-39.pyc,, -pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc,, -pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc,, -pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc,, -pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc,, -pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc,, -pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc,, -pip/_internal/operations/build/metadata.py,sha256=ES_uRmAvhrNm_nDTpZxshBfUsvnXtkj-g_4rZrH9Rww,1404 
-pip/_internal/operations/build/metadata_editable.py,sha256=_Rai0VZjxoeJUkjkuICrq45LtjwFoDOveosMYH43rKc,1456 -pip/_internal/operations/build/metadata_legacy.py,sha256=o-eU21As175hDC7dluM1fJJ_FqokTIShyWpjKaIpHZw,2198 -pip/_internal/operations/build/wheel.py,sha256=AO9XnTGhTgHtZmU8Dkbfo1OGr41rBuSDjIgAa4zUKgE,1063 -pip/_internal/operations/build/wheel_editable.py,sha256=TVETY-L_M_dSEKBhTIcQOP75zKVXw8tuq1U354Mm30A,1405 -pip/_internal/operations/build/wheel_legacy.py,sha256=C9j6rukgQI1n_JeQLoZGuDdfUwzCXShyIdPTp6edbMQ,3064 -pip/_internal/operations/check.py,sha256=ca4O9CkPt9Em9sLCf3H0iVt1GIcW7M8C0U5XooaBuT4,5109 -pip/_internal/operations/freeze.py,sha256=ZiYw5GlUpLVx4VJHz4S1AP2JFNyvH0iq5kpcYj2ovyw,9770 -pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 -pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc,, -pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc,, -pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc,, -pip/_internal/operations/install/editable_legacy.py,sha256=ee4kfJHNuzTdKItbfAsNOSEwq_vD7DRPGkBdK48yBhU,1354 -pip/_internal/operations/install/legacy.py,sha256=x7BG8kBm0K3JO6AR4sBl0zh2LOrfUaz7EdNt-keHBv4,4091 -pip/_internal/operations/install/wheel.py,sha256=QuQyCZE-XjuJjDYRixo40oUt2ucFhNmSrCbcXY7A9aE,27412 -pip/_internal/operations/prepare.py,sha256=LJP97jsuiCAaTGVIRrcINvxc1ntVsB45MoRbyMIukg4,24145 -pip/_internal/pyproject.py,sha256=Wm2ljdT6spC-tSdf1LBRaMYSJaXr1xUxV3OwdHCW9jc,6722 -pip/_internal/req/__init__.py,sha256=A7mUvT1KAcCYP3H7gUOTx2GRMlgoDur3H68Q0OJqM5A,2793 -pip/_internal/req/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/req/__pycache__/constructors.cpython-39.pyc,, -pip/_internal/req/__pycache__/req_file.cpython-39.pyc,, -pip/_internal/req/__pycache__/req_install.cpython-39.pyc,, -pip/_internal/req/__pycache__/req_set.cpython-39.pyc,, -pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc,, -pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc,, -pip/_internal/req/constructors.py,sha256=fXmtNI_J77JFP_HRvYcQW-1nKw3AiUu6Q3b1Nm8aMm0,16094 -pip/_internal/req/req_file.py,sha256=5N8OTouPCof-305StC2YK9HBxQMw-xO46skRoBPbkZo,17421 -pip/_internal/req/req_install.py,sha256=jU1HQBT_DnXZean7jY8wPNMhb6_CzdKHcilHFY_o-Fc,32524 -pip/_internal/req/req_set.py,sha256=kHYiLvkKRx21WaLTwOI-54Ng0SSzZZ9SE7FD0PsfvYA,7584 -pip/_internal/req/req_tracker.py,sha256=jK7JDu-Wt73X-gqozrFtgJVlUlnQo0P4IQ4x4_gPlfM,4117 -pip/_internal/req/req_uninstall.py,sha256=K2BHYRRJAfkSpFqcPzc9XfX2EvbhaRtQIPRFmMtUdfo,23814 -pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/resolution/__pycache__/base.cpython-39.pyc,, -pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583 -pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc,, -pip/_internal/resolution/legacy/resolver.py,sha256=b7bf5qL1ROg73sl8dhTvLdD1w5XF8xybBAF6eF_kz7c,18288 -pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc,, 
-pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc,, -pip/_internal/resolution/resolvelib/base.py,sha256=u1O4fkvCO4mhmu5i32xrDv9AX5NgUci_eYVyBDQhTIM,5220 -pip/_internal/resolution/resolvelib/candidates.py,sha256=KR5jxZRSahByOABXbwrX-zNoawa7Gm9Iss-HrvrcvNw,18357 -pip/_internal/resolution/resolvelib/factory.py,sha256=0bbxnUSSjaeTmtIEgeeKtEqhEFfNhv3xpq7j9IaMq2c,28298 -pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705 -pip/_internal/resolution/resolvelib/provider.py,sha256=LzQQyzMVaZYAwLgKInbq-it6mbQL1gX0hGohz5Cr5wg,9915 -pip/_internal/resolution/resolvelib/reporter.py,sha256=3ZVVYrs5PqvLFJkGLcuXoMK5mTInFzl31xjUpDBpZZk,2526 -pip/_internal/resolution/resolvelib/requirements.py,sha256=B1ndvKPSuyyyTEXt9sKhbwminViSWnBrJa7qO2ln4Z0,5455 -pip/_internal/resolution/resolvelib/resolver.py,sha256=UsWuwuTu9aYHIfEBnEb7e1r3tXGgJbSA5LVgQqdVZ2w,11633 -pip/_internal/self_outdated_check.py,sha256=GKSatNlt2cz_CMGxu72FbUzuPaXpWOnIVKOOYIk0gvY,6849 -pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/utils/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/utils/__pycache__/_log.cpython-39.pyc,, -pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc,, -pip/_internal/utils/__pycache__/compat.cpython-39.pyc,, -pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc,, -pip/_internal/utils/__pycache__/datetime.cpython-39.pyc,, -pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc,, -pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc,, -pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc,, -pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc,, -pip/_internal/utils/__pycache__/encoding.cpython-39.pyc,, -pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc,, -pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc,, -pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc,, -pip/_internal/utils/__pycache__/glibc.cpython-39.pyc,, -pip/_internal/utils/__pycache__/hashes.cpython-39.pyc,, -pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc,, -pip/_internal/utils/__pycache__/logging.cpython-39.pyc,, -pip/_internal/utils/__pycache__/misc.cpython-39.pyc,, -pip/_internal/utils/__pycache__/models.cpython-39.pyc,, -pip/_internal/utils/__pycache__/packaging.cpython-39.pyc,, -pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc,, -pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc,, -pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc,, -pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc,, -pip/_internal/utils/__pycache__/urls.cpython-39.pyc,, -pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc,, -pip/_internal/utils/__pycache__/wheel.cpython-39.pyc,, -pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015 -pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665 -pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884 
-pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377 -pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242 -pip/_internal/utils/deprecation.py,sha256=NKo8VqLioJ4nnXXGmW4KdasxF90EFHkZaHeX1fT08C8,3627 -pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206 -pip/_internal/utils/distutils_args.py,sha256=mcAscyp80vTt3xAGTipnpgc83V-_wCvydNELVXLq7JI,1249 -pip/_internal/utils/egg_link.py,sha256=5MVlpz5LirT4iLQq86OYzjXaYF0D4Qk1dprEI7ThST4,2203 -pip/_internal/utils/encoding.py,sha256=bdZ3YgUpaOEBI5MP4-DEXiQarCW3V0rxw1kRz-TaU1Q,1169 -pip/_internal/utils/entrypoints.py,sha256=aPvCnQVi9Hdk35Kloww_D5ibjUpqxgqcJP8O9VuMZek,1055 -pip/_internal/utils/filesystem.py,sha256=rrl-rY1w8TYyKYndUyZlE9ffkQyA4-jI9x_59zXkn5s,5893 -pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716 -pip/_internal/utils/glibc.py,sha256=tDfwVYnJCOC0BNVpItpy8CGLP9BjkxFHdl0mTS0J7fc,3110 -pip/_internal/utils/hashes.py,sha256=anpZfFGIT6HcIj2td9NHtE8AWg6GeAIhwpP8GPvZE0E,4811 -pip/_internal/utils/inject_securetransport.py,sha256=o-QRVMGiENrTJxw3fAhA7uxpdEdw6M41TjHYtSVRrcg,795 -pip/_internal/utils/logging.py,sha256=Rvght-fDXL70VWib1cpgZ3iU-kXODV98bNeLUlbqVto,11522 -pip/_internal/utils/misc.py,sha256=yLQuNWaRGtSGQqK7GT-Kj2mO7oXnPcJZkr5-9Q7AGwE,18392 -pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193 -pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108 -pip/_internal/utils/setuptools_build.py,sha256=vNH9hQB9wT6d-h1hVQhBKw91jNeT42meHpVeii-urOI,5652 -pip/_internal/utils/subprocess.py,sha256=vIWGpet5ARBmZ2Qn4NEHNgzCOduqbPIuByZmhhmr6mM,9182 -pip/_internal/utils/temp_dir.py,sha256=zob3PYMVevONkheOMUp_4jDofrEY3HIu5DHK78cSspI,7662 -pip/_internal/utils/unpacking.py,sha256=HUFlMEyCa9dPwdLh6sWeh95DeKytV8rsOyKShEw9y6g,8906 -pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759 -pip/_internal/utils/virtualenv.py,sha256=4_48qMzCwB_F5jIK5BC_ua7uiAMVifmQWU9NdaGUoVA,3459 -pip/_internal/utils/wheel.py,sha256=lXOgZyTlOm5HmK8tw5iw0A3_5A6wRzsXHOaQkIvvloU,4549 -pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596 -pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc,, -pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc,, -pip/_internal/vcs/__pycache__/git.cpython-39.pyc,, -pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc,, -pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc,, -pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc,, -pip/_internal/vcs/bazaar.py,sha256=IGb5ca1xSZfgegRD2_JeyoZPrQQHs7lEYEIgpVsKpoU,3047 -pip/_internal/vcs/git.py,sha256=mjhwudCx9WlLNkxZ6_kOKmueF0rLoU2i1xeASKF6yiQ,18116 -pip/_internal/vcs/mercurial.py,sha256=Bzbd518Jsx-EJI0IhIobiQqiRsUv5TWYnrmRIFWE0Gw,5238 -pip/_internal/vcs/subversion.py,sha256=TEMRdwECvMcXakZX0pTNUep79kmBYkWDkWFkrYmcmac,11718 -pip/_internal/vcs/versioncontrol.py,sha256=KUOc-hN51em9jrqxKwUR3JnkgSE-xSOqMiiJcSaL6B8,22811 -pip/_internal/wheel_builder.py,sha256=65rOA8FSYt3c3HyqEw17uujjlCgqmoKEIv6rv9xN2NM,12307 -pip/_vendor/__init__.py,sha256=xjcBX0EP50pkaMdCssrsBXoZgo2hTtYxlcH1CIyA3T4,4708 -pip/_vendor/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/__pycache__/distro.cpython-39.pyc,, -pip/_vendor/__pycache__/six.cpython-39.pyc,, -pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc,, -pip/_vendor/cachecontrol/__init__.py,sha256=1j_YQfjmiix6YyouLrftC6NzksAm8e8xGSjMKMRPIkM,465 
-pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc,, -pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc,, -pip/_vendor/cachecontrol/_cmd.py,sha256=lxUXqfNTVx84zf6tcWbkLZHA6WVBRtJRpfeA9ZqhaAY,1379 -pip/_vendor/cachecontrol/adapter.py,sha256=ew9OYEQHEOjvGl06ZsuX8W3DAvHWsQKHwWAxISyGug8,5033 -pip/_vendor/cachecontrol/cache.py,sha256=eMS9Bn9JWQkHiIYA5GPRBqKVU95uS-yXkxrzpoafRig,917 -pip/_vendor/cachecontrol/caches/__init__.py,sha256=gGFOtIH8QDRvkP4YAfGIh-u9YYcGZVxwLM1-6e1mPNI,170 -pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc,, -pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc,, -pip/_vendor/cachecontrol/caches/file_cache.py,sha256=P2KHcNXiqxEW7fCq5KC-NYHGSk0nNR9NIKuN-vBTn-E,4251 -pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=tu_YBV7EV8vdBRGazUErkoRqYYjSBmNcB8dZ7BNomqk,940 -pip/_vendor/cachecontrol/compat.py,sha256=LNx7vqBndYdHU8YuJt53ab_8rzMGTXVrvMb7CZJkxG0,778 -pip/_vendor/cachecontrol/controller.py,sha256=9DSEiV58Gx7Ce69fLCrRcpN-_sHzXTY4ol9bEviatR0,15625 -pip/_vendor/cachecontrol/filewrapper.py,sha256=X4BAQOO26GNOR7nH_fhTzAfeuct2rBQcx_15MyFBpcs,3946 -pip/_vendor/cachecontrol/heuristics.py,sha256=8kAyuZLSCyEIgQr6vbUwfhpqg9ows4mM0IV6DWazevI,4154 -pip/_vendor/cachecontrol/serialize.py,sha256=dlySaeA5U7Q5eHvjiObgo1M8j8_huVjfWjid7Aq-r8c,6783 -pip/_vendor/cachecontrol/wrapper.py,sha256=X3-KMZ20Ho3VtqyVaXclpeQpFzokR5NE8tZSfvKVaB8,774 -pip/_vendor/certifi/__init__.py,sha256=xWdRgntT3j1V95zkRipGOg_A1UfEju2FcpujhysZLRI,62 -pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 -pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc,, -pip/_vendor/certifi/__pycache__/core.cpython-39.pyc,, -pip/_vendor/certifi/cacert.pem,sha256=-og4Keu4zSpgL5shwfhd4kz0eUnVILzrGCi0zRy2kGw,265969 -pip/_vendor/certifi/core.py,sha256=gOFd0zHYlx4krrLEn982esOtmz3djiG0BFSDhgjlvcI,2840 -pip/_vendor/chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271 -pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc,, 
-pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc,, -pip/_vendor/chardet/__pycache__/version.cpython-39.pyc,, -pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 -pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 -pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 -pip/_vendor/chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839 -pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 -pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc,, -pip/_vendor/chardet/cli/chardetect.py,sha256=XK5zqjUG2a4-y6eLHZ8ThYcp6WWUrdlmELxNypcc2SE,2747 -pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 -pip/_vendor/chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200 -pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 -pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 -pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 -pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 -pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 -pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 -pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 -pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 -pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 -pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 -pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 
-pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 -pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 -pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 -pip/_vendor/chardet/langbulgarianmodel.py,sha256=rk9CJpuxO0bObboJcv6gNgWuosYZmd8qEEds5y7DS_Y,105697 -pip/_vendor/chardet/langgreekmodel.py,sha256=S-uNQ1ihC75yhBvSux24gLFZv3QyctMwC6OxLJdX-bw,99571 -pip/_vendor/chardet/langhebrewmodel.py,sha256=DzPP6TPGG_-PV7tqspu_d8duueqm7uN-5eQ0aHUw1Gg,98776 -pip/_vendor/chardet/langhungarianmodel.py,sha256=RtJH7DZdsmaHqyK46Kkmnk5wQHiJwJPPJSqqIlpeZRc,102498 -pip/_vendor/chardet/langrussianmodel.py,sha256=THqJOhSxiTQcHboDNSc5yofc2koXXQFHFyjtyuntUfM,131180 -pip/_vendor/chardet/langthaimodel.py,sha256=R1wXHnUMtejpw0JnH_JO8XdYasME6wjVqp1zP7TKLgg,103312 -pip/_vendor/chardet/langturkishmodel.py,sha256=rfwanTptTwSycE4-P-QasPmzd-XVYgevytzjlEzBBu8,95946 -pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 -pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 -pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 -pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 -pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc,, -pip/_vendor/chardet/metadata/languages.py,sha256=41tLq3eLSrBEbEVVQpVGFq9K7o1ln9b1HpY1l0hCUQo,19474 -pip/_vendor/chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136 -pip/_vendor/chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309 -pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 -pip/_vendor/chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503 -pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 -pip/_vendor/chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242 -pip/_vendor/colorama/__init__.py,sha256=pCdErryzLSzDW5P-rRPBlPLqbBtIRNJB6cMgoeJns5k,239 -pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc,, -pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc,, -pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc,, -pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc,, -pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc,, -pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 -pip/_vendor/colorama/ansitowin32.py,sha256=yV7CEmCb19MjnJKODZEEvMH_fnbJhwnpzo4sxZuGXmA,10517 -pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 -pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 -pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 -pip/_vendor/distlib/__init__.py,sha256=HTGLP7dnTRTQCbEZNGUxBq-0sobr0KQUMn3yd6uEObA,581 -pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/database.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/index.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc,, 
-pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/util.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/version.cpython-39.pyc,, -pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc,, -pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 -pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc,, -pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc,, -pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc,, -pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc,, -pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 -pip/_vendor/distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 -pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 -pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 -pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 -pip/_vendor/distlib/compat.py,sha256=fbsxc5PfJ2wBx1K4k6mQ2goAYs-GZW0tcOPIlE_vf0I,41495 -pip/_vendor/distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 -pip/_vendor/distlib/index.py,sha256=UfcimNW19AB7IKWam4VaJbXuCBvArKfSxhV16EwavzE,20739 -pip/_vendor/distlib/locators.py,sha256=AKlB3oZvfOTg4E0CtfwOzujFL19X5V4XUA4eHdKOu44,51965 -pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 -pip/_vendor/distlib/markers.py,sha256=9c70ISEKwBjmUOHuIdOygVnRVESOKdNYp9a2TVn4qrI,4989 -pip/_vendor/distlib/metadata.py,sha256=vatoxFdmBr6ie-sTVXVNPOPG3uwMDWJTnEECnm7xDCw,39109 -pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 -pip/_vendor/distlib/scripts.py,sha256=tjSwENINeV91ROZxec5zTSMRg2jEeKc4enyCHDzNvEE,17720 -pip/_vendor/distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768 -pip/_vendor/distlib/t64-arm.exe,sha256=8WGDh6aI8WJAjngRNQpyJpB21Sv20PCYYFSNW1fWd6w,180736 -pip/_vendor/distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984 -pip/_vendor/distlib/util.py,sha256=0Uq_qa63FCLtdyNdWvMnmPbiSvVa-ykHM2E8HT7LSIU,67766 -pip/_vendor/distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513 -pip/_vendor/distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112 -pip/_vendor/distlib/w64-arm.exe,sha256=Q_HdzVu9zxYdaBa3m0iJ5_ddLOEqtPe8x30WADoXza8,166400 -pip/_vendor/distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840 -pip/_vendor/distlib/wheel.py,sha256=pj5VVCjqZMcHvgizORWwAFPS7hOk61CZ59dxP8laQ4E,42943 -pip/_vendor/distro.py,sha256=O1EeHMq1-xAO373JI2_6pYEtd09yEkxtmrYkdY-9S-w,48414 -pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160 -pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc,, -pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc,, -pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc,, -pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc,, -pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc,, -pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc,, 
-pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc,, -pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728 -pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353 -pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040 -pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109 -pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc,, -pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 -pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 -pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931 -pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464 -pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc,, -pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc,, -pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 -pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 -pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 -pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 -pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 -pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897 -pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 -pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186 -pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759 -pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 -pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-39.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-39.pyc,, -pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 -pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 -pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 -pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-39.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-39.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-39.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-39.pyc,, 
-pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565 -pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 -pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836 -pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766 -pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719 -pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-39.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-39.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-39.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-39.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-39.pyc,, -pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 -pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 -pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551 -pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357 -pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 -pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 -pip/_vendor/idna/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/codec.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/compat.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/core.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/idnadata.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/intranges.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/package_data.cpython-39.pyc,, -pip/_vendor/idna/__pycache__/uts46data.cpython-39.pyc,, -pip/_vendor/idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 -pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 -pip/_vendor/idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795 -pip/_vendor/idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025 -pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 -pip/_vendor/idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21 -pip/_vendor/idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400 -pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118 -pip/_vendor/msgpack/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/msgpack/__pycache__/_version.cpython-39.pyc,, -pip/_vendor/msgpack/__pycache__/exceptions.cpython-39.pyc,, -pip/_vendor/msgpack/__pycache__/ext.cpython-39.pyc,, -pip/_vendor/msgpack/__pycache__/fallback.cpython-39.pyc,, -pip/_vendor/msgpack/_version.py,sha256=JpTcnRd3YUioA24NDtDZbLW0Nhl2yA-N1Rq2lLDBB-g,20 -pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 -pip/_vendor/msgpack/ext.py,sha256=4l356Y4sVEcvCla2dh_cL57vh4GMhZfa3kuWHFHYz6A,6088 -pip/_vendor/msgpack/fallback.py,sha256=L5jriXysURbf6rPbbHbvXgvoFrKZiryIBmujMTcrf3A,34475 -pip/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 -pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 -pip/_vendor/packaging/__pycache__/__about__.cpython-39.pyc,, 
-pip/_vendor/packaging/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/_structures.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/markers.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/requirements.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/tags.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/utils.cpython-39.pyc,, -pip/_vendor/packaging/__pycache__/version.cpython-39.pyc,, -pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 -pip/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 -pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487 -pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676 -pip/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 -pip/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 -pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 -pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 -pip/_vendor/pep517/__init__.py,sha256=Y1bATL2qbFNN6M_DQa4yyrwqjpIiL-j9T6kBmR0DS14,130 -pip/_vendor/pep517/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/build.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/check.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/colorlog.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/compat.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/dirtools.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/envbuild.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/meta.cpython-39.pyc,, -pip/_vendor/pep517/__pycache__/wrappers.cpython-39.pyc,, -pip/_vendor/pep517/build.py,sha256=2bar6EdjwIz2Dlfy94qdxn3oA9mVnnny40mfoT5f-qI,3457 -pip/_vendor/pep517/check.py,sha256=bCORq1WrHjhpTONa-zpAqG0EB9rHNuhO1ORu6DsDuL8,6084 -pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 -pip/_vendor/pep517/compat.py,sha256=NmLImE5oiDT3gbEhJ4w7xeoMFcpAPrGu_NltBytSJUY,1253 -pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129 -pip/_vendor/pep517/envbuild.py,sha256=zFde--rmzjXMLXcm7SA_3hDtgk5VCTA8hjpk88RbF6E,6100 -pip/_vendor/pep517/in_process/__init__.py,sha256=MyWoAi8JHdcBv7yXuWpUSVADbx6LSB9rZh7kTIgdA8Y,563 -pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-39.pyc,, -pip/_vendor/pep517/in_process/_in_process.py,sha256=D3waguyNSGcwosociD5USfcycYr2RCzCjYtxX5UHQmQ,11201 -pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463 -pip/_vendor/pep517/wrappers.py,sha256=impq7Cz_LL1iDF1iiOzYWB4MaEu6O6Gps7TJ5qsJz1Q,13429 -pip/_vendor/pkg_resources/__init__.py,sha256=NnpQ3g6BCHzpMgOR_OLBmYtniY4oOzdKpwqghfq_6ug,108287 -pip/_vendor/pkg_resources/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-39.pyc,, -pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 -pip/_vendor/platformdirs/__init__.py,sha256=Aizpxewwd4nY63Gqw-Od1Rso9Ah4bSoc6rkx-GBRu2Y,12676 
-pip/_vendor/platformdirs/__main__.py,sha256=ZmsnTxEOxtTvwa-Y_Vfab_JN3X4XCVeN8X0yyy9-qnc,1176 -pip/_vendor/platformdirs/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/__main__.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/android.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/api.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/macos.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/unix.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/version.cpython-39.pyc,, -pip/_vendor/platformdirs/__pycache__/windows.cpython-39.pyc,, -pip/_vendor/platformdirs/android.py,sha256=xhlD4NmrKCARe5lgnpBGYo4lOYxEOBOByNDNYy91gEE,4012 -pip/_vendor/platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910 -pip/_vendor/platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655 -pip/_vendor/platformdirs/unix.py,sha256=b4aVYTz0qZ50HntwOXo8r6tp82jAa3qTjxw-WlnC2yc,6910 -pip/_vendor/platformdirs/version.py,sha256=bXzLJCe23FNQRQrf7ZRWKejxWnct_wft7dxdkMGT33E,80 -pip/_vendor/platformdirs/windows.py,sha256=ISruopR5UGBePC0BxCxXevkZYfjJsIZc49YWU5iYfQ4,6439 -pip/_vendor/progress/__init__.py,sha256=1HejNZtv2ouUNQeStUDAtZrtwkz_3FmYKQ476hJ7zOs,5294 -pip/_vendor/progress/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/progress/__pycache__/bar.cpython-39.pyc,, -pip/_vendor/progress/__pycache__/colors.cpython-39.pyc,, -pip/_vendor/progress/__pycache__/counter.cpython-39.pyc,, -pip/_vendor/progress/__pycache__/spinner.cpython-39.pyc,, -pip/_vendor/progress/bar.py,sha256=GbedY0oZ-Q1duXjmvVLO0tSf-uTSH7hJ3zzyI91Esws,2942 -pip/_vendor/progress/colors.py,sha256=cCYXQnYFYVmQKKmYEbQ_lj6SPSFzdw4FN98F2x2kR-U,2655 -pip/_vendor/progress/counter.py,sha256=zYt9DWH0_05s8Q9TrJwHVud-WwsyyaR3PwYtk5hxwwQ,1613 -pip/_vendor/progress/spinner.py,sha256=u5ElzW94XEiLGH-aAlr54VJtKfeK745xr6UfGvvflzU,1461 -pip/_vendor/pygments/__init__.py,sha256=CAmA9UthykwxvtutUcH0IxqtiyQcSg6CmYdM-jKlcRY,3002 -pip/_vendor/pygments/__main__.py,sha256=X7rGLMUC54EXgO14FZ9goKXZDmhPzKXTsUglmb_McIU,353 -pip/_vendor/pygments/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/__main__.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/cmdline.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/console.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/filter.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/formatter.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/lexer.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/modeline.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/plugin.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/regexopt.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/scanner.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/sphinxext.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/style.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/token.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/unistring.cpython-39.pyc,, -pip/_vendor/pygments/__pycache__/util.cpython-39.pyc,, -pip/_vendor/pygments/cmdline.py,sha256=XpsyWgErcSqHC7rXiYKLF3Y61Uy8SR2DNQDDhZGuezg,23408 -pip/_vendor/pygments/console.py,sha256=QZXBUAkyl4dPLQ1e6XHjQu3mmXBWvuGQwsQT2q1mtCY,1697 -pip/_vendor/pygments/filter.py,sha256=35iMZiB1rcuogxokm92kViB2DPXPp_wWoxWuMmwvvzY,1938 -pip/_vendor/pygments/filters/__init__.py,sha256=-veOimzCyYGEARru2Dfo6ofSYcZ8tGsIVuMprtaZQ24,40292 -pip/_vendor/pygments/filters/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pygments/formatter.py,sha256=zSBbX2U_OOriy7SJvSTK6OAxjuXtROWxQlNpJEJZjBA,2917 
-pip/_vendor/pygments/formatters/__init__.py,sha256=fjkYDy5-F998XczKi0ymHFayr5ObIRLHF8cgp9k8kpA,5119 -pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/groff.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/html.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/img.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/irc.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/latex.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/other.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/svg.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-39.pyc,, -pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-39.pyc,, -pip/_vendor/pygments/formatters/_mapping.py,sha256=3A1rYSjYN9MLduCFWy2_mYhllPVpwlw55anRYnPXX8w,6516 -pip/_vendor/pygments/formatters/bbcode.py,sha256=cSKMOioUnE4TzvCCsK4IbJ6G78W07ZwHtkz4V1Wte0U,3314 -pip/_vendor/pygments/formatters/groff.py,sha256=ULgMKvGeLswX0KZn3IBp0p0U3rruiSHBtpl6O5qbqLs,5005 -pip/_vendor/pygments/formatters/html.py,sha256=0jM7Jc4xA4tsjmPq35uklm_En_OVdcNb0__SEXp2pDQ,35330 -pip/_vendor/pygments/formatters/img.py,sha256=r4iag_jCfyv_LhIt-1fRDeVEEoAfVJzkD9nZChIwiS8,21819 -pip/_vendor/pygments/formatters/irc.py,sha256=gi_IeIZeNaTfTMtvseLigZdS6lNicN7r7O7rnI6myo0,5871 -pip/_vendor/pygments/formatters/latex.py,sha256=qZUerrHt2Nn2aB4gJcdqj99qBkIxl_1v1ukYsf230Gk,18930 -pip/_vendor/pygments/formatters/other.py,sha256=Q01LtkqPZ8m_EYdgMVzXPUGjHoL00lXI3By97wzytYU,5073 -pip/_vendor/pygments/formatters/pangomarkup.py,sha256=ZpjALTSuGFwviJd5kOYwr-1NgqxCX3XRJrjXC7x1UbQ,2212 -pip/_vendor/pygments/formatters/rtf.py,sha256=qh7-z_wbUsTY6z7fZUGrYECYBlWB0wEdBwIZVEVybL0,5014 -pip/_vendor/pygments/formatters/svg.py,sha256=T7Jj004I3JUPOr48aAhQ368K2qWCciUyMQ2tdU-LB-4,7335 -pip/_vendor/pygments/formatters/terminal.py,sha256=cRD5hitINOkYlGZo9ma252vpJYPSGNgLivrsm6zGyec,4674 -pip/_vendor/pygments/formatters/terminal256.py,sha256=Bvz9zZL3UWc94TDm1GhKMI4x0BTit0XplhyRL0zmtkw,11753 -pip/_vendor/pygments/lexer.py,sha256=ECXWlEsbRnKs_njozZns6BGQ4riTMzct_BzAr3zV6dY,31937 -pip/_vendor/pygments/lexers/__init__.py,sha256=6Ds0GVBP3jrIU02wmjRdpoL4eFGhwT2IVD1zf3cV5_Y,11307 -pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-39.pyc,, -pip/_vendor/pygments/lexers/__pycache__/python.cpython-39.pyc,, -pip/_vendor/pygments/lexers/_mapping.py,sha256=jAxmvh5wvNkD-p3Fh6E7hY_B0sGbcxWRfseT6iq7ex4,70032 -pip/_vendor/pygments/lexers/python.py,sha256=LXnk43Lcngqn9xj6eRqdk2f73oF4kHZWiwgHMM_RlVM,52776 -pip/_vendor/pygments/modeline.py,sha256=37fen3cf1moCz4vMVJqX41eAQCmj8pzUchikgPcHp-U,986 -pip/_vendor/pygments/plugin.py,sha256=zGSig3S7QX-3o6RDxd4_Uvice_t25l_BN9aQQ9k8vmU,1727 -pip/_vendor/pygments/regexopt.py,sha256=mj8Fgu3sT0d5PZwRwDLexEvVOQbuHeosubQnqVwgiqs,3072 -pip/_vendor/pygments/scanner.py,sha256=nGoHy-Npk2ylUd4bws_CJN1hK785Xqo8e0teRmNX2jo,3091 -pip/_vendor/pygments/sphinxext.py,sha256=FZ2puvLe2Bztqtj6UJvQd7D8TvtOZ1GsfRJObvH59tE,4630 -pip/_vendor/pygments/style.py,sha256=lGyan5bU42q1kGMfFqafwL3g1j5EurTvfkv8vdP7NzQ,6257 
-pip/_vendor/pygments/styles/__init__.py,sha256=Qx2zq6ufbDNE2cTp51M-s9zW-sDE-KLIqFw31qr3Bhg,3252 -pip/_vendor/pygments/styles/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pygments/token.py,sha256=lNPgeaQTzu2DEUi6n_lxAIU7uy4DVj8LMI3nSVnTjks,6143 -pip/_vendor/pygments/unistring.py,sha256=Xs0FzOzE0l0iWRoTlcgi-Q_kAMdF5Gt5FL_goGKJc98,63188 -pip/_vendor/pygments/util.py,sha256=s9n8BQXIxG3lIwCPWv5-ci8yhaqq5JbEVK9v8Z-8_3I,9123 -pip/_vendor/pyparsing/__init__.py,sha256=jXheGTFT1b6r_4WxuOE0uVUqiouLJ3WHzOScpLieRgQ,9107 -pip/_vendor/pyparsing/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/actions.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/common.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/core.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/exceptions.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/helpers.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/results.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/testing.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/unicode.cpython-39.pyc,, -pip/_vendor/pyparsing/__pycache__/util.cpython-39.pyc,, -pip/_vendor/pyparsing/actions.py,sha256=60v7mETOBzc01YPH_qQD5isavgcSJpAfIKpzgjM3vaU,6429 -pip/_vendor/pyparsing/common.py,sha256=lFL97ooIeR75CmW5hjURZqwDCTgruqltcTCZ-ulLO2Q,12936 -pip/_vendor/pyparsing/core.py,sha256=GtQsD06HlwKPc7M8K8hyOuOW-cRnd87AxAHq-ad5lEk,212248 -pip/_vendor/pyparsing/diagram/__init__.py,sha256=h0gsUwmo5N3shgvfXVQTtqvTpUAv-ZdQjSQ6IUJmsxY,22165 -pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/pyparsing/exceptions.py,sha256=H4D9gqMavqmAFSsdrU_J6bO-jA-T-A7yvtXWZpooIUA,9030 -pip/_vendor/pyparsing/helpers.py,sha256=kqpIZFG-y0fQ3g_TmloYllo9we6YCYiewZMXIK0y5wc,38299 -pip/_vendor/pyparsing/results.py,sha256=4D-oURF1cLeL7k0d3zMqUuWH_gTjop_OrZwik9O0HXU,25339 -pip/_vendor/pyparsing/testing.py,sha256=szs8AKZREZMhL0y0vsMfaTVAnpqPHetg6VKJBNmc4QY,13388 -pip/_vendor/pyparsing/unicode.py,sha256=IR-ioeGY29cZ49tG8Ts7ITPWWNP5G2DcZs58oa8zn44,10381 -pip/_vendor/pyparsing/util.py,sha256=kq772O5YSeXOSdP-M31EWpbH_ayj7BMHImBYo9xPD5M,6805 -pip/_vendor/requests/__init__.py,sha256=6IUFQM6K9V2NIu4fe4LtUsN21-TFbw_w3EfPpdUN-qc,5130 -pip/_vendor/requests/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/__version__.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/_internal_utils.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/adapters.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/api.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/auth.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/certs.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/compat.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/cookies.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/exceptions.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/help.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/hooks.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/models.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/packages.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/sessions.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/status_codes.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/structures.cpython-39.pyc,, -pip/_vendor/requests/__pycache__/utils.cpython-39.pyc,, -pip/_vendor/requests/__version__.py,sha256=q8miOQaomOv3S74lK4eQs1zZ5jwcnOusyEU-M2idhts,441 -pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 -pip/_vendor/requests/adapters.py,sha256=WazYJQ_b2LHhNDb_y0hscNlWVsSe5ca5I3pymPrer5w,21861 
-pip/_vendor/requests/api.py,sha256=hjuoP79IAEmX6Dysrw8t032cLfwLHxbI_wM4gC5G9t0,6402 -pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207 -pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 -pip/_vendor/requests/compat.py,sha256=N1281mkcTluMjKqCSLf88LR6HNOygEhS1TbR9LLsoVY,2114 -pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 -pip/_vendor/requests/exceptions.py,sha256=VcpBXOL-9JYhNbK8OZxCIImBgpQSXJlUelDPf1f-pmM,3446 -pip/_vendor/requests/help.py,sha256=dyhe3lcmHXnFCzDiZVjcGmVvvO_jtsfAm-AC542ndw8,3972 -pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 -pip/_vendor/requests/models.py,sha256=7pzscX_47qxx7-zEaBWGxMoB33Vdf6HLoUKZh1ktEvM,35116 -pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 -pip/_vendor/requests/sessions.py,sha256=Zu-Y9YPlwTIsyFx1hvIrc3ziyeFpuFPqcOuSuz8BNWs,29835 -pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188 -pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005 -pip/_vendor/requests/utils.py,sha256=siud-FQ6xgKFbL49DRvAb3PMQMMHoeCL_TCmuHh9AUU,33301 -pip/_vendor/resolvelib/__init__.py,sha256=UL-B2BDI0_TRIqkfGwLHKLxY-LjBlomz7941wDqzB1I,537 -pip/_vendor/resolvelib/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/resolvelib/__pycache__/providers.cpython-39.pyc,, -pip/_vendor/resolvelib/__pycache__/reporters.cpython-39.pyc,, -pip/_vendor/resolvelib/__pycache__/resolvers.cpython-39.pyc,, -pip/_vendor/resolvelib/__pycache__/structs.cpython-39.pyc,, -pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-39.pyc,, -pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156 -pip/_vendor/resolvelib/providers.py,sha256=roVmFBItQJ0TkhNua65h8LdNny7rmeqVEXZu90QiP4o,5872 -pip/_vendor/resolvelib/reporters.py,sha256=fW91NKf-lK8XN7i6Yd_rczL5QeOT3sc6AKhpaTEnP3E,1583 -pip/_vendor/resolvelib/resolvers.py,sha256=2wYzVGBGerbmcIpH8cFmgSKgLSETz8jmwBMGjCBMHG4,17592 -pip/_vendor/resolvelib/structs.py,sha256=IVIYof6sA_N4ZEiE1C1UhzTX495brCNnyCdgq6CYq28,4794 -pip/_vendor/rich/__init__.py,sha256=wF1th4JGBCVC02xfaw8j6P2MrFcJaQJL72scKtEmDYQ,5804 -pip/_vendor/rich/__main__.py,sha256=vd1PP-o7_1un-ThdgMU9LHV-D8z56yz_-fryczn38eE,8810 -pip/_vendor/rich/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/__main__.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_cell_widths.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_emoji_codes.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_emoji_replace.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_extension.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_inspect.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_log_render.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_loop.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_lru_cache.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_palettes.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_pick.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_ratio.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_spinners.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_stack.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_timer.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_windows.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/_wrap.cpython-39.pyc,, 
-pip/_vendor/rich/__pycache__/abc.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/align.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/ansi.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/bar.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/box.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/cells.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/color.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/color_triplet.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/columns.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/console.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/constrain.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/containers.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/control.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/default_styles.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/diagnose.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/emoji.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/errors.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/file_proxy.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/filesize.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/highlighter.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/json.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/jupyter.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/layout.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/live.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/live_render.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/logging.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/markup.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/measure.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/padding.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/pager.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/palette.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/panel.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/pretty.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/progress.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/progress_bar.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/prompt.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/protocol.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/region.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/repr.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/rule.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/scope.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/screen.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/segment.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/spinner.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/status.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/style.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/styled.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/syntax.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/table.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/tabulate.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/terminal_theme.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/text.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/theme.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/themes.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/traceback.cpython-39.pyc,, -pip/_vendor/rich/__pycache__/tree.cpython-39.pyc,, -pip/_vendor/rich/_cell_widths.py,sha256=2n4EiJi3X9sqIq0O16kUZ_zy6UYMd3xFfChlKfnW1Hc,10096 -pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235 -pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064 -pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265 -pip/_vendor/rich/_inspect.py,sha256=vq6BjewwEvddjcBTr_lCcjYQBsKi92aTNpcXyaA5ERA,7444 
-pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225 -pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236 -pip/_vendor/rich/_lru_cache.py,sha256=M7H1ZQF32o6SxrpOur9zTIhEHlNXT9XnrcdhruUmG5I,1246 -pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063 -pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423 -pip/_vendor/rich/_ratio.py,sha256=2lLSliL025Y-YMfdfGbutkQDevhcyDqc-DtUYW9mU70,5472 -pip/_vendor/rich/_spinners.py,sha256=huT1biTlwyp9Lm8S7bLfVzg1psUaIH5xHDwTaWEHVh0,26521 -pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351 -pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417 -pip/_vendor/rich/_windows.py,sha256=nBO71icHMIHlzT7hg6fkoIdh1mT-5MvDdPDwunkshyw,2065 -pip/_vendor/rich/_wrap.py,sha256=OtnSxnERkuNlSM1d_MYtNg8KIYTcTBk3peg16dCZH_U,1804 -pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890 -pip/_vendor/rich/align.py,sha256=2zRHV8SzR5eP-vQkSDgjmgsBLBluCBwykgejAW6oRD0,10425 -pip/_vendor/rich/ansi.py,sha256=QaVVkfvVL6C3OsuWI9iQ-iJFkMsMohjYlxgMLnVTEPo,6676 -pip/_vendor/rich/bar.py,sha256=a7UD303BccRCrEhGjfMElpv5RFYIinaAhAuqYqhUvmw,3264 -pip/_vendor/rich/box.py,sha256=o0ywz1iW0WjGLPrRVDAZPh1CVPEgAOaWsn8Bf3sf43g,9069 -pip/_vendor/rich/cells.py,sha256=NadN20gFxE8Aj-2S3Drn7qgn-ZpsRZcNnTNtweRL7rA,4285 -pip/_vendor/rich/color.py,sha256=SD3yTf3t8japb-jOv8GYCMCDqyzpipzXS_0rAXhSlU4,17285 -pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054 -pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131 -pip/_vendor/rich/console.py,sha256=bioCy8012eZ8PIOBxMyyqxYPltKk2pGEG9jmwylNCQk,81236 -pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288 -pip/_vendor/rich/containers.py,sha256=aKgm5UDHn5Nmui6IJaKdsZhbHClh_X7D-_Wg8Ehrr7s,5497 -pip/_vendor/rich/control.py,sha256=qxg6Yjd78XuF0VxthlT8O4dpvpACYwKkBfm2S4-IvHA,5298 -pip/_vendor/rich/default_styles.py,sha256=At42PcWzmnYWcx5fUOKyOUpI8HK5m4ItZqxkgHToaMs,7614 -pip/_vendor/rich/diagnose.py,sha256=4L8SZfbqjIRotzJ39QzD9-d4I80FyV1mNKHryg1eArE,183 -pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501 -pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642 -pip/_vendor/rich/file_proxy.py,sha256=fHeReSO3VJ7IbH_9ri-OrPYbFC3UYOzeTNjngiiWOcY,1613 -pip/_vendor/rich/filesize.py,sha256=oQJnM5_7ygkpzt3GtNq5l3F6gmB7YahBA5vpdQVKLwI,2511 -pip/_vendor/rich/highlighter.py,sha256=AdhjC0meTYswZ_xKgka0cRYdNjLABLUzHAbyF3QpPWo,4894 -pip/_vendor/rich/json.py,sha256=RCm4lXBXrjvXHpqrWPH8wdGP0jEo4IohLmkddlhRY18,5051 -pip/_vendor/rich/jupyter.py,sha256=4sxNAwJs4g3dYfWy_enPw9fp0Tdn-82tV4T9uh9vAOM,3025 -pip/_vendor/rich/layout.py,sha256=b64KMDP2EPiC103P-v-_VZKGY13oWiiGS418P_KRRlc,14048 -pip/_vendor/rich/live.py,sha256=OKxMaFU5sFfuR--cJftGYjSvg1VPQri1U_DNZUjCsvI,13711 -pip/_vendor/rich/live_render.py,sha256=zElm3PrfSIvjOce28zETHMIUf9pFYSUA5o0AflgUP64,3667 -pip/_vendor/rich/logging.py,sha256=YNcCSK6pCo2Wg6JKqScAe6VgFqebHBnS5nDnBO4gXAA,10868 -pip/_vendor/rich/markup.py,sha256=hsVW_k1TIvj5OPPQ12ihAii9HSVa8N1TStvA5B2GGpo,8058 -pip/_vendor/rich/measure.py,sha256=Z74XvzIgLZm0xH-QIo1uX5d4oahavHe8D8MKyxLNqPQ,5258 -pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970 -pip/_vendor/rich/pager.py,sha256=VK_2EfH0JduZWdyV-KZma06bvi_V5PWmHG6W7BoiaTg,838 
-pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396 -pip/_vendor/rich/panel.py,sha256=O6ORyIhDcOLSEasTjpcDvmhvIcppPGCeQoXpoycIUT8,8637 -pip/_vendor/rich/pretty.py,sha256=HAB68BpYysaL1EXeV4X5Tt-U2hDlcLpbFz06fkojWWE,32572 -pip/_vendor/rich/progress.py,sha256=jcgi7aMnQ_YjSpAmQkalwtNsgVn9i56SeZGprr7tuOk,35926 -pip/_vendor/rich/progress_bar.py,sha256=ELiBaxJOgsRYKpNIrot7BC0bFXvmf8cTd6nxI02BbK0,7762 -pip/_vendor/rich/prompt.py,sha256=gKVd13YWv6jedzwcRPZGUINBjC-xcJhJ_xz_NvMW80c,11307 -pip/_vendor/rich/protocol.py,sha256=Vx6n4fEoSDhzSup8t3KH0iK2RWyssIOks5E0S1qw1GA,1401 -pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166 -pip/_vendor/rich/repr.py,sha256=1A0U0_ibG_bZbw71pUBIctO9Az-CQUuyOTbiKcJOwyw,4309 -pip/_vendor/rich/rule.py,sha256=cPK6NYo4kzh-vM_8a-rXajXplsbaHa6ahErYvGSsrJ0,4197 -pip/_vendor/rich/scope.py,sha256=HX13XsJfqzQHpPfw4Jn9JmJjCsRj9uhHxXQEqjkwyLA,2842 -pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591 -pip/_vendor/rich/segment.py,sha256=MBBAWaHyqCQFCfiNbrTW4BGaFR1uU31XktJ1S3Taqb4,23916 -pip/_vendor/rich/spinner.py,sha256=V6dW0jIk5IO0_2MyxyftQf5VjCHI0T2cRhJ4F31hPIQ,4312 -pip/_vendor/rich/status.py,sha256=gJsIXIZeSo3urOyxRUjs6VrhX5CZrA0NxIQ-dxhCnwo,4425 -pip/_vendor/rich/style.py,sha256=AD1I7atfclsFCtGeL8ronH1Jj-02WLp9ZQ2VYqmpBjM,26469 -pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258 -pip/_vendor/rich/syntax.py,sha256=pJAD08ywowg5xVwTGCqUOMpDYskjoMoDYEV-hryEX5s,26994 -pip/_vendor/rich/table.py,sha256=oQAEBaV4zMUPyg_tSA93_GrCirdIf-osolxf9wb3pEo,36757 -pip/_vendor/rich/tabulate.py,sha256=nl0oeNbiXectEgTHyj3K7eN4NZMISpaogpOdZyEOGbs,1700 -pip/_vendor/rich/terminal_theme.py,sha256=E0nI_ycFpvflamt-KVCY4J52LmUjRi1Y6ICB-Ef3gMo,1459 -pip/_vendor/rich/text.py,sha256=auX3LpY-I6PBiNyxB3o3LyMEx7lna2cx9IbNQJDwtw8,44424 -pip/_vendor/rich/theme.py,sha256=GKNtQhDBZKAzDaY0vQVQQFzbc0uWfFe6CJXA-syT7zQ,3627 -pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102 -pip/_vendor/rich/traceback.py,sha256=hAU3IR295eFuup_px2NU4aCEWu7KQs1qpZbnqoHCtR0,25935 -pip/_vendor/rich/tree.py,sha256=JxyWbc27ZuwoLQnd7I-rSsRsqI9lzaVKlfTLJXla9U0,9122 -pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 -pip/_vendor/tenacity/__init__.py,sha256=GLLsTFD4Bd5VDgTR6mU_FxyOsrxc48qONorVaRebeD4,18257 -pip/_vendor/tenacity/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/_asyncio.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/_utils.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/after.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/before.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/before_sleep.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/nap.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/retry.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/stop.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-39.pyc,, -pip/_vendor/tenacity/__pycache__/wait.cpython-39.pyc,, -pip/_vendor/tenacity/_asyncio.py,sha256=HEb0BVJEeBJE9P-m9XBxh1KcaF96BwoeqkJCL5sbVcQ,3314 -pip/_vendor/tenacity/_utils.py,sha256=-y68scDcyoqvTJuJJ0GTfjdSCljEYlbCYvgk7nM4NdM,1944 -pip/_vendor/tenacity/after.py,sha256=dlmyxxFy2uqpLXDr838DiEd7jgv2AGthsWHGYcGYsaI,1496 -pip/_vendor/tenacity/before.py,sha256=7XtvRmO0dRWUp8SVn24OvIiGFj8-4OP5muQRUiWgLh0,1376 -pip/_vendor/tenacity/before_sleep.py,sha256=ThyDvqKU5yle_IvYQz_b6Tp6UjUS0PhVp6zgqYl9U6Y,1908 -pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383 
-pip/_vendor/tenacity/retry.py,sha256=62R71W59bQjuNyFKsDM7hE2aEkEPtwNBRA0tnsEvgSk,6645 -pip/_vendor/tenacity/stop.py,sha256=sKHmHaoSaW6sKu3dTxUVKr1-stVkY7lw4Y9yjZU30zQ,2790 -pip/_vendor/tenacity/tornadoweb.py,sha256=E8lWO2nwe6dJgoB-N2HhQprYLDLB_UdSgFnv-EN6wKE,2145 -pip/_vendor/tenacity/wait.py,sha256=e_Saa6I2tsNLpCL1t9897wN2fGb0XQMQlE4bU2t9V2w,6691 -pip/_vendor/tomli/__init__.py,sha256=z1Elt0nLAqU5Y0DOn9p__8QnLWavlEOpRyQikdYgKro,230 -pip/_vendor/tomli/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/tomli/__pycache__/_parser.cpython-39.pyc,, -pip/_vendor/tomli/__pycache__/_re.cpython-39.pyc,, -pip/_vendor/tomli/_parser.py,sha256=50BD4o9YbzFAGAYyZLqZC8F81DQ7iWWyJnrHNwBKa6A,22415 -pip/_vendor/tomli/_re.py,sha256=5GPfgXKteg7wRFCF-DzlkAPI2ilHbkMK2-JC49F-AJQ,2681 -pip/_vendor/typing_extensions.py,sha256=1uqi_RSlI7gos4eJB_NEV3d5wQwzTUQHd3_jrkbTo8Q,87149 -pip/_vendor/urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763 -pip/_vendor/urllib3/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/_collections.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/_version.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/connection.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/connectionpool.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/exceptions.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/fields.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/filepost.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/poolmanager.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/request.cpython-39.pyc,, -pip/_vendor/urllib3/__pycache__/response.cpython-39.pyc,, -pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 -pip/_vendor/urllib3/_version.py,sha256=_NdMUQaeBvFHAX2z3zAIX2Wum58A6rVtY1f7ByHsQ4g,63 -pip/_vendor/urllib3/connection.py,sha256=6zokyboYYKm9VkyrQvVVLgxMyCZK7n9Vmg_2ZK6pbhc,20076 -pip/_vendor/urllib3/connectionpool.py,sha256=qz-ICrW6g4TZVCbDQ8fRe68BMpXkskkR9vAVY9zUWtA,39013 -pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 -pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 -pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 -pip/_vendor/urllib3/contrib/appengine.py,sha256=lfzpHFmJiO82shClLEm3QB62SYgHWnjpZOH_2JhU5Tc,11034 -pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538 
-pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=DD4pInv_3OEEGffEFynBoirc8ldR789sLmGSKukzA0E,16900 -pip/_vendor/urllib3/contrib/securetransport.py,sha256=4qUKo7PUV-vVIqXmr2BD-sH7qplB918jiD5eNsRI9vU,34449 -pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 -pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 -pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 -pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 -pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/urllib3/packages/__pycache__/six.cpython-39.pyc,, -pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc,, -pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 -pip/_vendor/urllib3/packages/six.py,sha256=1LVW7ljqRirFlfExjwl-v1B7vSAUNTmzGMs-qays2zg,34666 -pip/_vendor/urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763 -pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985 -pip/_vendor/urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203 -pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 -pip/_vendor/urllib3/util/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/connection.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/proxy.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/queue.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/request.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/response.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/retry.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/timeout.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/url.cpython-39.pyc,, -pip/_vendor/urllib3/util/__pycache__/wait.cpython-39.pyc,, -pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 -pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 -pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 -pip/_vendor/urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123 -pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 -pip/_vendor/urllib3/util/retry.py,sha256=iESg2PvViNdXBRY4MpL4h0kqwOOkHkxmLn1kkhFHPU8,22001 -pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177 -pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=w01jCYuwvQ038p9mhc1P1gF8IiTN1qHakThpoukOlbw,5751 -pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 -pip/_vendor/urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003 -pip/_vendor/urllib3/util/url.py,sha256=QVEzcbHipbXyCWwH6R4K4TR-N8T4LM55WEMwNUTBmLE,14047 
-pip/_vendor/urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404 -pip/_vendor/vendor.txt,sha256=SpijkWP2aapE1DEgOKL1wxuOz1ztM7E2Xs2PZ-V1PKA,496 -pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 -pip/_vendor/webencodings/__pycache__/__init__.cpython-39.pyc,, -pip/_vendor/webencodings/__pycache__/labels.cpython-39.pyc,, -pip/_vendor/webencodings/__pycache__/mklabels.cpython-39.pyc,, -pip/_vendor/webencodings/__pycache__/tests.cpython-39.pyc,, -pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-39.pyc,, -pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 -pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 -pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 -pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 -pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286 diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/WHEEL b/venv/Lib/site-packages/pip-22.0.4.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/entry_points.txt b/venv/Lib/site-packages/pip-22.0.4.dist-info/entry_points.txt deleted file mode 100644 index 9609f72..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -pip = pip._internal.cli.main:main -pip3 = pip._internal.cli.main:main -pip3.9 = pip._internal.cli.main:main - diff --git a/venv/Lib/site-packages/pip-22.0.4.dist-info/top_level.txt b/venv/Lib/site-packages/pip-22.0.4.dist-info/top_level.txt deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/pip-22.0.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/pip/__init__.py b/venv/Lib/site-packages/pip/__init__.py deleted file mode 100644 index 3a0d263..0000000 --- a/venv/Lib/site-packages/pip/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import List, Optional - -__version__ = "22.0.4" - - -def main(args: Optional[List[str]] = None) -> int: - """This is an internal API only meant for use by pip's own console scripts. - - For additional details, see https://github.com/pypa/pip/issues/7498. 
- """ - from pip._internal.utils.entrypoints import _wrapper - - return _wrapper(args) diff --git a/venv/Lib/site-packages/pip/__main__.py b/venv/Lib/site-packages/pip/__main__.py deleted file mode 100644 index fe34a7b..0000000 --- a/venv/Lib/site-packages/pip/__main__.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -import sys -import warnings - -# Remove '' and current working directory from the first entry -# of sys.path, if present to avoid using current directory -# in pip commands check, freeze, install, list and show, -# when invoked as python -m pip -if sys.path[0] in ("", os.getcwd()): - sys.path.pop(0) - -# If we are running from a wheel, add the wheel to sys.path -# This allows the usage python pip-*.whl/pip install pip-*.whl -if __package__ == "": - # __file__ is pip-*.whl/pip/__main__.py - # first dirname call strips of '/__main__.py', second strips off '/pip' - # Resulting path is the name of the wheel itself - # Add that to sys.path so we can import pip - path = os.path.dirname(os.path.dirname(__file__)) - sys.path.insert(0, path) - -if __name__ == "__main__": - # Work around the error reported in #9540, pending a proper fix. - # Note: It is essential the warning filter is set *before* importing - # pip, as the deprecation happens at import time, not runtime. - warnings.filterwarnings( - "ignore", category=DeprecationWarning, module=".*packaging\\.version" - ) - from pip._internal.cli.main import main as _main - - sys.exit(_main()) diff --git a/venv/Lib/site-packages/pip/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 0e25b14..0000000 Binary files a/venv/Lib/site-packages/pip/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/__pycache__/__main__.cpython-39.pyc b/venv/Lib/site-packages/pip/__pycache__/__main__.cpython-39.pyc deleted file mode 100644 index 51742d9..0000000 Binary files a/venv/Lib/site-packages/pip/__pycache__/__main__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__init__.py b/venv/Lib/site-packages/pip/_internal/__init__.py deleted file mode 100644 index 6afb5c6..0000000 --- a/venv/Lib/site-packages/pip/_internal/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import List, Optional - -import pip._internal.utils.inject_securetransport # noqa -from pip._internal.utils import _log - -# init_logging() must be called before any call to logging.getLogger() -# which happens at import of most modules. -_log.init_logging() - - -def main(args: (Optional[List[str]]) = None) -> int: - """This is preserved for old console scripts that may still be referencing - it. - - For additional details, see https://github.com/pypa/pip/issues/7498. 
- """ - from pip._internal.utils.entrypoints import _wrapper - - return _wrapper(args) diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index a835e8f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc deleted file mode 100644 index bd1643e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc deleted file mode 100644 index edd0f1e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc deleted file mode 100644 index 4113875..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc deleted file mode 100644 index c5efd65..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc deleted file mode 100644 index b16c3c6..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc deleted file mode 100644 index 9d4ccbf..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc deleted file mode 100644 index e19c1e2..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc deleted file mode 100644 index fde4d84..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/build_env.py b/venv/Lib/site-packages/pip/_internal/build_env.py deleted file mode 100644 index daeb7fb..0000000 --- a/venv/Lib/site-packages/pip/_internal/build_env.py +++ /dev/null @@ -1,296 +0,0 @@ -"""Build Environment used for isolation during sdist building -""" - -import contextlib -import logging -import os -import pathlib -import sys -import textwrap -import zipfile -from collections import OrderedDict -from sysconfig import get_paths -from types import TracebackType -from typing import 
TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type - -from pip._vendor.certifi import where -from pip._vendor.packaging.requirements import Requirement -from pip._vendor.packaging.version import Version - -from pip import __file__ as pip_location -from pip._internal.cli.spinners import open_spinner -from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib -from pip._internal.metadata import get_environment -from pip._internal.utils.subprocess import call_subprocess -from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds - -if TYPE_CHECKING: - from pip._internal.index.package_finder import PackageFinder - -logger = logging.getLogger(__name__) - - -class _Prefix: - def __init__(self, path: str) -> None: - self.path = path - self.setup = False - self.bin_dir = get_paths( - "nt" if os.name == "nt" else "posix_prefix", - vars={"base": path, "platbase": path}, - )["scripts"] - self.lib_dirs = get_prefixed_libs(path) - - -@contextlib.contextmanager -def _create_standalone_pip() -> Iterator[str]: - """Create a "standalone pip" zip file. - - The zip file's content is identical to the currently-running pip. - It will be used to install requirements into the build environment. - """ - source = pathlib.Path(pip_location).resolve().parent - - # Return the current instance if `source` is not a directory. We can't build - # a zip from this, and it likely means the instance is already standalone. - if not source.is_dir(): - yield str(source) - return - - with TempDirectory(kind="standalone-pip") as tmp_dir: - pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip") - kwargs = {} - if sys.version_info >= (3, 8): - kwargs["strict_timestamps"] = False - with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf: - for child in source.rglob("*"): - zf.write(child, child.relative_to(source.parent).as_posix()) - yield os.path.join(pip_zip, "pip") - - -class BuildEnvironment: - """Creates and manages an isolated environment to install build deps""" - - def __init__(self) -> None: - temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True) - - self._prefixes = OrderedDict( - (name, _Prefix(os.path.join(temp_dir.path, name))) - for name in ("normal", "overlay") - ) - - self._bin_dirs: List[str] = [] - self._lib_dirs: List[str] = [] - for prefix in reversed(list(self._prefixes.values())): - self._bin_dirs.append(prefix.bin_dir) - self._lib_dirs.extend(prefix.lib_dirs) - - # Customize site to: - # - ensure .pth files are honored - # - prevent access to system site packages - system_sites = { - os.path.normcase(site) for site in (get_purelib(), get_platlib()) - } - self._site_dir = os.path.join(temp_dir.path, "site") - if not os.path.exists(self._site_dir): - os.mkdir(self._site_dir) - with open( - os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8" - ) as fp: - fp.write( - textwrap.dedent( - """ - import os, site, sys - - # First, drop system-sites related paths. - original_sys_path = sys.path[:] - known_paths = set() - for path in {system_sites!r}: - site.addsitedir(path, known_paths=known_paths) - system_paths = set( - os.path.normcase(path) - for path in sys.path[len(original_sys_path):] - ) - original_sys_path = [ - path for path in original_sys_path - if os.path.normcase(path) not in system_paths - ] - sys.path = original_sys_path - - # Second, add lib directories. - # ensuring .pth file are processed. 
- for path in {lib_dirs!r}: - assert not path in sys.path - site.addsitedir(path) - """ - ).format(system_sites=system_sites, lib_dirs=self._lib_dirs) - ) - - def __enter__(self) -> None: - self._save_env = { - name: os.environ.get(name, None) - for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH") - } - - path = self._bin_dirs[:] - old_path = self._save_env["PATH"] - if old_path: - path.extend(old_path.split(os.pathsep)) - - pythonpath = [self._site_dir] - - os.environ.update( - { - "PATH": os.pathsep.join(path), - "PYTHONNOUSERSITE": "1", - "PYTHONPATH": os.pathsep.join(pythonpath), - } - ) - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - for varname, old_value in self._save_env.items(): - if old_value is None: - os.environ.pop(varname, None) - else: - os.environ[varname] = old_value - - def check_requirements( - self, reqs: Iterable[str] - ) -> Tuple[Set[Tuple[str, str]], Set[str]]: - """Return 2 sets: - - conflicting requirements: set of (installed, wanted) reqs tuples - - missing requirements: set of reqs - """ - missing = set() - conflicting = set() - if reqs: - env = get_environment(self._lib_dirs) - for req_str in reqs: - req = Requirement(req_str) - dist = env.get_distribution(req.name) - if not dist: - missing.add(req_str) - continue - if isinstance(dist.version, Version): - installed_req_str = f"{req.name}=={dist.version}" - else: - installed_req_str = f"{req.name}==={dist.version}" - if dist.version not in req.specifier: - conflicting.add((installed_req_str, req_str)) - # FIXME: Consider direct URL? - return conflicting, missing - - def install_requirements( - self, - finder: "PackageFinder", - requirements: Iterable[str], - prefix_as_string: str, - *, - kind: str, - ) -> None: - prefix = self._prefixes[prefix_as_string] - assert not prefix.setup - prefix.setup = True - if not requirements: - return - with contextlib.ExitStack() as ctx: - pip_runnable = ctx.enter_context(_create_standalone_pip()) - self._install_requirements( - pip_runnable, - finder, - requirements, - prefix, - kind=kind, - ) - - @staticmethod - def _install_requirements( - pip_runnable: str, - finder: "PackageFinder", - requirements: Iterable[str], - prefix: _Prefix, - *, - kind: str, - ) -> None: - args: List[str] = [ - sys.executable, - pip_runnable, - "install", - "--ignore-installed", - "--no-user", - "--prefix", - prefix.path, - "--no-warn-script-location", - ] - if logger.getEffectiveLevel() <= logging.DEBUG: - args.append("-v") - for format_control in ("no_binary", "only_binary"): - formats = getattr(finder.format_control, format_control) - args.extend( - ( - "--" + format_control.replace("_", "-"), - ",".join(sorted(formats or {":none:"})), - ) - ) - - index_urls = finder.index_urls - if index_urls: - args.extend(["-i", index_urls[0]]) - for extra_index in index_urls[1:]: - args.extend(["--extra-index-url", extra_index]) - else: - args.append("--no-index") - for link in finder.find_links: - args.extend(["--find-links", link]) - - for host in finder.trusted_hosts: - args.extend(["--trusted-host", host]) - if finder.allow_all_prereleases: - args.append("--pre") - if finder.prefer_binary: - args.append("--prefer-binary") - args.append("--") - args.extend(requirements) - extra_environ = {"_PIP_STANDALONE_CERT": where()} - with open_spinner(f"Installing {kind}") as spinner: - call_subprocess( - args, - command_desc=f"pip subprocess to install {kind}", - spinner=spinner, - extra_environ=extra_environ, 
- ) - - -class NoOpBuildEnvironment(BuildEnvironment): - """A no-op drop-in replacement for BuildEnvironment""" - - def __init__(self) -> None: - pass - - def __enter__(self) -> None: - pass - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - pass - - def cleanup(self) -> None: - pass - - def install_requirements( - self, - finder: "PackageFinder", - requirements: Iterable[str], - prefix_as_string: str, - *, - kind: str, - ) -> None: - raise NotImplementedError() diff --git a/venv/Lib/site-packages/pip/_internal/cache.py b/venv/Lib/site-packages/pip/_internal/cache.py deleted file mode 100644 index 1d6df22..0000000 --- a/venv/Lib/site-packages/pip/_internal/cache.py +++ /dev/null @@ -1,264 +0,0 @@ -"""Cache Management -""" - -import hashlib -import json -import logging -import os -from typing import Any, Dict, List, Optional, Set - -from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.exceptions import InvalidWheelFilename -from pip._internal.models.format_control import FormatControl -from pip._internal.models.link import Link -from pip._internal.models.wheel import Wheel -from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds -from pip._internal.utils.urls import path_to_url - -logger = logging.getLogger(__name__) - - -def _hash_dict(d: Dict[str, str]) -> str: - """Return a stable sha224 of a dictionary.""" - s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) - return hashlib.sha224(s.encode("ascii")).hexdigest() - - -class Cache: - """An abstract class - provides cache directories for data from links - - - :param cache_dir: The root of the cache. - :param format_control: An object of FormatControl class to limit - binaries being read from the cache. - :param allowed_formats: which formats of files the cache should store. - ('binary' and 'source' are the only allowed values) - """ - - def __init__( - self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str] - ) -> None: - super().__init__() - assert not cache_dir or os.path.isabs(cache_dir) - self.cache_dir = cache_dir or None - self.format_control = format_control - self.allowed_formats = allowed_formats - - _valid_formats = {"source", "binary"} - assert self.allowed_formats.union(_valid_formats) == _valid_formats - - def _get_cache_path_parts(self, link: Link) -> List[str]: - """Get parts of part that must be os.path.joined with cache_dir""" - - # We want to generate an url to use as our cache key, we don't want to - # just re-use the URL because it might have other items in the fragment - # and we don't care about those. - key_parts = {"url": link.url_without_fragment} - if link.hash_name is not None and link.hash is not None: - key_parts[link.hash_name] = link.hash - if link.subdirectory_fragment: - key_parts["subdirectory"] = link.subdirectory_fragment - - # Include interpreter name, major and minor version in cache key - # to cope with ill-behaved sdists that build a different wheel - # depending on the python version their setup.py is being run on, - # and don't encode the difference in compatibility tags. 
- # https://github.com/pypa/pip/issues/7296 - key_parts["interpreter_name"] = interpreter_name() - key_parts["interpreter_version"] = interpreter_version() - - # Encode our key url with sha224, we'll use this because it has similar - # security properties to sha256, but with a shorter total output (and - # thus less secure). However the differences don't make a lot of - # difference for our use case here. - hashed = _hash_dict(key_parts) - - # We want to nest the directories some to prevent having a ton of top - # level directories where we might run out of sub directories on some - # FS. - parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] - - return parts - - def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: - can_not_cache = not self.cache_dir or not canonical_package_name or not link - if can_not_cache: - return [] - - formats = self.format_control.get_allowed_formats(canonical_package_name) - if not self.allowed_formats.intersection(formats): - return [] - - candidates = [] - path = self.get_path_for_link(link) - if os.path.isdir(path): - for candidate in os.listdir(path): - candidates.append((candidate, path)) - return candidates - - def get_path_for_link(self, link: Link) -> str: - """Return a directory to store cached items in for link.""" - raise NotImplementedError() - - def get( - self, - link: Link, - package_name: Optional[str], - supported_tags: List[Tag], - ) -> Link: - """Returns a link to a cached item if it exists, otherwise returns the - passed link. - """ - raise NotImplementedError() - - -class SimpleWheelCache(Cache): - """A cache of wheels for future installs.""" - - def __init__(self, cache_dir: str, format_control: FormatControl) -> None: - super().__init__(cache_dir, format_control, {"binary"}) - - def get_path_for_link(self, link: Link) -> str: - """Return a directory to store cached wheels for link - - Because there are M wheels for any one sdist, we provide a directory - to cache them in, and then consult that directory when looking up - cache hits. - - We only insert things into the cache if they have plausible version - numbers, so that we don't contaminate the cache with things that were - not unique. E.g. ./package might have dozens of installs done for it - and build a version of 0.0...and if we built and cached a wheel, we'd - end up using the same wheel even if the source has been edited. - - :param link: The link of the sdist for which this will cache wheels. 
- """ - parts = self._get_cache_path_parts(link) - assert self.cache_dir - # Store wheels within the root cache_dir - return os.path.join(self.cache_dir, "wheels", *parts) - - def get( - self, - link: Link, - package_name: Optional[str], - supported_tags: List[Tag], - ) -> Link: - candidates = [] - - if not package_name: - return link - - canonical_package_name = canonicalize_name(package_name) - for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name): - try: - wheel = Wheel(wheel_name) - except InvalidWheelFilename: - continue - if canonicalize_name(wheel.name) != canonical_package_name: - logger.debug( - "Ignoring cached wheel %s for %s as it " - "does not match the expected distribution name %s.", - wheel_name, - link, - package_name, - ) - continue - if not wheel.supported(supported_tags): - # Built for a different python/arch/etc - continue - candidates.append( - ( - wheel.support_index_min(supported_tags), - wheel_name, - wheel_dir, - ) - ) - - if not candidates: - return link - - _, wheel_name, wheel_dir = min(candidates) - return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) - - -class EphemWheelCache(SimpleWheelCache): - """A SimpleWheelCache that creates it's own temporary cache directory""" - - def __init__(self, format_control: FormatControl) -> None: - self._temp_dir = TempDirectory( - kind=tempdir_kinds.EPHEM_WHEEL_CACHE, - globally_managed=True, - ) - - super().__init__(self._temp_dir.path, format_control) - - -class CacheEntry: - def __init__( - self, - link: Link, - persistent: bool, - ): - self.link = link - self.persistent = persistent - - -class WheelCache(Cache): - """Wraps EphemWheelCache and SimpleWheelCache into a single Cache - - This Cache allows for gracefully degradation, using the ephem wheel cache - when a certain link is not found in the simple wheel cache first. - """ - - def __init__(self, cache_dir: str, format_control: FormatControl) -> None: - super().__init__(cache_dir, format_control, {"binary"}) - self._wheel_cache = SimpleWheelCache(cache_dir, format_control) - self._ephem_cache = EphemWheelCache(format_control) - - def get_path_for_link(self, link: Link) -> str: - return self._wheel_cache.get_path_for_link(link) - - def get_ephem_path_for_link(self, link: Link) -> str: - return self._ephem_cache.get_path_for_link(link) - - def get( - self, - link: Link, - package_name: Optional[str], - supported_tags: List[Tag], - ) -> Link: - cache_entry = self.get_cache_entry(link, package_name, supported_tags) - if cache_entry is None: - return link - return cache_entry.link - - def get_cache_entry( - self, - link: Link, - package_name: Optional[str], - supported_tags: List[Tag], - ) -> Optional[CacheEntry]: - """Returns a CacheEntry with a link to a cached item if it exists or - None. The cache entry indicates if the item was found in the persistent - or ephemeral cache. 
- """ - retval = self._wheel_cache.get( - link=link, - package_name=package_name, - supported_tags=supported_tags, - ) - if retval is not link: - return CacheEntry(retval, persistent=True) - - retval = self._ephem_cache.get( - link=link, - package_name=package_name, - supported_tags=supported_tags, - ) - if retval is not link: - return CacheEntry(retval, persistent=False) - - return None diff --git a/venv/Lib/site-packages/pip/_internal/cli/__init__.py b/venv/Lib/site-packages/pip/_internal/cli/__init__.py deleted file mode 100644 index e589bb9..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Subpackage containing all of pip's command line interface related code -""" - -# This file intentionally does not import submodules diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index da14672..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc deleted file mode 100644 index 426e174..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-39.pyc deleted file mode 100644 index 7de1356..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc deleted file mode 100644 index 2943eab..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-39.pyc deleted file mode 100644 index 865e9e5..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-39.pyc deleted file mode 100644 index dc1892f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc deleted file mode 100644 index 944129e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-39.pyc deleted file mode 100644 index d064cba..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/parser.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc deleted file mode 100644 index 32fa353..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-39.pyc deleted file mode 100644 index 686bf70..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-39.pyc deleted file mode 100644 index 3d99993..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc deleted file mode 100644 index 77b9909..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/cli/autocompletion.py b/venv/Lib/site-packages/pip/_internal/cli/autocompletion.py deleted file mode 100644 index 226fe84..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/autocompletion.py +++ /dev/null @@ -1,171 +0,0 @@ -"""Logic that powers autocompletion installed by ``pip completion``. -""" - -import optparse -import os -import sys -from itertools import chain -from typing import Any, Iterable, List, Optional - -from pip._internal.cli.main_parser import create_main_parser -from pip._internal.commands import commands_dict, create_command -from pip._internal.metadata import get_default_environment - - -def autocomplete() -> None: - """Entry Point for completion of main and subcommand options.""" - # Don't complete if user hasn't sourced bash_completion file. 
- if "PIP_AUTO_COMPLETE" not in os.environ: - return - cwords = os.environ["COMP_WORDS"].split()[1:] - cword = int(os.environ["COMP_CWORD"]) - try: - current = cwords[cword - 1] - except IndexError: - current = "" - - parser = create_main_parser() - subcommands = list(commands_dict) - options = [] - - # subcommand - subcommand_name: Optional[str] = None - for word in cwords: - if word in subcommands: - subcommand_name = word - break - # subcommand options - if subcommand_name is not None: - # special case: 'help' subcommand has no options - if subcommand_name == "help": - sys.exit(1) - # special case: list locally installed dists for show and uninstall - should_list_installed = not current.startswith("-") and subcommand_name in [ - "show", - "uninstall", - ] - if should_list_installed: - env = get_default_environment() - lc = current.lower() - installed = [ - dist.canonical_name - for dist in env.iter_installed_distributions(local_only=True) - if dist.canonical_name.startswith(lc) - and dist.canonical_name not in cwords[1:] - ] - # if there are no dists installed, fall back to option completion - if installed: - for dist in installed: - print(dist) - sys.exit(1) - - should_list_installables = ( - not current.startswith("-") and subcommand_name == "install" - ) - if should_list_installables: - for path in auto_complete_paths(current, "path"): - print(path) - sys.exit(1) - - subcommand = create_command(subcommand_name) - - for opt in subcommand.parser.option_list_all: - if opt.help != optparse.SUPPRESS_HELP: - for opt_str in opt._long_opts + opt._short_opts: - options.append((opt_str, opt.nargs)) - - # filter out previously specified options from available options - prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] - options = [(x, v) for (x, v) in options if x not in prev_opts] - # filter options by current input - options = [(k, v) for k, v in options if k.startswith(current)] - # get completion type given cwords and available subcommand options - completion_type = get_path_completion_type( - cwords, - cword, - subcommand.parser.option_list_all, - ) - # get completion files and directories if ``completion_type`` is - # ````, ```` or ```` - if completion_type: - paths = auto_complete_paths(current, completion_type) - options = [(path, 0) for path in paths] - for option in options: - opt_label = option[0] - # append '=' to options which require args - if option[1] and option[0][:2] == "--": - opt_label += "=" - print(opt_label) - else: - # show main parser options only when necessary - - opts = [i.option_list for i in parser.option_groups] - opts.append(parser.option_list) - flattened_opts = chain.from_iterable(opts) - if current.startswith("-"): - for opt in flattened_opts: - if opt.help != optparse.SUPPRESS_HELP: - subcommands += opt._long_opts + opt._short_opts - else: - # get completion type given cwords and all available options - completion_type = get_path_completion_type(cwords, cword, flattened_opts) - if completion_type: - subcommands = list(auto_complete_paths(current, completion_type)) - - print(" ".join([x for x in subcommands if x.startswith(current)])) - sys.exit(1) - - -def get_path_completion_type( - cwords: List[str], cword: int, opts: Iterable[Any] -) -> Optional[str]: - """Get the type of path completion (``file``, ``dir``, ``path`` or None) - - :param cwords: same as the environmental variable ``COMP_WORDS`` - :param cword: same as the environmental variable ``COMP_CWORD`` - :param opts: The available options to check - :return: path completion type (``file``, 
``dir``, ``path`` or None) - """ - if cword < 2 or not cwords[cword - 2].startswith("-"): - return None - for opt in opts: - if opt.help == optparse.SUPPRESS_HELP: - continue - for o in str(opt).split("/"): - if cwords[cword - 2].split("=")[0] == o: - if not opt.metavar or any( - x in ("path", "file", "dir") for x in opt.metavar.split("/") - ): - return opt.metavar - return None - - -def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]: - """If ``completion_type`` is ``file`` or ``path``, list all regular files - and directories starting with ``current``; otherwise only list directories - starting with ``current``. - - :param current: The word to be completed - :param completion_type: path completion type(``file``, ``path`` or ``dir``) - :return: A generator of regular files and/or directories - """ - directory, filename = os.path.split(current) - current_path = os.path.abspath(directory) - # Don't complete paths if they can't be accessed - if not os.access(current_path, os.R_OK): - return - filename = os.path.normcase(filename) - # list all files that start with ``filename`` - file_list = ( - x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename) - ) - for f in file_list: - opt = os.path.join(current_path, f) - comp_file = os.path.normcase(os.path.join(directory, f)) - # complete regular files when there is not ```` after option - # complete directories when there is ````, ```` or - # ````after option - if completion_type != "dir" and os.path.isfile(opt): - yield comp_file - elif os.path.isdir(opt): - yield os.path.join(comp_file, "") diff --git a/venv/Lib/site-packages/pip/_internal/cli/base_command.py b/venv/Lib/site-packages/pip/_internal/cli/base_command.py deleted file mode 100644 index 78b96bb..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/base_command.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Base Command class, and related routines""" - -import functools -import logging -import logging.config -import optparse -import os -import sys -import traceback -from optparse import Values -from typing import Any, Callable, List, Optional, Tuple - -from pip._vendor.rich import traceback as rich_traceback - -from pip._internal.cli import cmdoptions -from pip._internal.cli.command_context import CommandContextMixIn -from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter -from pip._internal.cli.status_codes import ( - ERROR, - PREVIOUS_BUILD_DIR_ERROR, - UNKNOWN_ERROR, - VIRTUALENV_NOT_FOUND, -) -from pip._internal.exceptions import ( - BadCommand, - CommandError, - DiagnosticPipError, - InstallationError, - NetworkConnectionError, - PreviousBuildDirError, - UninstallationError, -) -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging -from pip._internal.utils.misc import get_prog, normalize_path -from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry -from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry -from pip._internal.utils.virtualenv import running_under_virtualenv - -__all__ = ["Command"] - -logger = logging.getLogger(__name__) - - -class Command(CommandContextMixIn): - usage: str = "" - ignore_require_venv: bool = False - - def __init__(self, name: str, summary: str, isolated: bool = False) -> None: - super().__init__() - - self.name = name - self.summary = summary - self.parser = ConfigOptionParser( - usage=self.usage, - prog=f"{get_prog()} {name}", - 
formatter=UpdatingDefaultsHelpFormatter(), - add_help_option=False, - name=name, - description=self.__doc__, - isolated=isolated, - ) - - self.tempdir_registry: Optional[TempDirRegistry] = None - - # Commands should add options to this option group - optgroup_name = f"{self.name.capitalize()} Options" - self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) - - # Add the general options - gen_opts = cmdoptions.make_option_group( - cmdoptions.general_group, - self.parser, - ) - self.parser.add_option_group(gen_opts) - - self.add_options() - - def add_options(self) -> None: - pass - - def handle_pip_version_check(self, options: Values) -> None: - """ - This is a no-op so that commands by default do not do the pip version - check. - """ - # Make sure we do the pip version check if the index_group options - # are present. - assert not hasattr(options, "no_index") - - def run(self, options: Values, args: List[str]) -> int: - raise NotImplementedError - - def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]: - # factored out for testability - return self.parser.parse_args(args) - - def main(self, args: List[str]) -> int: - try: - with self.main_context(): - return self._main(args) - finally: - logging.shutdown() - - def _main(self, args: List[str]) -> int: - # We must initialize this before the tempdir manager, otherwise the - # configuration would not be accessible by the time we clean up the - # tempdir manager. - self.tempdir_registry = self.enter_context(tempdir_registry()) - # Intentionally set as early as possible so globally-managed temporary - # directories are available to the rest of the code. - self.enter_context(global_tempdir_manager()) - - options, args = self.parse_args(args) - - # Set verbosity so that it can be used elsewhere. - self.verbosity = options.verbose - options.quiet - - level_number = setup_logging( - verbosity=self.verbosity, - no_color=options.no_color, - user_log_file=options.log, - ) - - # TODO: Try to get these passing down from the command? - # without resorting to os.environ to hold these. - # This also affects isolated builds and it should. - - if options.no_input: - os.environ["PIP_NO_INPUT"] = "1" - - if options.exists_action: - os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) - - if options.require_venv and not self.ignore_require_venv: - # If a venv is required check if it can really be found - if not running_under_virtualenv(): - logger.critical("Could not find an activated virtualenv (required).") - sys.exit(VIRTUALENV_NOT_FOUND) - - if options.cache_dir: - options.cache_dir = normalize_path(options.cache_dir) - if not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "or is not writable by the current user. The cache " - "has been disabled. Check the permissions and owner of " - "that directory. If executing pip with sudo, you should " - "use sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - if "2020-resolver" in options.features_enabled: - logger.warning( - "--use-feature=2020-resolver no longer has any effect, " - "since it is now the default dependency resolver in pip. " - "This will become an error in pip 21.0." 
- ) - - def intercepts_unhandled_exc( - run_func: Callable[..., int] - ) -> Callable[..., int]: - @functools.wraps(run_func) - def exc_logging_wrapper(*args: Any) -> int: - try: - status = run_func(*args) - assert isinstance(status, int) - return status - except DiagnosticPipError as exc: - logger.error("[present-diagnostic] %s", exc) - logger.debug("Exception information:", exc_info=True) - - return ERROR - except PreviousBuildDirError as exc: - logger.critical(str(exc)) - logger.debug("Exception information:", exc_info=True) - - return PREVIOUS_BUILD_DIR_ERROR - except ( - InstallationError, - UninstallationError, - BadCommand, - NetworkConnectionError, - ) as exc: - logger.critical(str(exc)) - logger.debug("Exception information:", exc_info=True) - - return ERROR - except CommandError as exc: - logger.critical("%s", exc) - logger.debug("Exception information:", exc_info=True) - - return ERROR - except BrokenStdoutLoggingError: - # Bypass our logger and write any remaining messages to - # stderr because stdout no longer works. - print("ERROR: Pipe to stdout was broken", file=sys.stderr) - if level_number <= logging.DEBUG: - traceback.print_exc(file=sys.stderr) - - return ERROR - except KeyboardInterrupt: - logger.critical("Operation cancelled by user") - logger.debug("Exception information:", exc_info=True) - - return ERROR - except BaseException: - logger.critical("Exception:", exc_info=True) - - return UNKNOWN_ERROR - - return exc_logging_wrapper - - try: - if not options.debug_mode: - run = intercepts_unhandled_exc(self.run) - else: - run = self.run - rich_traceback.install(show_locals=True) - return run(options, args) - finally: - self.handle_pip_version_check(options) diff --git a/venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py b/venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py deleted file mode 100644 index 71b1d19..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py +++ /dev/null @@ -1,1018 +0,0 @@ -""" -shared options and groups - -The principle here is to define options once, but *not* instantiate them -globally. One reason being that options with action='append' can carry state -between parses. pip parses general options twice internally, and shouldn't -pass on state. To be consistent, all options will follow this design. -""" - -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import logging -import os -import textwrap -from functools import partial -from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values -from textwrap import dedent -from typing import Any, Callable, Dict, Optional, Tuple - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cli.parser import ConfigOptionParser -from pip._internal.cli.progress_bars import BAR_TYPES -from pip._internal.exceptions import CommandError -from pip._internal.locations import USER_CACHE_DIR, get_src_prefix -from pip._internal.models.format_control import FormatControl -from pip._internal.models.index import PyPI -from pip._internal.models.target_python import TargetPython -from pip._internal.utils.hashes import STRONG_HASHES -from pip._internal.utils.misc import strtobool - -logger = logging.getLogger(__name__) - - -def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None: - """ - Raise an option parsing error using parser.error(). - - Args: - parser: an OptionParser instance. - option: an Option instance. - msg: the error text. 
- """ - msg = f"{option} error: {msg}" - msg = textwrap.fill(" ".join(msg.split())) - parser.error(msg) - - -def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup: - """ - Return an OptionGroup object - group -- assumed to be dict with 'name' and 'options' keys - parser -- an optparse Parser - """ - option_group = OptionGroup(parser, group["name"]) - for option in group["options"]: - option_group.add_option(option()) - return option_group - - -def check_install_build_global( - options: Values, check_options: Optional[Values] = None -) -> None: - """Disable wheels if per-setup.py call options are set. - - :param options: The OptionParser options to update. - :param check_options: The options to check, if not supplied defaults to - options. - """ - if check_options is None: - check_options = options - - def getname(n: str) -> Optional[Any]: - return getattr(check_options, n, None) - - names = ["build_options", "global_options", "install_options"] - if any(map(getname, names)): - control = options.format_control - control.disallow_binaries() - logger.warning( - "Disabling all use of wheels due to the use of --build-option " - "/ --global-option / --install-option.", - ) - - -def check_dist_restriction(options: Values, check_target: bool = False) -> None: - """Function for determining if custom platform options are allowed. - - :param options: The OptionParser options. - :param check_target: Whether or not to check if --target is being used. - """ - dist_restriction_set = any( - [ - options.python_version, - options.platforms, - options.abis, - options.implementation, - ] - ) - - binary_only = FormatControl(set(), {":all:"}) - sdist_dependencies_allowed = ( - options.format_control != binary_only and not options.ignore_dependencies - ) - - # Installations or downloads using dist restrictions must not combine - # source distributions and dist-specific wheels, as they are not - # guaranteed to be locally compatible. - if dist_restriction_set and sdist_dependencies_allowed: - raise CommandError( - "When restricting platform and interpreter constraints using " - "--python-version, --platform, --abi, or --implementation, " - "either --no-deps must be set, or --only-binary=:all: must be " - "set and --no-binary must not be set (or must be set to " - ":none:)." - ) - - if check_target: - if dist_restriction_set and not options.target_dir: - raise CommandError( - "Can not use any platform or abi specific options unless " - "installing via '--target'" - ) - - -def _path_option_check(option: Option, opt: str, value: str) -> str: - return os.path.expanduser(value) - - -def _package_name_option_check(option: Option, opt: str, value: str) -> str: - return canonicalize_name(value) - - -class PipOption(Option): - TYPES = Option.TYPES + ("path", "package_name") - TYPE_CHECKER = Option.TYPE_CHECKER.copy() - TYPE_CHECKER["package_name"] = _package_name_option_check - TYPE_CHECKER["path"] = _path_option_check - - -########### -# options # -########### - -help_: Callable[..., Option] = partial( - Option, - "-h", - "--help", - dest="help", - action="help", - help="Show help.", -) - -debug_mode: Callable[..., Option] = partial( - Option, - "--debug", - dest="debug_mode", - action="store_true", - default=False, - help=( - "Let unhandled exceptions propagate outside the main subroutine, " - "instead of logging them to stderr." 
- ), -) - -isolated_mode: Callable[..., Option] = partial( - Option, - "--isolated", - dest="isolated_mode", - action="store_true", - default=False, - help=( - "Run pip in an isolated mode, ignoring environment variables and user " - "configuration." - ), -) - -require_virtualenv: Callable[..., Option] = partial( - Option, - "--require-virtualenv", - "--require-venv", - dest="require_venv", - action="store_true", - default=False, - help=( - "Allow pip to only run in a virtual environment; " - "exit with an error otherwise." - ), -) - -verbose: Callable[..., Option] = partial( - Option, - "-v", - "--verbose", - dest="verbose", - action="count", - default=0, - help="Give more output. Option is additive, and can be used up to 3 times.", -) - -no_color: Callable[..., Option] = partial( - Option, - "--no-color", - dest="no_color", - action="store_true", - default=False, - help="Suppress colored output.", -) - -version: Callable[..., Option] = partial( - Option, - "-V", - "--version", - dest="version", - action="store_true", - help="Show version and exit.", -) - -quiet: Callable[..., Option] = partial( - Option, - "-q", - "--quiet", - dest="quiet", - action="count", - default=0, - help=( - "Give less output. Option is additive, and can be used up to 3" - " times (corresponding to WARNING, ERROR, and CRITICAL logging" - " levels)." - ), -) - -progress_bar: Callable[..., Option] = partial( - Option, - "--progress-bar", - dest="progress_bar", - type="choice", - choices=list(BAR_TYPES.keys()), - default="on", - help=( - "Specify type of progress to be displayed [" - + "|".join(BAR_TYPES.keys()) - + "] (default: %default)" - ), -) - -log: Callable[..., Option] = partial( - PipOption, - "--log", - "--log-file", - "--local-log", - dest="log", - metavar="path", - type="path", - help="Path to a verbose appending log.", -) - -no_input: Callable[..., Option] = partial( - Option, - # Don't ask for input - "--no-input", - dest="no_input", - action="store_true", - default=False, - help="Disable prompting for input.", -) - -proxy: Callable[..., Option] = partial( - Option, - "--proxy", - dest="proxy", - type="str", - default="", - help="Specify a proxy in the form [user:passwd@]proxy.server:port.", -) - -retries: Callable[..., Option] = partial( - Option, - "--retries", - dest="retries", - type="int", - default=5, - help="Maximum number of retries each connection should attempt " - "(default %default times).", -) - -timeout: Callable[..., Option] = partial( - Option, - "--timeout", - "--default-timeout", - metavar="sec", - dest="timeout", - type="float", - default=15, - help="Set the socket timeout (default %default seconds).", -) - - -def exists_action() -> Option: - return Option( - # Option when path already exist - "--exists-action", - dest="exists_action", - type="choice", - choices=["s", "i", "w", "b", "a"], - default=[], - action="append", - metavar="action", - help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", - ) - - -cert: Callable[..., Option] = partial( - PipOption, - "--cert", - dest="cert", - type="path", - metavar="path", - help=( - "Path to PEM-encoded CA certificate bundle. " - "If provided, overrides the default. " - "See 'SSL Certificate Verification' in pip documentation " - "for more information." 
- ), -) - -client_cert: Callable[..., Option] = partial( - PipOption, - "--client-cert", - dest="client_cert", - type="path", - default=None, - metavar="path", - help="Path to SSL client certificate, a single file containing the " - "private key and the certificate in PEM format.", -) - -index_url: Callable[..., Option] = partial( - Option, - "-i", - "--index-url", - "--pypi-url", - dest="index_url", - metavar="URL", - default=PyPI.simple_url, - help="Base URL of the Python Package Index (default %default). " - "This should point to a repository compliant with PEP 503 " - "(the simple repository API) or a local directory laid out " - "in the same format.", -) - - -def extra_index_url() -> Option: - return Option( - "--extra-index-url", - dest="extra_index_urls", - metavar="URL", - action="append", - default=[], - help="Extra URLs of package indexes to use in addition to " - "--index-url. Should follow the same rules as " - "--index-url.", - ) - - -no_index: Callable[..., Option] = partial( - Option, - "--no-index", - dest="no_index", - action="store_true", - default=False, - help="Ignore package index (only looking at --find-links URLs instead).", -) - - -def find_links() -> Option: - return Option( - "-f", - "--find-links", - dest="find_links", - action="append", - default=[], - metavar="url", - help="If a URL or path to an html file, then parse for links to " - "archives such as sdist (.tar.gz) or wheel (.whl) files. " - "If a local path or file:// URL that's a directory, " - "then look for archives in the directory listing. " - "Links to VCS project URLs are not supported.", - ) - - -def trusted_host() -> Option: - return Option( - "--trusted-host", - dest="trusted_hosts", - action="append", - metavar="HOSTNAME", - default=[], - help="Mark this host or host:port pair as trusted, even though it " - "does not have valid or any HTTPS.", - ) - - -def constraints() -> Option: - return Option( - "-c", - "--constraint", - dest="constraints", - action="append", - default=[], - metavar="file", - help="Constrain versions using the given constraints file. " - "This option can be used multiple times.", - ) - - -def requirements() -> Option: - return Option( - "-r", - "--requirement", - dest="requirements", - action="append", - default=[], - metavar="file", - help="Install from the given requirements file. " - "This option can be used multiple times.", - ) - - -def editable() -> Option: - return Option( - "-e", - "--editable", - dest="editables", - action="append", - default=[], - metavar="path/url", - help=( - "Install a project in editable mode (i.e. setuptools " - '"develop mode") from a local project path or a VCS url.' - ), - ) - - -def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None: - value = os.path.abspath(value) - setattr(parser.values, option.dest, value) - - -src: Callable[..., Option] = partial( - PipOption, - "--src", - "--source", - "--source-dir", - "--source-directory", - dest="src_dir", - type="path", - metavar="dir", - default=get_src_prefix(), - action="callback", - callback=_handle_src, - help="Directory to check out editable projects into. " - 'The default in a virtualenv is "/src". 
' - 'The default for global installs is "/src".', -) - - -def _get_format_control(values: Values, option: Option) -> Any: - """Get a format_control object.""" - return getattr(values, option.dest) - - -def _handle_no_binary( - option: Option, opt_str: str, value: str, parser: OptionParser -) -> None: - existing = _get_format_control(parser.values, option) - FormatControl.handle_mutual_excludes( - value, - existing.no_binary, - existing.only_binary, - ) - - -def _handle_only_binary( - option: Option, opt_str: str, value: str, parser: OptionParser -) -> None: - existing = _get_format_control(parser.values, option) - FormatControl.handle_mutual_excludes( - value, - existing.only_binary, - existing.no_binary, - ) - - -def no_binary() -> Option: - format_control = FormatControl(set(), set()) - return Option( - "--no-binary", - dest="format_control", - action="callback", - callback=_handle_no_binary, - type="str", - default=format_control, - help="Do not use binary packages. Can be supplied multiple times, and " - 'each time adds to the existing value. Accepts either ":all:" to ' - 'disable all binary packages, ":none:" to empty the set (notice ' - "the colons), or one or more package names with commas between " - "them (no colons). Note that some packages are tricky to compile " - "and may fail to install when this option is used on them.", - ) - - -def only_binary() -> Option: - format_control = FormatControl(set(), set()) - return Option( - "--only-binary", - dest="format_control", - action="callback", - callback=_handle_only_binary, - type="str", - default=format_control, - help="Do not use source packages. Can be supplied multiple times, and " - 'each time adds to the existing value. Accepts either ":all:" to ' - 'disable all source packages, ":none:" to empty the set, or one ' - "or more package names with commas between them. Packages " - "without binary distributions will fail to install when this " - "option is used on them.", - ) - - -platforms: Callable[..., Option] = partial( - Option, - "--platform", - dest="platforms", - metavar="platform", - action="append", - default=None, - help=( - "Only use wheels compatible with . Defaults to the " - "platform of the running system. Use this option multiple times to " - "specify multiple platforms supported by the target interpreter." - ), -) - - -# This was made a separate function for unit-testing purposes. -def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]: - """ - Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. - - :return: A 2-tuple (version_info, error_msg), where `error_msg` is - non-None if and only if there was a parsing error. - """ - if not value: - # The empty string is the same as not providing a value. - return (None, None) - - parts = value.split(".") - if len(parts) > 3: - return ((), "at most three version parts are allowed") - - if len(parts) == 1: - # Then we are in the case of "3" or "37". - value = parts[0] - if len(value) > 1: - parts = [value[0], value[1:]] - - try: - version_info = tuple(int(part) for part in parts) - except ValueError: - return ((), "each version part must be an integer") - - return (version_info, None) - - -def _handle_python_version( - option: Option, opt_str: str, value: str, parser: OptionParser -) -> None: - """ - Handle a provided --python-version value. 
- """ - version_info, error_msg = _convert_python_version(value) - if error_msg is not None: - msg = "invalid --python-version value: {!r}: {}".format( - value, - error_msg, - ) - raise_option_error(parser, option=option, msg=msg) - - parser.values.python_version = version_info - - -python_version: Callable[..., Option] = partial( - Option, - "--python-version", - dest="python_version", - metavar="python_version", - action="callback", - callback=_handle_python_version, - type="str", - default=None, - help=dedent( - """\ - The Python interpreter version to use for wheel and "Requires-Python" - compatibility checks. Defaults to a version derived from the running - interpreter. The version can be specified using up to three dot-separated - integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor - version can also be given as a string without dots (e.g. "37" for 3.7.0). - """ - ), -) - - -implementation: Callable[..., Option] = partial( - Option, - "--implementation", - dest="implementation", - metavar="implementation", - default=None, - help=( - "Only use wheels compatible with Python " - "implementation , e.g. 'pp', 'jy', 'cp', " - " or 'ip'. If not specified, then the current " - "interpreter implementation is used. Use 'py' to force " - "implementation-agnostic wheels." - ), -) - - -abis: Callable[..., Option] = partial( - Option, - "--abi", - dest="abis", - metavar="abi", - action="append", - default=None, - help=( - "Only use wheels compatible with Python abi , e.g. 'pypy_41'. " - "If not specified, then the current interpreter abi tag is used. " - "Use this option multiple times to specify multiple abis supported " - "by the target interpreter. Generally you will need to specify " - "--implementation, --platform, and --python-version when using this " - "option." - ), -) - - -def add_target_python_options(cmd_opts: OptionGroup) -> None: - cmd_opts.add_option(platforms()) - cmd_opts.add_option(python_version()) - cmd_opts.add_option(implementation()) - cmd_opts.add_option(abis()) - - -def make_target_python(options: Values) -> TargetPython: - target_python = TargetPython( - platforms=options.platforms, - py_version_info=options.python_version, - abis=options.abis, - implementation=options.implementation, - ) - - return target_python - - -def prefer_binary() -> Option: - return Option( - "--prefer-binary", - dest="prefer_binary", - action="store_true", - default=False, - help="Prefer older binary packages over newer source packages.", - ) - - -cache_dir: Callable[..., Option] = partial( - PipOption, - "--cache-dir", - dest="cache_dir", - default=USER_CACHE_DIR, - metavar="dir", - type="path", - help="Store the cache data in .", -) - - -def _handle_no_cache_dir( - option: Option, opt: str, value: str, parser: OptionParser -) -> None: - """ - Process a value provided for the --no-cache-dir option. - - This is an optparse.Option callback for the --no-cache-dir option. - """ - # The value argument will be None if --no-cache-dir is passed via the - # command-line, since the option doesn't accept arguments. However, - # the value can be non-None if the option is triggered e.g. by an - # environment variable, like PIP_NO_CACHE_DIR=true. - if value is not None: - # Then parse the string value to get argument error-checking. 
- try: - strtobool(value) - except ValueError as exc: - raise_option_error(parser, option=option, msg=str(exc)) - - # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() - # converted to 0 (like "false" or "no") caused cache_dir to be disabled - # rather than enabled (logic would say the latter). Thus, we disable - # the cache directory not just on values that parse to True, but (for - # backwards compatibility reasons) also on values that parse to False. - # In other words, always set it to False if the option is provided in - # some (valid) form. - parser.values.cache_dir = False - - -no_cache: Callable[..., Option] = partial( - Option, - "--no-cache-dir", - dest="cache_dir", - action="callback", - callback=_handle_no_cache_dir, - help="Disable the cache.", -) - -no_deps: Callable[..., Option] = partial( - Option, - "--no-deps", - "--no-dependencies", - dest="ignore_dependencies", - action="store_true", - default=False, - help="Don't install package dependencies.", -) - -ignore_requires_python: Callable[..., Option] = partial( - Option, - "--ignore-requires-python", - dest="ignore_requires_python", - action="store_true", - help="Ignore the Requires-Python information.", -) - -no_build_isolation: Callable[..., Option] = partial( - Option, - "--no-build-isolation", - dest="build_isolation", - action="store_false", - default=True, - help="Disable isolation when building a modern source distribution. " - "Build dependencies specified by PEP 518 must be already installed " - "if this option is used.", -) - - -def _handle_no_use_pep517( - option: Option, opt: str, value: str, parser: OptionParser -) -> None: - """ - Process a value provided for the --no-use-pep517 option. - - This is an optparse.Option callback for the no_use_pep517 option. - """ - # Since --no-use-pep517 doesn't accept arguments, the value argument - # will be None if --no-use-pep517 is passed via the command-line. - # However, the value can be non-None if the option is triggered e.g. - # by an environment variable, for example "PIP_NO_USE_PEP517=true". - if value is not None: - msg = """A value was passed for --no-use-pep517, - probably using either the PIP_NO_USE_PEP517 environment variable - or the "no-use-pep517" config file option. Use an appropriate value - of the PIP_USE_PEP517 environment variable or the "use-pep517" - config file option instead. - """ - raise_option_error(parser, option=option, msg=msg) - - # Otherwise, --no-use-pep517 was passed via the command-line. - parser.values.use_pep517 = False - - -use_pep517: Any = partial( - Option, - "--use-pep517", - dest="use_pep517", - action="store_true", - default=None, - help="Use PEP 517 for building source distributions " - "(use --no-use-pep517 to force legacy behaviour).", -) - -no_use_pep517: Any = partial( - Option, - "--no-use-pep517", - dest="use_pep517", - action="callback", - callback=_handle_no_use_pep517, - default=None, - help=SUPPRESS_HELP, -) - -install_options: Callable[..., Option] = partial( - Option, - "--install-option", - dest="install_options", - action="append", - metavar="options", - help="Extra arguments to be supplied to the setup.py install " - 'command (use like --install-option="--install-scripts=/usr/local/' - 'bin"). Use multiple --install-option options to pass multiple ' - "options to setup.py install. 
If you are using an option with a " - "directory path, be sure to use absolute path.", -) - -build_options: Callable[..., Option] = partial( - Option, - "--build-option", - dest="build_options", - metavar="options", - action="append", - help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", -) - -global_options: Callable[..., Option] = partial( - Option, - "--global-option", - dest="global_options", - action="append", - metavar="options", - help="Extra global options to be supplied to the setup.py " - "call before the install or bdist_wheel command.", -) - -no_clean: Callable[..., Option] = partial( - Option, - "--no-clean", - action="store_true", - default=False, - help="Don't clean up build directories.", -) - -pre: Callable[..., Option] = partial( - Option, - "--pre", - action="store_true", - default=False, - help="Include pre-release and development versions. By default, " - "pip only finds stable versions.", -) - -disable_pip_version_check: Callable[..., Option] = partial( - Option, - "--disable-pip-version-check", - dest="disable_pip_version_check", - action="store_true", - default=False, - help="Don't periodically check PyPI to determine whether a new version " - "of pip is available for download. Implied with --no-index.", -) - - -def _handle_merge_hash( - option: Option, opt_str: str, value: str, parser: OptionParser -) -> None: - """Given a value spelled "algo:digest", append the digest to a list - pointed to in a dict by the algo name.""" - if not parser.values.hashes: - parser.values.hashes = {} - try: - algo, digest = value.split(":", 1) - except ValueError: - parser.error( - "Arguments to {} must be a hash name " # noqa - "followed by a value, like --hash=sha256:" - "abcde...".format(opt_str) - ) - if algo not in STRONG_HASHES: - parser.error( - "Allowed hash algorithms for {} are {}.".format( # noqa - opt_str, ", ".join(STRONG_HASHES) - ) - ) - parser.values.hashes.setdefault(algo, []).append(digest) - - -hash: Callable[..., Option] = partial( - Option, - "--hash", - # Hash values eventually end up in InstallRequirement.hashes due to - # __dict__ copying in process_line(). - dest="hashes", - action="callback", - callback=_handle_merge_hash, - type="string", - help="Verify that the package's archive matches this " - "hash before installing. Example: --hash=sha256:abcdef...", -) - - -require_hashes: Callable[..., Option] = partial( - Option, - "--require-hashes", - dest="require_hashes", - action="store_true", - default=False, - help="Require a hash to check each requirement against, for " - "repeatable installs. 
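# Hypothetical illustration of _handle_merge_hash() above (the package name and
# digests are placeholders, not real values):
#
#     pip install somepkg --hash=sha256:aaaa... --hash=sha512:bbbb...
#
# accumulates parser.values.hashes as {"sha256": ["aaaa..."], "sha512": ["bbbb..."]},
# while a value without a ":" or with an algorithm outside STRONG_HASHES is
# rejected through parser.error().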
This option is implied when any package in a " - "requirements file has a --hash option.", -) - - -list_path: Callable[..., Option] = partial( - PipOption, - "--path", - dest="path", - type="path", - action="append", - help="Restrict to the specified installation path for listing " - "packages (can be used multiple times).", -) - - -def check_list_path_option(options: Values) -> None: - if options.path and (options.user or options.local): - raise CommandError("Cannot combine '--path' with '--user' or '--local'") - - -list_exclude: Callable[..., Option] = partial( - PipOption, - "--exclude", - dest="excludes", - action="append", - metavar="package", - type="package_name", - help="Exclude specified package from the output", -) - - -no_python_version_warning: Callable[..., Option] = partial( - Option, - "--no-python-version-warning", - dest="no_python_version_warning", - action="store_true", - default=False, - help="Silence deprecation warnings for upcoming unsupported Pythons.", -) - - -use_new_feature: Callable[..., Option] = partial( - Option, - "--use-feature", - dest="features_enabled", - metavar="feature", - action="append", - default=[], - choices=["2020-resolver", "fast-deps", "in-tree-build"], - help="Enable new functionality, that may be backward incompatible.", -) - -use_deprecated_feature: Callable[..., Option] = partial( - Option, - "--use-deprecated", - dest="deprecated_features_enabled", - metavar="feature", - action="append", - default=[], - choices=[ - "legacy-resolver", - "out-of-tree-build", - "backtrack-on-build-failures", - "html5lib", - ], - help=("Enable deprecated functionality, that will be removed in the future."), -) - - -########## -# groups # -########## - -general_group: Dict[str, Any] = { - "name": "General Options", - "options": [ - help_, - debug_mode, - isolated_mode, - require_virtualenv, - verbose, - version, - quiet, - log, - no_input, - proxy, - retries, - timeout, - exists_action, - trusted_host, - cert, - client_cert, - cache_dir, - no_cache, - disable_pip_version_check, - no_color, - no_python_version_warning, - use_new_feature, - use_deprecated_feature, - ], -} - -index_group: Dict[str, Any] = { - "name": "Package Index Options", - "options": [ - index_url, - extra_index_url, - no_index, - find_links, - ], -} diff --git a/venv/Lib/site-packages/pip/_internal/cli/command_context.py b/venv/Lib/site-packages/pip/_internal/cli/command_context.py deleted file mode 100644 index ed68322..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/command_context.py +++ /dev/null @@ -1,27 +0,0 @@ -from contextlib import ExitStack, contextmanager -from typing import ContextManager, Iterator, TypeVar - -_T = TypeVar("_T", covariant=True) - - -class CommandContextMixIn: - def __init__(self) -> None: - super().__init__() - self._in_main_context = False - self._main_context = ExitStack() - - @contextmanager - def main_context(self) -> Iterator[None]: - assert not self._in_main_context - - self._in_main_context = True - try: - with self._main_context: - yield - finally: - self._in_main_context = False - - def enter_context(self, context_provider: ContextManager[_T]) -> _T: - assert self._in_main_context - - return self._main_context.enter_context(context_provider) diff --git a/venv/Lib/site-packages/pip/_internal/cli/main.py b/venv/Lib/site-packages/pip/_internal/cli/main.py deleted file mode 100644 index 0e31221..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/main.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Primary application entrypoint. 
-""" -import locale -import logging -import os -import sys -from typing import List, Optional - -from pip._internal.cli.autocompletion import autocomplete -from pip._internal.cli.main_parser import parse_command -from pip._internal.commands import create_command -from pip._internal.exceptions import PipError -from pip._internal.utils import deprecation - -logger = logging.getLogger(__name__) - - -# Do not import and use main() directly! Using it directly is actively -# discouraged by pip's maintainers. The name, location and behavior of -# this function is subject to change, so calling it directly is not -# portable across different pip versions. - -# In addition, running pip in-process is unsupported and unsafe. This is -# elaborated in detail at -# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. -# That document also provides suggestions that should work for nearly -# all users that are considering importing and using main() directly. - -# However, we know that certain users will still want to invoke pip -# in-process. If you understand and accept the implications of using pip -# in an unsupported manner, the best approach is to use runpy to avoid -# depending on the exact location of this entry point. - -# The following example shows how to use runpy to invoke pip in that -# case: -# -# sys.argv = ["pip", your, args, here] -# runpy.run_module("pip", run_name="__main__") -# -# Note that this will exit the process after running, unlike a direct -# call to main. As it is not safe to do any processing after calling -# main, this should not be an issue in practice. - - -def main(args: Optional[List[str]] = None) -> int: - if args is None: - args = sys.argv[1:] - - # Configure our deprecation warnings to be sent through loggers - deprecation.install_warning_logger() - - autocomplete() - - try: - cmd_name, cmd_args = parse_command(args) - except PipError as exc: - sys.stderr.write(f"ERROR: {exc}") - sys.stderr.write(os.linesep) - sys.exit(1) - - # Needed for locale.getpreferredencoding(False) to work - # in pip._internal.utils.encoding.auto_decode - try: - locale.setlocale(locale.LC_ALL, "") - except locale.Error as e: - # setlocale can apparently crash if locale are uninitialized - logger.debug("Ignoring error %s when setting locale", e) - command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) - - return command.main(cmd_args) diff --git a/venv/Lib/site-packages/pip/_internal/cli/main_parser.py b/venv/Lib/site-packages/pip/_internal/cli/main_parser.py deleted file mode 100644 index 3666ab0..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/main_parser.py +++ /dev/null @@ -1,87 +0,0 @@ -"""A single place for constructing and exposing the main parser -""" - -import os -import sys -from typing import List, Tuple - -from pip._internal.cli import cmdoptions -from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter -from pip._internal.commands import commands_dict, get_similar_commands -from pip._internal.exceptions import CommandError -from pip._internal.utils.misc import get_pip_version, get_prog - -__all__ = ["create_main_parser", "parse_command"] - - -def create_main_parser() -> ConfigOptionParser: - """Creates and returns the main parser for pip's CLI""" - - parser = ConfigOptionParser( - usage="\n%prog [options]", - add_help_option=False, - formatter=UpdatingDefaultsHelpFormatter(), - name="global", - prog=get_prog(), - ) - parser.disable_interspersed_args() - - parser.version = get_pip_version() - - # add the general 
options - gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) - parser.add_option_group(gen_opts) - - # so the help formatter knows - parser.main = True # type: ignore - - # create command listing for description - description = [""] + [ - f"{name:27} {command_info.summary}" - for name, command_info in commands_dict.items() - ] - parser.description = "\n".join(description) - - return parser - - -def parse_command(args: List[str]) -> Tuple[str, List[str]]: - parser = create_main_parser() - - # Note: parser calls disable_interspersed_args(), so the result of this - # call is to split the initial args into the general options before the - # subcommand and everything else. - # For example: - # args: ['--timeout=5', 'install', '--user', 'INITools'] - # general_options: ['--timeout==5'] - # args_else: ['install', '--user', 'INITools'] - general_options, args_else = parser.parse_args(args) - - # --version - if general_options.version: - sys.stdout.write(parser.version) - sys.stdout.write(os.linesep) - sys.exit() - - # pip || pip help -> print_help() - if not args_else or (args_else[0] == "help" and len(args_else) == 1): - parser.print_help() - sys.exit() - - # the subcommand name - cmd_name = args_else[0] - - if cmd_name not in commands_dict: - guess = get_similar_commands(cmd_name) - - msg = [f'unknown command "{cmd_name}"'] - if guess: - msg.append(f'maybe you meant "{guess}"') - - raise CommandError(" - ".join(msg)) - - # all the args without the subcommand - cmd_args = args[:] - cmd_args.remove(cmd_name) - - return cmd_name, cmd_args diff --git a/venv/Lib/site-packages/pip/_internal/cli/parser.py b/venv/Lib/site-packages/pip/_internal/cli/parser.py deleted file mode 100644 index a1c99a8..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/parser.py +++ /dev/null @@ -1,292 +0,0 @@ -"""Base option parser setup""" - -import logging -import optparse -import shutil -import sys -import textwrap -from contextlib import suppress -from typing import Any, Dict, Iterator, List, Tuple - -from pip._internal.cli.status_codes import UNKNOWN_ERROR -from pip._internal.configuration import Configuration, ConfigurationError -from pip._internal.utils.misc import redact_auth_from_url, strtobool - -logger = logging.getLogger(__name__) - - -class PrettyHelpFormatter(optparse.IndentedHelpFormatter): - """A prettier/less verbose help formatter for optparse.""" - - def __init__(self, *args: Any, **kwargs: Any) -> None: - # help position must be aligned with __init__.parseopts.description - kwargs["max_help_position"] = 30 - kwargs["indent_increment"] = 1 - kwargs["width"] = shutil.get_terminal_size()[0] - 2 - super().__init__(*args, **kwargs) - - def format_option_strings(self, option: optparse.Option) -> str: - return self._format_option_strings(option) - - def _format_option_strings( - self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", " - ) -> str: - """ - Return a comma-separated list of option strings and metavars. 
- - :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') - :param mvarfmt: metavar format string - :param optsep: separator - """ - opts = [] - - if option._short_opts: - opts.append(option._short_opts[0]) - if option._long_opts: - opts.append(option._long_opts[0]) - if len(opts) > 1: - opts.insert(1, optsep) - - if option.takes_value(): - assert option.dest is not None - metavar = option.metavar or option.dest.lower() - opts.append(mvarfmt.format(metavar.lower())) - - return "".join(opts) - - def format_heading(self, heading: str) -> str: - if heading == "Options": - return "" - return heading + ":\n" - - def format_usage(self, usage: str) -> str: - """ - Ensure there is only one newline between usage and the first heading - if there is no description. - """ - msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " ")) - return msg - - def format_description(self, description: str) -> str: - # leave full control over description to us - if description: - if hasattr(self.parser, "main"): - label = "Commands" - else: - label = "Description" - # some doc strings have initial newlines, some don't - description = description.lstrip("\n") - # some doc strings have final newlines and spaces, some don't - description = description.rstrip() - # dedent, then reindent - description = self.indent_lines(textwrap.dedent(description), " ") - description = f"{label}:\n{description}\n" - return description - else: - return "" - - def format_epilog(self, epilog: str) -> str: - # leave full control over epilog to us - if epilog: - return epilog - else: - return "" - - def indent_lines(self, text: str, indent: str) -> str: - new_lines = [indent + line for line in text.split("\n")] - return "\n".join(new_lines) - - -class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): - """Custom help formatter for use in ConfigOptionParser. - - This is updates the defaults before expanding them, allowing - them to show up correctly in the help listing. 
- - Also redact auth from url type options - """ - - def expand_default(self, option: optparse.Option) -> str: - default_values = None - if self.parser is not None: - assert isinstance(self.parser, ConfigOptionParser) - self.parser._update_defaults(self.parser.defaults) - assert option.dest is not None - default_values = self.parser.defaults.get(option.dest) - help_text = super().expand_default(option) - - if default_values and option.metavar == "URL": - if isinstance(default_values, str): - default_values = [default_values] - - # If its not a list, we should abort and just return the help text - if not isinstance(default_values, list): - default_values = [] - - for val in default_values: - help_text = help_text.replace(val, redact_auth_from_url(val)) - - return help_text - - -class CustomOptionParser(optparse.OptionParser): - def insert_option_group( - self, idx: int, *args: Any, **kwargs: Any - ) -> optparse.OptionGroup: - """Insert an OptionGroup at a given position.""" - group = self.add_option_group(*args, **kwargs) - - self.option_groups.pop() - self.option_groups.insert(idx, group) - - return group - - @property - def option_list_all(self) -> List[optparse.Option]: - """Get a list of all options, including those in option groups.""" - res = self.option_list[:] - for i in self.option_groups: - res.extend(i.option_list) - - return res - - -class ConfigOptionParser(CustomOptionParser): - """Custom option parser which updates its defaults by checking the - configuration files and environmental variables""" - - def __init__( - self, - *args: Any, - name: str, - isolated: bool = False, - **kwargs: Any, - ) -> None: - self.name = name - self.config = Configuration(isolated) - - assert self.name - super().__init__(*args, **kwargs) - - def check_default(self, option: optparse.Option, key: str, val: Any) -> Any: - try: - return option.check_value(key, val) - except optparse.OptionValueError as exc: - print(f"An error occurred during configuration: {exc}") - sys.exit(3) - - def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]: - # Configuration gives keys in an unordered manner. Order them. - override_order = ["global", self.name, ":env:"] - - # Pool the options into different groups - section_items: Dict[str, List[Tuple[str, Any]]] = { - name: [] for name in override_order - } - for section_key, val in self.config.items(): - # ignore empty values - if not val: - logger.debug( - "Ignoring configuration key '%s' as it's value is empty.", - section_key, - ) - continue - - section, key = section_key.split(".", 1) - if section in override_order: - section_items[section].append((key, val)) - - # Yield each group in their override order - for section in override_order: - for key, val in section_items[section]: - yield key, val - - def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: - """Updates the given defaults with values from the config files and - the environ. Does a little special handling for certain types of - options (lists).""" - - # Accumulate complex default state. - self.values = optparse.Values(self.defaults) - late_eval = set() - # Then set the options with those values - for key, val in self._get_ordered_configuration_items(): - # '--' because configuration supports only long names - option = self.get_option("--" + key) - - # Ignore options not present in this parser. E.g. non-globals put - # in [global] by users that want them to apply to all applicable - # commands. 
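# Worked example of the override order used here, with hypothetical values and
# the usual PIP_<OPTION> environment-variable mapping: a config file containing
#
#     [global]
#     timeout = 60
#     [install]
#     timeout = 10
#
# plus PIP_TIMEOUT=30 yields items in the order global -> install -> :env:, so
# defaults["timeout"] is overwritten 60 -> 10 -> 30 and the environment-variable
# value wins for the "install" command.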
- if option is None: - continue - - assert option.dest is not None - - if option.action in ("store_true", "store_false"): - try: - val = strtobool(val) - except ValueError: - self.error( - "{} is not a valid value for {} option, " # noqa - "please specify a boolean value like yes/no, " - "true/false or 1/0 instead.".format(val, key) - ) - elif option.action == "count": - with suppress(ValueError): - val = strtobool(val) - with suppress(ValueError): - val = int(val) - if not isinstance(val, int) or val < 0: - self.error( - "{} is not a valid value for {} option, " # noqa - "please instead specify either a non-negative integer " - "or a boolean value like yes/no or false/true " - "which is equivalent to 1/0.".format(val, key) - ) - elif option.action == "append": - val = val.split() - val = [self.check_default(option, key, v) for v in val] - elif option.action == "callback": - assert option.callback is not None - late_eval.add(option.dest) - opt_str = option.get_opt_string() - val = option.convert_value(opt_str, val) - # From take_action - args = option.callback_args or () - kwargs = option.callback_kwargs or {} - option.callback(option, opt_str, val, self, *args, **kwargs) - else: - val = self.check_default(option, key, val) - - defaults[option.dest] = val - - for key in late_eval: - defaults[key] = getattr(self.values, key) - self.values = None - return defaults - - def get_default_values(self) -> optparse.Values: - """Overriding to make updating the defaults after instantiation of - the option parser possible, _update_defaults() does the dirty work.""" - if not self.process_default_values: - # Old, pre-Optik 1.5 behaviour. - return optparse.Values(self.defaults) - - # Load the configuration, or error out in case of an error - try: - self.config.load() - except ConfigurationError as err: - self.exit(UNKNOWN_ERROR, str(err)) - - defaults = self._update_defaults(self.defaults.copy()) # ours - for option in self._get_all_options(): - assert option.dest is not None - default = defaults.get(option.dest) - if isinstance(default, str): - opt_str = option.get_opt_string() - defaults[option.dest] = option.check_value(opt_str, default) - return optparse.Values(defaults) - - def error(self, msg: str) -> None: - self.print_usage(sys.stderr) - self.exit(UNKNOWN_ERROR, f"{msg}\n") diff --git a/venv/Lib/site-packages/pip/_internal/cli/progress_bars.py b/venv/Lib/site-packages/pip/_internal/cli/progress_bars.py deleted file mode 100644 index ffa1964..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/progress_bars.py +++ /dev/null @@ -1,321 +0,0 @@ -import functools -import itertools -import sys -from signal import SIGINT, default_int_handler, signal -from typing import Any, Callable, Iterator, Optional, Tuple - -from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar -from pip._vendor.progress.spinner import Spinner -from pip._vendor.rich.progress import ( - BarColumn, - DownloadColumn, - FileSizeColumn, - Progress, - ProgressColumn, - SpinnerColumn, - TextColumn, - TimeElapsedColumn, - TimeRemainingColumn, - TransferSpeedColumn, -) - -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.logging import get_indentation -from pip._internal.utils.misc import format_size - -try: - from pip._vendor import colorama -# Lots of different errors can come from this, including SystemError and -# ImportError. 
-except Exception: - colorama = None - -DownloadProgressRenderer = Callable[[Iterator[bytes]], Iterator[bytes]] - - -def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar: - encoding = getattr(preferred.file, "encoding", None) - - # If we don't know what encoding this file is in, then we'll just assume - # that it doesn't support unicode and use the ASCII bar. - if not encoding: - return fallback - - # Collect all of the possible characters we want to use with the preferred - # bar. - characters = [ - getattr(preferred, "empty_fill", ""), - getattr(preferred, "fill", ""), - ] - characters += list(getattr(preferred, "phases", [])) - - # Try to decode the characters we're using for the bar using the encoding - # of the given file, if this works then we'll assume that we can use the - # fancier bar and if not we'll fall back to the plaintext bar. - try: - "".join(characters).encode(encoding) - except UnicodeEncodeError: - return fallback - else: - return preferred - - -_BaseBar: Any = _select_progress_class(IncrementalBar, Bar) - - -class InterruptibleMixin: - """ - Helper to ensure that self.finish() gets called on keyboard interrupt. - - This allows downloads to be interrupted without leaving temporary state - (like hidden cursors) behind. - - This class is similar to the progress library's existing SigIntMixin - helper, but as of version 1.2, that helper has the following problems: - - 1. It calls sys.exit(). - 2. It discards the existing SIGINT handler completely. - 3. It leaves its own handler in place even after an uninterrupted finish, - which will have unexpected delayed effects if the user triggers an - unrelated keyboard interrupt some time after a progress-displaying - download has already completed, for example. - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - """ - Save the original SIGINT handler for later. - """ - # https://github.com/python/mypy/issues/5887 - super().__init__(*args, **kwargs) # type: ignore - - self.original_handler = signal(SIGINT, self.handle_sigint) - - # If signal() returns None, the previous handler was not installed from - # Python, and we cannot restore it. This probably should not happen, - # but if it does, we must restore something sensible instead, at least. - # The least bad option should be Python's default SIGINT handler, which - # just raises KeyboardInterrupt. - if self.original_handler is None: - self.original_handler = default_int_handler - - def finish(self) -> None: - """ - Restore the original SIGINT handler after finishing. - - This should happen regardless of whether the progress display finishes - normally, or gets interrupted. - """ - super().finish() # type: ignore - signal(SIGINT, self.original_handler) - - def handle_sigint(self, signum, frame): # type: ignore - """ - Call self.finish() before delegating to the original SIGINT handler. - - This handler should only be in place while the progress display is - active. 
- """ - self.finish() - self.original_handler(signum, frame) - - -class SilentBar(Bar): - def update(self) -> None: - pass - - -class BlueEmojiBar(IncrementalBar): - - suffix = "%(percent)d%%" - bar_prefix = " " - bar_suffix = " " - phases = ("\U0001F539", "\U0001F537", "\U0001F535") - - -class DownloadProgressMixin: - def __init__(self, *args: Any, **kwargs: Any) -> None: - # https://github.com/python/mypy/issues/5887 - super().__init__(*args, **kwargs) # type: ignore - self.message: str = (" " * (get_indentation() + 2)) + self.message - - @property - def downloaded(self) -> str: - return format_size(self.index) # type: ignore - - @property - def download_speed(self) -> str: - # Avoid zero division errors... - if self.avg == 0.0: # type: ignore - return "..." - return format_size(1 / self.avg) + "/s" # type: ignore - - @property - def pretty_eta(self) -> str: - if self.eta: # type: ignore - return f"eta {self.eta_td}" # type: ignore - return "" - - def iter(self, it): # type: ignore - for x in it: - yield x - # B305 is incorrectly raised here - # https://github.com/PyCQA/flake8-bugbear/issues/59 - self.next(len(x)) # noqa: B305 - self.finish() - - -class WindowsMixin: - def __init__(self, *args: Any, **kwargs: Any) -> None: - # The Windows terminal does not support the hide/show cursor ANSI codes - # even with colorama. So we'll ensure that hide_cursor is False on - # Windows. - # This call needs to go before the super() call, so that hide_cursor - # is set in time. The base progress bar class writes the "hide cursor" - # code to the terminal in its init, so if we don't set this soon - # enough, we get a "hide" with no corresponding "show"... - if WINDOWS and self.hide_cursor: # type: ignore - self.hide_cursor = False - - # https://github.com/python/mypy/issues/5887 - super().__init__(*args, **kwargs) # type: ignore - - # Check if we are running on Windows and we have the colorama module, - # if we do then wrap our file with it. - if WINDOWS and colorama: - self.file = colorama.AnsiToWin32(self.file) # type: ignore - # The progress code expects to be able to call self.file.isatty() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. - self.file.isatty = lambda: self.file.wrapped.isatty() - # The progress code expects to be able to call self.file.flush() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. 
- self.file.flush = lambda: self.file.wrapped.flush() - - -class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin): - - file = sys.stdout - message = "%(percent)d%%" - suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" - - -class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar): - pass - - -class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): - pass - - -class DownloadBar(BaseDownloadProgressBar, Bar): - pass - - -class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar): - pass - - -class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar): - pass - - -class DownloadProgressSpinner( - WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner -): - - file = sys.stdout - suffix = "%(downloaded)s %(download_speed)s" - - def next_phase(self) -> str: - if not hasattr(self, "_phaser"): - self._phaser = itertools.cycle(self.phases) - return next(self._phaser) - - def update(self) -> None: - message = self.message % self - phase = self.next_phase() - suffix = self.suffix % self - line = "".join( - [ - message, - " " if message else "", - phase, - " " if suffix else "", - suffix, - ] - ) - - self.writeln(line) - - -BAR_TYPES = { - "off": (DownloadSilentBar, DownloadSilentBar), - "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), - "ascii": (DownloadBar, DownloadProgressSpinner), - "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), - "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner), -} - - -def _legacy_progress_bar( - progress_bar: str, max: Optional[int] -) -> DownloadProgressRenderer: - if max is None or max == 0: - return BAR_TYPES[progress_bar][1]().iter # type: ignore - else: - return BAR_TYPES[progress_bar][0](max=max).iter - - -# -# Modern replacement, for our legacy progress bars. -# -def _rich_progress_bar( - iterable: Iterator[bytes], - *, - bar_type: str, - size: int, -) -> Iterator[bytes]: - assert bar_type == "on", "This should only be used in the default mode." - - if not size: - total = float("inf") - columns: Tuple[ProgressColumn, ...] = ( - TextColumn("[progress.description]{task.description}"), - SpinnerColumn("line", speed=1.5), - FileSizeColumn(), - TransferSpeedColumn(), - TimeElapsedColumn(), - ) - else: - total = size - columns = ( - TextColumn("[progress.description]{task.description}"), - BarColumn(), - DownloadColumn(), - TransferSpeedColumn(), - TextColumn("eta"), - TimeRemainingColumn(), - ) - - progress = Progress(*columns, refresh_per_second=30) - task_id = progress.add_task(" " * (get_indentation() + 2), total=total) - with progress: - for chunk in iterable: - yield chunk - progress.update(task_id, advance=len(chunk)) - - -def get_download_progress_renderer( - *, bar_type: str, size: Optional[int] = None -) -> DownloadProgressRenderer: - """Get an object that can be used to render the download progress. - - Returns a callable, that takes an iterable to "wrap". - """ - if bar_type == "on": - return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size) - elif bar_type == "off": - return iter # no-op, when passed an iterator - else: - return _legacy_progress_bar(bar_type, size) diff --git a/venv/Lib/site-packages/pip/_internal/cli/req_command.py b/venv/Lib/site-packages/pip/_internal/cli/req_command.py deleted file mode 100644 index 5d4d1f0..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/req_command.py +++ /dev/null @@ -1,506 +0,0 @@ -"""Contains the Command base classes that depend on PipSession. 
- -The classes in this module are in a separate module so the commands not -needing download / PackageFinder capability don't unnecessarily import the -PackageFinder machinery and all its vendored dependencies, etc. -""" - -import logging -import os -import sys -from functools import partial -from optparse import Values -from typing import Any, List, Optional, Tuple - -from pip._internal.cache import WheelCache -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command -from pip._internal.cli.command_context import CommandContextMixIn -from pip._internal.exceptions import CommandError, PreviousBuildDirError -from pip._internal.index.collector import LinkCollector -from pip._internal.index.package_finder import PackageFinder -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.models.target_python import TargetPython -from pip._internal.network.session import PipSession -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req.constructors import ( - install_req_from_editable, - install_req_from_line, - install_req_from_parsed_requirement, - install_req_from_req_string, -) -from pip._internal.req.req_file import parse_requirements -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolution.base import BaseResolver -from pip._internal.self_outdated_check import pip_self_version_check -from pip._internal.utils.deprecation import deprecated -from pip._internal.utils.temp_dir import ( - TempDirectory, - TempDirectoryTypeRegistry, - tempdir_kinds, -) -from pip._internal.utils.virtualenv import running_under_virtualenv - -logger = logging.getLogger(__name__) - - -class SessionCommandMixin(CommandContextMixIn): - - """ - A class mixin for command classes needing _build_session(). 
- """ - - def __init__(self) -> None: - super().__init__() - self._session: Optional[PipSession] = None - - @classmethod - def _get_index_urls(cls, options: Values) -> Optional[List[str]]: - """Return a list of index urls from user-provided options.""" - index_urls = [] - if not getattr(options, "no_index", False): - url = getattr(options, "index_url", None) - if url: - index_urls.append(url) - urls = getattr(options, "extra_index_urls", None) - if urls: - index_urls.extend(urls) - # Return None rather than an empty list - return index_urls or None - - def get_default_session(self, options: Values) -> PipSession: - """Get a default-managed session.""" - if self._session is None: - self._session = self.enter_context(self._build_session(options)) - # there's no type annotation on requests.Session, so it's - # automatically ContextManager[Any] and self._session becomes Any, - # then https://github.com/python/mypy/issues/7696 kicks in - assert self._session is not None - return self._session - - def _build_session( - self, - options: Values, - retries: Optional[int] = None, - timeout: Optional[int] = None, - ) -> PipSession: - assert not options.cache_dir or os.path.isabs(options.cache_dir) - session = PipSession( - cache=( - os.path.join(options.cache_dir, "http") if options.cache_dir else None - ), - retries=retries if retries is not None else options.retries, - trusted_hosts=options.trusted_hosts, - index_urls=self._get_index_urls(options), - ) - - # Handle custom ca-bundles from the user - if options.cert: - session.verify = options.cert - - # Handle SSL client certificate - if options.client_cert: - session.cert = options.client_cert - - # Handle timeouts - if options.timeout or timeout: - session.timeout = timeout if timeout is not None else options.timeout - - # Handle configured proxies - if options.proxy: - session.proxies = { - "http": options.proxy, - "https": options.proxy, - } - - # Determine if we can prompt the user for authentication or not - session.auth.prompting = not options.no_input - - return session - - -class IndexGroupCommand(Command, SessionCommandMixin): - - """ - Abstract base class for commands with the index_group options. - - This also corresponds to the commands that permit the pip version check. - """ - - def handle_pip_version_check(self, options: Values) -> None: - """ - Do the pip version check if not disabled. - - This overrides the default behavior of not doing the check. - """ - # Make sure the index_group options are present. - assert hasattr(options, "no_index") - - if options.disable_pip_version_check or options.no_index: - return - - # Otherwise, check if we're using the latest version of pip available. - session = self._build_session( - options, retries=0, timeout=min(5, options.timeout) - ) - with session: - pip_self_version_check(session, options) - - -KEEPABLE_TEMPDIR_TYPES = [ - tempdir_kinds.BUILD_ENV, - tempdir_kinds.EPHEM_WHEEL_CACHE, - tempdir_kinds.REQ_BUILD, -] - - -def warn_if_run_as_root() -> None: - """Output a warning for sudo users on Unix. - - In a virtual environment, sudo pip still writes to virtualenv. - On Windows, users may run pip as Administrator without issues. - This warning only applies to Unix root users outside of virtualenv. - """ - if running_under_virtualenv(): - return - if not hasattr(os, "getuid"): - return - # On Windows, there are no "system managed" Python packages. Installing as - # Administrator via pip is the correct way of updating system environments. 
- # - # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform - # checks: https://mypy.readthedocs.io/en/stable/common_issues.html - if sys.platform == "win32" or sys.platform == "cygwin": - return - - if os.getuid() != 0: - return - - logger.warning( - "Running pip as the 'root' user can result in broken permissions and " - "conflicting behaviour with the system package manager. " - "It is recommended to use a virtual environment instead: " - "https://pip.pypa.io/warnings/venv" - ) - - -def with_cleanup(func: Any) -> Any: - """Decorator for common logic related to managing temporary - directories. - """ - - def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None: - for t in KEEPABLE_TEMPDIR_TYPES: - registry.set_delete(t, False) - - def wrapper( - self: RequirementCommand, options: Values, args: List[Any] - ) -> Optional[int]: - assert self.tempdir_registry is not None - if options.no_clean: - configure_tempdir_registry(self.tempdir_registry) - - try: - return func(self, options, args) - except PreviousBuildDirError: - # This kind of conflict can occur when the user passes an explicit - # build directory with a pre-existing folder. In that case we do - # not want to accidentally remove it. - configure_tempdir_registry(self.tempdir_registry) - raise - - return wrapper - - -class RequirementCommand(IndexGroupCommand): - def __init__(self, *args: Any, **kw: Any) -> None: - super().__init__(*args, **kw) - - self.cmd_opts.add_option(cmdoptions.no_clean()) - - @staticmethod - def determine_resolver_variant(options: Values) -> str: - """Determines which resolver should be used, based on the given options.""" - if "legacy-resolver" in options.deprecated_features_enabled: - return "legacy" - - return "2020-resolver" - - @staticmethod - def determine_build_failure_suppression(options: Values) -> bool: - """Determines whether build failures should be suppressed and backtracked on.""" - if "backtrack-on-build-failures" not in options.deprecated_features_enabled: - return False - - if "legacy-resolver" in options.deprecated_features_enabled: - raise CommandError("Cannot backtrack with legacy resolver.") - - deprecated( - reason=( - "Backtracking on build failures can mask issues related to how " - "a package generates metadata or builds a wheel. This flag will " - "be removed in pip 22.2." - ), - gone_in=None, - replacement=( - "avoiding known-bad versions by explicitly telling pip to ignore them " - "(either directly as requirements, or via a constraints file)" - ), - feature_flag=None, - issue=10655, - ) - return True - - @classmethod - def make_requirement_preparer( - cls, - temp_build_dir: TempDirectory, - options: Values, - req_tracker: RequirementTracker, - session: PipSession, - finder: PackageFinder, - use_user_site: bool, - download_dir: Optional[str] = None, - verbosity: int = 0, - ) -> RequirementPreparer: - """ - Create a RequirementPreparer instance for the given parameters. - """ - temp_build_dir_path = temp_build_dir.path - assert temp_build_dir_path is not None - - resolver_variant = cls.determine_resolver_variant(options) - if resolver_variant == "2020-resolver": - lazy_wheel = "fast-deps" in options.features_enabled - if lazy_wheel: - logger.warning( - "pip is using lazily downloaded wheels using HTTP " - "range requests to obtain dependency information. " - "This experimental feature is enabled through " - "--use-feature=fast-deps and it is not ready for " - "production." 
- ) - else: - lazy_wheel = False - if "fast-deps" in options.features_enabled: - logger.warning( - "fast-deps has no effect when used with the legacy resolver." - ) - - in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled - if "in-tree-build" in options.features_enabled: - deprecated( - reason="In-tree builds are now the default.", - replacement="to remove the --use-feature=in-tree-build flag", - gone_in="22.1", - ) - if "out-of-tree-build" in options.deprecated_features_enabled: - deprecated( - reason="Out-of-tree builds are deprecated.", - replacement=None, - gone_in="22.1", - ) - - if options.progress_bar not in {"on", "off"}: - deprecated( - reason="Custom progress bar styles are deprecated", - replacement="to use the default progress bar style.", - gone_in="22.1", - ) - - return RequirementPreparer( - build_dir=temp_build_dir_path, - src_dir=options.src_dir, - download_dir=download_dir, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - session=session, - progress_bar=options.progress_bar, - finder=finder, - require_hashes=options.require_hashes, - use_user_site=use_user_site, - lazy_wheel=lazy_wheel, - verbosity=verbosity, - in_tree_build=in_tree_build, - ) - - @classmethod - def make_resolver( - cls, - preparer: RequirementPreparer, - finder: PackageFinder, - options: Values, - wheel_cache: Optional[WheelCache] = None, - use_user_site: bool = False, - ignore_installed: bool = True, - ignore_requires_python: bool = False, - force_reinstall: bool = False, - upgrade_strategy: str = "to-satisfy-only", - use_pep517: Optional[bool] = None, - py_version_info: Optional[Tuple[int, ...]] = None, - ) -> BaseResolver: - """ - Create a Resolver instance for the given parameters. - """ - make_install_req = partial( - install_req_from_req_string, - isolated=options.isolated_mode, - use_pep517=use_pep517, - ) - suppress_build_failures = cls.determine_build_failure_suppression(options) - resolver_variant = cls.determine_resolver_variant(options) - # The long import name and duplicated invocation is needed to convince - # Mypy into correctly typechecking. Otherwise it would complain the - # "Resolver" class being redefined. - if resolver_variant == "2020-resolver": - import pip._internal.resolution.resolvelib.resolver - - return pip._internal.resolution.resolvelib.resolver.Resolver( - preparer=preparer, - finder=finder, - wheel_cache=wheel_cache, - make_install_req=make_install_req, - use_user_site=use_user_site, - ignore_dependencies=options.ignore_dependencies, - ignore_installed=ignore_installed, - ignore_requires_python=ignore_requires_python, - force_reinstall=force_reinstall, - upgrade_strategy=upgrade_strategy, - py_version_info=py_version_info, - suppress_build_failures=suppress_build_failures, - ) - import pip._internal.resolution.legacy.resolver - - return pip._internal.resolution.legacy.resolver.Resolver( - preparer=preparer, - finder=finder, - wheel_cache=wheel_cache, - make_install_req=make_install_req, - use_user_site=use_user_site, - ignore_dependencies=options.ignore_dependencies, - ignore_installed=ignore_installed, - ignore_requires_python=ignore_requires_python, - force_reinstall=force_reinstall, - upgrade_strategy=upgrade_strategy, - py_version_info=py_version_info, - ) - - def get_requirements( - self, - args: List[str], - options: Values, - finder: PackageFinder, - session: PipSession, - ) -> List[InstallRequirement]: - """ - Parse command-line arguments into the corresponding requirements. 
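# Rough usage sketch for the four requirement sources handled below (file and
# package names are hypothetical; -c/-e/-r are the standard pip install flags):
#
#     pip install -c constraints.txt -e ./local_pkg -r requirements.txt somepkg
#
# constraints files (-c) -> install_req_from_parsed_requirement, user_supplied=False
# positional args        -> install_req_from_line ("somepkg")
# editables (-e)         -> install_req_from_editable ("./local_pkg")
# requirements files (-r)-> install_req_from_parsed_requirement, user_supplied=True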
- """ - requirements: List[InstallRequirement] = [] - for filename in options.constraints: - for parsed_req in parse_requirements( - filename, - constraint=True, - finder=finder, - options=options, - session=session, - ): - req_to_add = install_req_from_parsed_requirement( - parsed_req, - isolated=options.isolated_mode, - user_supplied=False, - ) - requirements.append(req_to_add) - - for req in args: - req_to_add = install_req_from_line( - req, - None, - isolated=options.isolated_mode, - use_pep517=options.use_pep517, - user_supplied=True, - ) - requirements.append(req_to_add) - - for req in options.editables: - req_to_add = install_req_from_editable( - req, - user_supplied=True, - isolated=options.isolated_mode, - use_pep517=options.use_pep517, - ) - requirements.append(req_to_add) - - # NOTE: options.require_hashes may be set if --require-hashes is True - for filename in options.requirements: - for parsed_req in parse_requirements( - filename, finder=finder, options=options, session=session - ): - req_to_add = install_req_from_parsed_requirement( - parsed_req, - isolated=options.isolated_mode, - use_pep517=options.use_pep517, - user_supplied=True, - ) - requirements.append(req_to_add) - - # If any requirement has hash options, enable hash checking. - if any(req.has_hash_options for req in requirements): - options.require_hashes = True - - if not (args or options.editables or options.requirements): - opts = {"name": self.name} - if options.find_links: - raise CommandError( - "You must give at least one requirement to {name} " - '(maybe you meant "pip {name} {links}"?)'.format( - **dict(opts, links=" ".join(options.find_links)) - ) - ) - else: - raise CommandError( - "You must give at least one requirement to {name} " - '(see "pip help {name}")'.format(**opts) - ) - - return requirements - - @staticmethod - def trace_basic_info(finder: PackageFinder) -> None: - """ - Trace basic information about the provided objects. - """ - # Display where finder is looking for packages - search_scope = finder.search_scope - locations = search_scope.get_formatted_locations() - if locations: - logger.info(locations) - - def _build_package_finder( - self, - options: Values, - session: PipSession, - target_python: Optional[TargetPython] = None, - ignore_requires_python: Optional[bool] = None, - ) -> PackageFinder: - """ - Create a package finder appropriate to this requirement command. - - :param ignore_requires_python: Whether to ignore incompatible - "Requires-Python" values in links. Defaults to False. 
- """ - link_collector = LinkCollector.create(session, options=options) - selection_prefs = SelectionPreferences( - allow_yanked=True, - format_control=options.format_control, - allow_all_prereleases=options.pre, - prefer_binary=options.prefer_binary, - ignore_requires_python=ignore_requires_python, - ) - - return PackageFinder.create( - link_collector=link_collector, - selection_prefs=selection_prefs, - target_python=target_python, - use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled, - ) diff --git a/venv/Lib/site-packages/pip/_internal/cli/spinners.py b/venv/Lib/site-packages/pip/_internal/cli/spinners.py deleted file mode 100644 index 1e313e1..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/spinners.py +++ /dev/null @@ -1,157 +0,0 @@ -import contextlib -import itertools -import logging -import sys -import time -from typing import IO, Iterator - -from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR - -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.logging import get_indentation - -logger = logging.getLogger(__name__) - - -class SpinnerInterface: - def spin(self) -> None: - raise NotImplementedError() - - def finish(self, final_status: str) -> None: - raise NotImplementedError() - - -class InteractiveSpinner(SpinnerInterface): - def __init__( - self, - message: str, - file: IO[str] = None, - spin_chars: str = "-\\|/", - # Empirically, 8 updates/second looks nice - min_update_interval_seconds: float = 0.125, - ): - self._message = message - if file is None: - file = sys.stdout - self._file = file - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._finished = False - - self._spin_cycle = itertools.cycle(spin_chars) - - self._file.write(" " * get_indentation() + self._message + " ... ") - self._width = 0 - - def _write(self, status: str) -> None: - assert not self._finished - # Erase what we wrote before by backspacing to the beginning, writing - # spaces to overwrite the old text, and then backspacing again - backup = "\b" * self._width - self._file.write(backup + " " * self._width + backup) - # Now we have a blank slate to add our status - self._file.write(status) - self._width = len(status) - self._file.flush() - self._rate_limiter.reset() - - def spin(self) -> None: - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._write(next(self._spin_cycle)) - - def finish(self, final_status: str) -> None: - if self._finished: - return - self._write(final_status) - self._file.write("\n") - self._file.flush() - self._finished = True - - -# Used for dumb terminals, non-interactive installs (no tty), etc. -# We still print updates occasionally (once every 60 seconds by default) to -# act as a keep-alive for systems like Travis-CI that take lack-of-output as -# an indication that a task has frozen. 
-class NonInteractiveSpinner(SpinnerInterface): - def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None: - self._message = message - self._finished = False - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._update("started") - - def _update(self, status: str) -> None: - assert not self._finished - self._rate_limiter.reset() - logger.info("%s: %s", self._message, status) - - def spin(self) -> None: - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._update("still running...") - - def finish(self, final_status: str) -> None: - if self._finished: - return - self._update(f"finished with status '{final_status}'") - self._finished = True - - -class RateLimiter: - def __init__(self, min_update_interval_seconds: float) -> None: - self._min_update_interval_seconds = min_update_interval_seconds - self._last_update: float = 0 - - def ready(self) -> bool: - now = time.time() - delta = now - self._last_update - return delta >= self._min_update_interval_seconds - - def reset(self) -> None: - self._last_update = time.time() - - -@contextlib.contextmanager -def open_spinner(message: str) -> Iterator[SpinnerInterface]: - # Interactive spinner goes directly to sys.stdout rather than being routed - # through the logging system, but it acts like it has level INFO, - # i.e. it's only displayed if we're at level INFO or better. - # Non-interactive spinner goes through the logging system, so it is always - # in sync with logging configuration. - if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: - spinner: SpinnerInterface = InteractiveSpinner(message) - else: - spinner = NonInteractiveSpinner(message) - try: - with hidden_cursor(sys.stdout): - yield spinner - except KeyboardInterrupt: - spinner.finish("canceled") - raise - except Exception: - spinner.finish("error") - raise - else: - spinner.finish("done") - - -@contextlib.contextmanager -def hidden_cursor(file: IO[str]) -> Iterator[None]: - # The Windows terminal does not support the hide/show cursor ANSI codes, - # even via colorama. So don't even try. - if WINDOWS: - yield - # We don't want to clutter the output with control characters if we're - # writing to a file, or if the user is running with --quiet. 
- # See https://github.com/pypa/pip/issues/3418 - elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: - yield - else: - file.write(HIDE_CURSOR) - try: - yield - finally: - file.write(SHOW_CURSOR) diff --git a/venv/Lib/site-packages/pip/_internal/cli/status_codes.py b/venv/Lib/site-packages/pip/_internal/cli/status_codes.py deleted file mode 100644 index 5e29502..0000000 --- a/venv/Lib/site-packages/pip/_internal/cli/status_codes.py +++ /dev/null @@ -1,6 +0,0 @@ -SUCCESS = 0 -ERROR = 1 -UNKNOWN_ERROR = 2 -VIRTUALENV_NOT_FOUND = 3 -PREVIOUS_BUILD_DIR_ERROR = 4 -NO_MATCHES_FOUND = 23 diff --git a/venv/Lib/site-packages/pip/_internal/commands/__init__.py b/venv/Lib/site-packages/pip/_internal/commands/__init__.py deleted file mode 100644 index c72f24f..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/__init__.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -Package containing all pip commands -""" - -import importlib -from collections import namedtuple -from typing import Any, Dict, Optional - -from pip._internal.cli.base_command import Command - -CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary") - -# This dictionary does a bunch of heavy lifting for help output: -# - Enables avoiding additional (costly) imports for presenting `--help`. -# - The ordering matters for help display. -# -# Even though the module path starts with the same "pip._internal.commands" -# prefix, the full path makes testing easier (specifically when modifying -# `commands_dict` in test setup / teardown). -commands_dict: Dict[str, CommandInfo] = { - "install": CommandInfo( - "pip._internal.commands.install", - "InstallCommand", - "Install packages.", - ), - "download": CommandInfo( - "pip._internal.commands.download", - "DownloadCommand", - "Download packages.", - ), - "uninstall": CommandInfo( - "pip._internal.commands.uninstall", - "UninstallCommand", - "Uninstall packages.", - ), - "freeze": CommandInfo( - "pip._internal.commands.freeze", - "FreezeCommand", - "Output installed packages in requirements format.", - ), - "list": CommandInfo( - "pip._internal.commands.list", - "ListCommand", - "List installed packages.", - ), - "show": CommandInfo( - "pip._internal.commands.show", - "ShowCommand", - "Show information about installed packages.", - ), - "check": CommandInfo( - "pip._internal.commands.check", - "CheckCommand", - "Verify installed packages have compatible dependencies.", - ), - "config": CommandInfo( - "pip._internal.commands.configuration", - "ConfigurationCommand", - "Manage local and global configuration.", - ), - "search": CommandInfo( - "pip._internal.commands.search", - "SearchCommand", - "Search PyPI for packages.", - ), - "cache": CommandInfo( - "pip._internal.commands.cache", - "CacheCommand", - "Inspect and manage pip's wheel cache.", - ), - "index": CommandInfo( - "pip._internal.commands.index", - "IndexCommand", - "Inspect information available from package indexes.", - ), - "wheel": CommandInfo( - "pip._internal.commands.wheel", - "WheelCommand", - "Build wheels from your requirements.", - ), - "hash": CommandInfo( - "pip._internal.commands.hash", - "HashCommand", - "Compute hashes of package archives.", - ), - "completion": CommandInfo( - "pip._internal.commands.completion", - "CompletionCommand", - "A helper command used for command completion.", - ), - "debug": CommandInfo( - "pip._internal.commands.debug", - "DebugCommand", - "Show information useful for debugging.", - ), - "help": CommandInfo( - "pip._internal.commands.help", - "HelpCommand", - 
"Show help for commands.", - ), -} - - -def create_command(name: str, **kwargs: Any) -> Command: - """ - Create an instance of the Command class with the given name. - """ - module_path, class_name, summary = commands_dict[name] - module = importlib.import_module(module_path) - command_class = getattr(module, class_name) - command = command_class(name=name, summary=summary, **kwargs) - - return command - - -def get_similar_commands(name: str) -> Optional[str]: - """Command name auto-correct.""" - from difflib import get_close_matches - - name = name.lower() - - close_commands = get_close_matches(name, commands_dict.keys()) - - if close_commands: - return close_commands[0] - else: - return None diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 72ad052..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc deleted file mode 100644 index 785ed5e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc deleted file mode 100644 index 02c436b..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc deleted file mode 100644 index ef68477..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc deleted file mode 100644 index 0260dea..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc deleted file mode 100644 index a23c58f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc deleted file mode 100644 index 742d93f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc deleted file mode 100644 index 3da8fab..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc 
b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc deleted file mode 100644 index 49117a4..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc deleted file mode 100644 index c77cfb4..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc deleted file mode 100644 index cec2174..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc deleted file mode 100644 index cd32b67..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc deleted file mode 100644 index 81b5baf..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc deleted file mode 100644 index 687c3e7..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc deleted file mode 100644 index 12c34f7..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc deleted file mode 100644 index d24a2e3..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index 1371079..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/commands/cache.py b/venv/Lib/site-packages/pip/_internal/commands/cache.py deleted file mode 100644 index f1a489d..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/cache.py +++ /dev/null @@ -1,223 +0,0 @@ -import os -import textwrap -from optparse import Values -from typing import Any, List - -import pip._internal.utils.filesystem as filesystem -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.exceptions import CommandError, PipError -from 
pip._internal.utils.logging import getLogger - -logger = getLogger(__name__) - - -class CacheCommand(Command): - """ - Inspect and manage pip's wheel cache. - - Subcommands: - - - dir: Show the cache directory. - - info: Show information about the cache. - - list: List filenames of packages stored in the cache. - - remove: Remove one or more package from the cache. - - purge: Remove all items from the cache. - - ```` can be a glob expression or a package name. - """ - - ignore_require_venv = True - usage = """ - %prog dir - %prog info - %prog list [] [--format=[human, abspath]] - %prog remove - %prog purge - """ - - def add_options(self) -> None: - - self.cmd_opts.add_option( - "--format", - action="store", - dest="list_format", - default="human", - choices=("human", "abspath"), - help="Select the output format among: human (default) or abspath", - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - handlers = { - "dir": self.get_cache_dir, - "info": self.get_cache_info, - "list": self.list_cache_items, - "remove": self.remove_cache_items, - "purge": self.purge_cache, - } - - if not options.cache_dir: - logger.error("pip cache commands can not function since cache is disabled.") - return ERROR - - # Determine action - if not args or args[0] not in handlers: - logger.error( - "Need an action (%s) to perform.", - ", ".join(sorted(handlers)), - ) - return ERROR - - action = args[0] - - # Error handling happens here, not in the action-handlers. - try: - handlers[action](options, args[1:]) - except PipError as e: - logger.error(e.args[0]) - return ERROR - - return SUCCESS - - def get_cache_dir(self, options: Values, args: List[Any]) -> None: - if args: - raise CommandError("Too many arguments") - - logger.info(options.cache_dir) - - def get_cache_info(self, options: Values, args: List[Any]) -> None: - if args: - raise CommandError("Too many arguments") - - num_http_files = len(self._find_http_files(options)) - num_packages = len(self._find_wheels(options, "*")) - - http_cache_location = self._cache_dir(options, "http") - wheels_cache_location = self._cache_dir(options, "wheels") - http_cache_size = filesystem.format_directory_size(http_cache_location) - wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) - - message = ( - textwrap.dedent( - """ - Package index page cache location: {http_cache_location} - Package index page cache size: {http_cache_size} - Number of HTTP files: {num_http_files} - Wheels location: {wheels_cache_location} - Wheels size: {wheels_cache_size} - Number of wheels: {package_count} - """ - ) - .format( - http_cache_location=http_cache_location, - http_cache_size=http_cache_size, - num_http_files=num_http_files, - wheels_cache_location=wheels_cache_location, - package_count=num_packages, - wheels_cache_size=wheels_cache_size, - ) - .strip() - ) - - logger.info(message) - - def list_cache_items(self, options: Values, args: List[Any]) -> None: - if len(args) > 1: - raise CommandError("Too many arguments") - - if args: - pattern = args[0] - else: - pattern = "*" - - files = self._find_wheels(options, pattern) - if options.list_format == "human": - self.format_for_human(files) - else: - self.format_for_abspath(files) - - def format_for_human(self, files: List[str]) -> None: - if not files: - logger.info("Nothing cached.") - return - - results = [] - for filename in files: - wheel = os.path.basename(filename) - size = filesystem.format_file_size(filename) - results.append(f" - {wheel} 
({size})") - logger.info("Cache contents:\n") - logger.info("\n".join(sorted(results))) - - def format_for_abspath(self, files: List[str]) -> None: - if not files: - return - - results = [] - for filename in files: - results.append(filename) - - logger.info("\n".join(sorted(results))) - - def remove_cache_items(self, options: Values, args: List[Any]) -> None: - if len(args) > 1: - raise CommandError("Too many arguments") - - if not args: - raise CommandError("Please provide a pattern") - - files = self._find_wheels(options, args[0]) - - no_matching_msg = "No matching packages" - if args[0] == "*": - # Only fetch http files if no specific pattern given - files += self._find_http_files(options) - else: - # Add the pattern to the log message - no_matching_msg += ' for pattern "{}"'.format(args[0]) - - if not files: - logger.warning(no_matching_msg) - - for filename in files: - os.unlink(filename) - logger.verbose("Removed %s", filename) - logger.info("Files removed: %s", len(files)) - - def purge_cache(self, options: Values, args: List[Any]) -> None: - if args: - raise CommandError("Too many arguments") - - return self.remove_cache_items(options, ["*"]) - - def _cache_dir(self, options: Values, subdir: str) -> str: - return os.path.join(options.cache_dir, subdir) - - def _find_http_files(self, options: Values) -> List[str]: - http_dir = self._cache_dir(options, "http") - return filesystem.find_files(http_dir, "*") - - def _find_wheels(self, options: Values, pattern: str) -> List[str]: - wheel_dir = self._cache_dir(options, "wheels") - - # The wheel filename format, as specified in PEP 427, is: - # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl - # - # Additionally, non-alphanumeric values in the distribution are - # normalized to underscores (_), meaning hyphens can never occur - # before `-{version}`. - # - # Given that information: - # - If the pattern we're given contains a hyphen (-), the user is - # providing at least the version. Thus, we can just append `*.whl` - # to match the rest of it. - # - If the pattern we're given doesn't contain a hyphen (-), the - # user is only providing the name. Thus, we append `-*.whl` to - # match the hyphen before the version, followed by anything else. 
- # - # PEP 427: https://www.python.org/dev/peps/pep-0427/ - pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") - - return filesystem.find_files(wheel_dir, pattern) diff --git a/venv/Lib/site-packages/pip/_internal/commands/check.py b/venv/Lib/site-packages/pip/_internal/commands/check.py deleted file mode 100644 index 3864220..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/check.py +++ /dev/null @@ -1,53 +0,0 @@ -import logging -from optparse import Values -from typing import List - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.operations.check import ( - check_package_set, - create_package_set_from_installed, -) -from pip._internal.utils.misc import write_output - -logger = logging.getLogger(__name__) - - -class CheckCommand(Command): - """Verify installed packages have compatible dependencies.""" - - usage = """ - %prog [options]""" - - def run(self, options: Values, args: List[str]) -> int: - - package_set, parsing_probs = create_package_set_from_installed() - missing, conflicting = check_package_set(package_set) - - for project_name in missing: - version = package_set[project_name].version - for dependency in missing[project_name]: - write_output( - "%s %s requires %s, which is not installed.", - project_name, - version, - dependency[0], - ) - - for project_name in conflicting: - version = package_set[project_name].version - for dep_name, dep_version, req in conflicting[project_name]: - write_output( - "%s %s has requirement %s, but you have %s %s.", - project_name, - version, - req, - dep_name, - dep_version, - ) - - if missing or conflicting or parsing_probs: - return ERROR - else: - write_output("No broken requirements found.") - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/completion.py b/venv/Lib/site-packages/pip/_internal/commands/completion.py deleted file mode 100644 index c0fb4ca..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/completion.py +++ /dev/null @@ -1,96 +0,0 @@ -import sys -import textwrap -from optparse import Values -from typing import List - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.utils.misc import get_prog - -BASE_COMPLETION = """ -# pip {shell} completion start{script}# pip {shell} completion end -""" - -COMPLETION_SCRIPTS = { - "bash": """ - _pip_completion() - {{ - COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\ - COMP_CWORD=$COMP_CWORD \\ - PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) - }} - complete -o default -F _pip_completion {prog} - """, - "zsh": """ - function _pip_completion {{ - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) - }} - compctl -K _pip_completion {prog} - """, - "fish": """ - function __fish_complete_pip - set -lx COMP_WORDS (commandline -o) "" - set -lx COMP_CWORD ( \\ - math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ - ) - set -lx PIP_AUTO_COMPLETE 1 - string split \\ -- (eval $COMP_WORDS[1]) - end - complete -fa "(__fish_complete_pip)" -c {prog} - """, -} - - -class CompletionCommand(Command): - """A helper command to be used for command completion.""" - - ignore_require_venv = True - - def add_options(self) -> None: - self.cmd_opts.add_option( - "--bash", - "-b", - action="store_const", - const="bash", - dest="shell", - help="Emit completion code for bash", - ) - 
self.cmd_opts.add_option( - "--zsh", - "-z", - action="store_const", - const="zsh", - dest="shell", - help="Emit completion code for zsh", - ) - self.cmd_opts.add_option( - "--fish", - "-f", - action="store_const", - const="fish", - dest="shell", - help="Emit completion code for fish", - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - """Prints the completion code of the given shell""" - shells = COMPLETION_SCRIPTS.keys() - shell_options = ["--" + shell for shell in sorted(shells)] - if options.shell in shells: - script = textwrap.dedent( - COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog()) - ) - print(BASE_COMPLETION.format(script=script, shell=options.shell)) - return SUCCESS - else: - sys.stderr.write( - "ERROR: You must pass {}\n".format(" or ".join(shell_options)) - ) - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/configuration.py b/venv/Lib/site-packages/pip/_internal/commands/configuration.py deleted file mode 100644 index c6c74ed..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/configuration.py +++ /dev/null @@ -1,266 +0,0 @@ -import logging -import os -import subprocess -from optparse import Values -from typing import Any, List, Optional - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.configuration import ( - Configuration, - Kind, - get_configuration_files, - kinds, -) -from pip._internal.exceptions import PipError -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import get_prog, write_output - -logger = logging.getLogger(__name__) - - -class ConfigurationCommand(Command): - """ - Manage local and global configuration. - - Subcommands: - - - list: List the active configuration (or from the file specified) - - edit: Edit the configuration file in an editor - - get: Get the value associated with name - - set: Set the name=value - - unset: Unset the value associated with name - - debug: List the configuration files and values defined under them - - If none of --user, --global and --site are passed, a virtual - environment configuration file is used if one is active and the file - exists. Otherwise, all modifications happen to the user file by - default. - """ - - ignore_require_venv = True - usage = """ - %prog [] list - %prog [] [--editor ] edit - - %prog [] get name - %prog [] set name value - %prog [] unset name - %prog [] debug - """ - - def add_options(self) -> None: - self.cmd_opts.add_option( - "--editor", - dest="editor", - action="store", - default=None, - help=( - "Editor to use to edit the file. Uses VISUAL or EDITOR " - "environment variables if not provided." 
- ), - ) - - self.cmd_opts.add_option( - "--global", - dest="global_file", - action="store_true", - default=False, - help="Use the system-wide configuration file only", - ) - - self.cmd_opts.add_option( - "--user", - dest="user_file", - action="store_true", - default=False, - help="Use the user configuration file only", - ) - - self.cmd_opts.add_option( - "--site", - dest="site_file", - action="store_true", - default=False, - help="Use the current environment configuration file only", - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - handlers = { - "list": self.list_values, - "edit": self.open_in_editor, - "get": self.get_name, - "set": self.set_name_value, - "unset": self.unset_name, - "debug": self.list_config_values, - } - - # Determine action - if not args or args[0] not in handlers: - logger.error( - "Need an action (%s) to perform.", - ", ".join(sorted(handlers)), - ) - return ERROR - - action = args[0] - - # Determine which configuration files are to be loaded - # Depends on whether the command is modifying. - try: - load_only = self._determine_file( - options, need_value=(action in ["get", "set", "unset", "edit"]) - ) - except PipError as e: - logger.error(e.args[0]) - return ERROR - - # Load a new configuration - self.configuration = Configuration( - isolated=options.isolated_mode, load_only=load_only - ) - self.configuration.load() - - # Error handling happens here, not in the action-handlers. - try: - handlers[action](options, args[1:]) - except PipError as e: - logger.error(e.args[0]) - return ERROR - - return SUCCESS - - def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]: - file_options = [ - key - for key, value in ( - (kinds.USER, options.user_file), - (kinds.GLOBAL, options.global_file), - (kinds.SITE, options.site_file), - ) - if value - ] - - if not file_options: - if not need_value: - return None - # Default to user, unless there's a site file. - elif any( - os.path.exists(site_config_file) - for site_config_file in get_configuration_files()[kinds.SITE] - ): - return kinds.SITE - else: - return kinds.USER - elif len(file_options) == 1: - return file_options[0] - - raise PipError( - "Need exactly one file to operate upon " - "(--user, --site, --global) to perform." 
- ) - - def list_values(self, options: Values, args: List[str]) -> None: - self._get_n_args(args, "list", n=0) - - for key, value in sorted(self.configuration.items()): - write_output("%s=%r", key, value) - - def get_name(self, options: Values, args: List[str]) -> None: - key = self._get_n_args(args, "get [name]", n=1) - value = self.configuration.get_value(key) - - write_output("%s", value) - - def set_name_value(self, options: Values, args: List[str]) -> None: - key, value = self._get_n_args(args, "set [name] [value]", n=2) - self.configuration.set_value(key, value) - - self._save_configuration() - - def unset_name(self, options: Values, args: List[str]) -> None: - key = self._get_n_args(args, "unset [name]", n=1) - self.configuration.unset_value(key) - - self._save_configuration() - - def list_config_values(self, options: Values, args: List[str]) -> None: - """List config key-value pairs across different config files""" - self._get_n_args(args, "debug", n=0) - - self.print_env_var_values() - # Iterate over config files and print if they exist, and the - # key-value pairs present in them if they do - for variant, files in sorted(self.configuration.iter_config_files()): - write_output("%s:", variant) - for fname in files: - with indent_log(): - file_exists = os.path.exists(fname) - write_output("%s, exists: %r", fname, file_exists) - if file_exists: - self.print_config_file_values(variant) - - def print_config_file_values(self, variant: Kind) -> None: - """Get key-value pairs from the file of a variant""" - for name, value in self.configuration.get_values_in_config(variant).items(): - with indent_log(): - write_output("%s: %s", name, value) - - def print_env_var_values(self) -> None: - """Get key-values pairs present as environment variables""" - write_output("%s:", "env_var") - with indent_log(): - for key, value in sorted(self.configuration.get_environ_vars()): - env_var = f"PIP_{key.upper()}" - write_output("%s=%r", env_var, value) - - def open_in_editor(self, options: Values, args: List[str]) -> None: - editor = self._determine_editor(options) - - fname = self.configuration.get_file_to_edit() - if fname is None: - raise PipError("Could not determine appropriate file.") - - try: - subprocess.check_call([editor, fname]) - except subprocess.CalledProcessError as e: - raise PipError( - "Editor Subprocess exited with exit code {}".format(e.returncode) - ) - - def _get_n_args(self, args: List[str], example: str, n: int) -> Any: - """Helper to make sure the command got the right number of arguments""" - if len(args) != n: - msg = ( - "Got unexpected number of arguments, expected {}. " - '(example: "{} config {}")' - ).format(n, get_prog(), example) - raise PipError(msg) - - if n == 1: - return args[0] - else: - return args - - def _save_configuration(self) -> None: - # We successfully ran a modifying command. Need to save the - # configuration. - try: - self.configuration.save() - except Exception: - logger.exception( - "Unable to save configuration. Please report this as a bug." 
- ) - raise PipError("Internal Error.") - - def _determine_editor(self, options: Values) -> str: - if options.editor is not None: - return options.editor - elif "VISUAL" in os.environ: - return os.environ["VISUAL"] - elif "EDITOR" in os.environ: - return os.environ["EDITOR"] - else: - raise PipError("Could not determine editor to use.") diff --git a/venv/Lib/site-packages/pip/_internal/commands/debug.py b/venv/Lib/site-packages/pip/_internal/commands/debug.py deleted file mode 100644 index d3f1f28..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/debug.py +++ /dev/null @@ -1,202 +0,0 @@ -import locale -import logging -import os -import sys -from optparse import Values -from types import ModuleType -from typing import Any, Dict, List, Optional - -import pip._vendor -from pip._vendor.certifi import where -from pip._vendor.packaging.version import parse as parse_version - -from pip import __file__ as pip_location -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command -from pip._internal.cli.cmdoptions import make_target_python -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.configuration import Configuration -from pip._internal.metadata import get_environment -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import get_pip_version - -logger = logging.getLogger(__name__) - - -def show_value(name: str, value: Any) -> None: - logger.info("%s: %s", name, value) - - -def show_sys_implementation() -> None: - logger.info("sys.implementation:") - implementation_name = sys.implementation.name - with indent_log(): - show_value("name", implementation_name) - - -def create_vendor_txt_map() -> Dict[str, str]: - vendor_txt_path = os.path.join( - os.path.dirname(pip_location), "_vendor", "vendor.txt" - ) - - with open(vendor_txt_path) as f: - # Purge non version specifying lines. - # Also, remove any space prefix or suffixes (including comments). - lines = [ - line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line - ] - - # Transform into "module" -> version dict. - return dict(line.split("==", 1) for line in lines) # type: ignore - - -def get_module_from_module_name(module_name: str) -> ModuleType: - # Module name can be uppercase in vendor.txt for some reason... - module_name = module_name.lower() - # PATCH: setuptools is actually only pkg_resources. - if module_name == "setuptools": - module_name = "pkg_resources" - - __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) - return getattr(pip._vendor, module_name) - - -def get_vendor_version_from_module(module_name: str) -> Optional[str]: - module = get_module_from_module_name(module_name) - version = getattr(module, "__version__", None) - - if not version: - # Try to find version in debundled module info. - env = get_environment([os.path.dirname(module.__file__)]) - dist = env.get_distribution(module_name) - if dist: - version = str(dist.version) - - return version - - -def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None: - """Log the actual version and print extra info if there is - a conflict or if the actual version could not be imported. 
- """ - for module_name, expected_version in vendor_txt_versions.items(): - extra_message = "" - actual_version = get_vendor_version_from_module(module_name) - if not actual_version: - extra_message = ( - " (Unable to locate actual module version, using" - " vendor.txt specified version)" - ) - actual_version = expected_version - elif parse_version(actual_version) != parse_version(expected_version): - extra_message = ( - " (CONFLICT: vendor.txt suggests version should" - " be {})".format(expected_version) - ) - logger.info("%s==%s%s", module_name, actual_version, extra_message) - - -def show_vendor_versions() -> None: - logger.info("vendored library versions:") - - vendor_txt_versions = create_vendor_txt_map() - with indent_log(): - show_actual_vendor_versions(vendor_txt_versions) - - -def show_tags(options: Values) -> None: - tag_limit = 10 - - target_python = make_target_python(options) - tags = target_python.get_tags() - - # Display the target options that were explicitly provided. - formatted_target = target_python.format_given() - suffix = "" - if formatted_target: - suffix = f" (target: {formatted_target})" - - msg = "Compatible tags: {}{}".format(len(tags), suffix) - logger.info(msg) - - if options.verbose < 1 and len(tags) > tag_limit: - tags_limited = True - tags = tags[:tag_limit] - else: - tags_limited = False - - with indent_log(): - for tag in tags: - logger.info(str(tag)) - - if tags_limited: - msg = ( - "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" - ).format(tag_limit=tag_limit) - logger.info(msg) - - -def ca_bundle_info(config: Configuration) -> str: - levels = set() - for key, _ in config.items(): - levels.add(key.split(".")[0]) - - if not levels: - return "Not specified" - - levels_that_override_global = ["install", "wheel", "download"] - global_overriding_level = [ - level for level in levels if level in levels_that_override_global - ] - if not global_overriding_level: - return "global" - - if "global" in levels: - levels.remove("global") - return ", ".join(levels) - - -class DebugCommand(Command): - """ - Display debug information. - """ - - usage = """ - %prog """ - ignore_require_venv = True - - def add_options(self) -> None: - cmdoptions.add_target_python_options(self.cmd_opts) - self.parser.insert_option_group(0, self.cmd_opts) - self.parser.config.load() - - def run(self, options: Values, args: List[str]) -> int: - logger.warning( - "This command is only meant for debugging. " - "Do not use this with automation for parsing and getting these " - "details, since the output and options of this command may " - "change without notice." 
- ) - show_value("pip version", get_pip_version()) - show_value("sys.version", sys.version) - show_value("sys.executable", sys.executable) - show_value("sys.getdefaultencoding", sys.getdefaultencoding()) - show_value("sys.getfilesystemencoding", sys.getfilesystemencoding()) - show_value( - "locale.getpreferredencoding", - locale.getpreferredencoding(), - ) - show_value("sys.platform", sys.platform) - show_sys_implementation() - - show_value("'cert' config value", ca_bundle_info(self.parser.config)) - show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE")) - show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE")) - show_value("pip._vendor.certifi.where()", where()) - show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) - - show_vendor_versions() - - show_tags(options) - - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/download.py b/venv/Lib/site-packages/pip/_internal/commands/download.py deleted file mode 100644 index 233b7e9..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/download.py +++ /dev/null @@ -1,140 +0,0 @@ -import logging -import os -from optparse import Values -from typing import List - -from pip._internal.cli import cmdoptions -from pip._internal.cli.cmdoptions import make_target_python -from pip._internal.cli.req_command import RequirementCommand, with_cleanup -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.req.req_tracker import get_requirement_tracker -from pip._internal.utils.misc import ensure_dir, normalize_path, write_output -from pip._internal.utils.temp_dir import TempDirectory - -logger = logging.getLogger(__name__) - - -class DownloadCommand(RequirementCommand): - """ - Download packages from: - - - PyPI (and other indexes) using requirement specifiers. - - VCS project urls. - - Local project directories. - - Local or remote source archives. - - pip also supports downloading from "requirements files", which provide - an easy way to specify a whole environment to be downloaded. - """ - - usage = """ - %prog [options] [package-index-options] ... - %prog [options] -r [package-index-options] ... - %prog [options] ... - %prog [options] ... 
- %prog [options] ...""" - - def add_options(self) -> None: - self.cmd_opts.add_option(cmdoptions.constraints()) - self.cmd_opts.add_option(cmdoptions.requirements()) - self.cmd_opts.add_option(cmdoptions.no_deps()) - self.cmd_opts.add_option(cmdoptions.global_options()) - self.cmd_opts.add_option(cmdoptions.no_binary()) - self.cmd_opts.add_option(cmdoptions.only_binary()) - self.cmd_opts.add_option(cmdoptions.prefer_binary()) - self.cmd_opts.add_option(cmdoptions.src()) - self.cmd_opts.add_option(cmdoptions.pre()) - self.cmd_opts.add_option(cmdoptions.require_hashes()) - self.cmd_opts.add_option(cmdoptions.progress_bar()) - self.cmd_opts.add_option(cmdoptions.no_build_isolation()) - self.cmd_opts.add_option(cmdoptions.use_pep517()) - self.cmd_opts.add_option(cmdoptions.no_use_pep517()) - self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) - - self.cmd_opts.add_option( - "-d", - "--dest", - "--destination-dir", - "--destination-directory", - dest="download_dir", - metavar="dir", - default=os.curdir, - help="Download packages into .", - ) - - cmdoptions.add_target_python_options(self.cmd_opts) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, self.cmd_opts) - - @with_cleanup - def run(self, options: Values, args: List[str]) -> int: - - options.ignore_installed = True - # editable doesn't really make sense for `pip download`, but the bowels - # of the RequirementSet code require that property. - options.editables = [] - - cmdoptions.check_dist_restriction(options) - - options.download_dir = normalize_path(options.download_dir) - ensure_dir(options.download_dir) - - session = self.get_default_session(options) - - target_python = make_target_python(options) - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, - ignore_requires_python=options.ignore_requires_python, - ) - - req_tracker = self.enter_context(get_requirement_tracker()) - - directory = TempDirectory( - delete=not options.no_clean, - kind="download", - globally_managed=True, - ) - - reqs = self.get_requirements(args, options, finder, session) - - preparer = self.make_requirement_preparer( - temp_build_dir=directory, - options=options, - req_tracker=req_tracker, - session=session, - finder=finder, - download_dir=options.download_dir, - use_user_site=False, - verbosity=self.verbosity, - ) - - resolver = self.make_resolver( - preparer=preparer, - finder=finder, - options=options, - ignore_requires_python=options.ignore_requires_python, - py_version_info=options.python_version, - ) - - self.trace_basic_info(finder) - - requirement_set = resolver.resolve(reqs, check_supported_wheels=True) - - downloaded: List[str] = [] - for req in requirement_set.requirements.values(): - if req.satisfied_by is None: - assert req.name is not None - preparer.save_linked_requirement(req) - downloaded.append(req.name) - if downloaded: - write_output("Successfully downloaded %s", " ".join(downloaded)) - - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/freeze.py b/venv/Lib/site-packages/pip/_internal/commands/freeze.py deleted file mode 100644 index 5fa6d39..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/freeze.py +++ /dev/null @@ -1,97 +0,0 @@ -import sys -from optparse import Values -from typing import List - -from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command -from 
pip._internal.cli.status_codes import SUCCESS -from pip._internal.operations.freeze import freeze -from pip._internal.utils.compat import stdlib_pkgs - -DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"} - - -class FreezeCommand(Command): - """ - Output installed packages in requirements format. - - packages are listed in a case-insensitive sorted order. - """ - - usage = """ - %prog [options]""" - log_streams = ("ext://sys.stderr", "ext://sys.stderr") - - def add_options(self) -> None: - self.cmd_opts.add_option( - "-r", - "--requirement", - dest="requirements", - action="append", - default=[], - metavar="file", - help=( - "Use the order in the given requirements file and its " - "comments when generating output. This option can be " - "used multiple times." - ), - ) - self.cmd_opts.add_option( - "-l", - "--local", - dest="local", - action="store_true", - default=False, - help=( - "If in a virtualenv that has global access, do not output " - "globally-installed packages." - ), - ) - self.cmd_opts.add_option( - "--user", - dest="user", - action="store_true", - default=False, - help="Only output packages installed in user-site.", - ) - self.cmd_opts.add_option(cmdoptions.list_path()) - self.cmd_opts.add_option( - "--all", - dest="freeze_all", - action="store_true", - help=( - "Do not skip these packages in the output:" - " {}".format(", ".join(DEV_PKGS)) - ), - ) - self.cmd_opts.add_option( - "--exclude-editable", - dest="exclude_editable", - action="store_true", - help="Exclude editable package from output.", - ) - self.cmd_opts.add_option(cmdoptions.list_exclude()) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - skip = set(stdlib_pkgs) - if not options.freeze_all: - skip.update(DEV_PKGS) - - if options.excludes: - skip.update(options.excludes) - - cmdoptions.check_list_path_option(options) - - for line in freeze( - requirement=options.requirements, - local_only=options.local, - user_only=options.user, - paths=options.path, - isolated=options.isolated_mode, - skip=skip, - exclude_editable=options.exclude_editable, - ): - sys.stdout.write(line + "\n") - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/hash.py b/venv/Lib/site-packages/pip/_internal/commands/hash.py deleted file mode 100644 index 042dac8..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/hash.py +++ /dev/null @@ -1,59 +0,0 @@ -import hashlib -import logging -import sys -from optparse import Values -from typing import List - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES -from pip._internal.utils.misc import read_chunks, write_output - -logger = logging.getLogger(__name__) - - -class HashCommand(Command): - """ - Compute a hash of a local package archive. - - These can be used with --hash in a requirements file to do repeatable - installs. - """ - - usage = "%prog [options] ..." 
- ignore_require_venv = True - - def add_options(self) -> None: - self.cmd_opts.add_option( - "-a", - "--algorithm", - dest="algorithm", - choices=STRONG_HASHES, - action="store", - default=FAVORITE_HASH, - help="The hash algorithm to use: one of {}".format( - ", ".join(STRONG_HASHES) - ), - ) - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - if not args: - self.parser.print_usage(sys.stderr) - return ERROR - - algorithm = options.algorithm - for path in args: - write_output( - "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm) - ) - return SUCCESS - - -def _hash_of_file(path: str, algorithm: str) -> str: - """Return the hash digest of a file.""" - with open(path, "rb") as archive: - hash = hashlib.new(algorithm) - for chunk in read_chunks(archive): - hash.update(chunk) - return hash.hexdigest() diff --git a/venv/Lib/site-packages/pip/_internal/commands/help.py b/venv/Lib/site-packages/pip/_internal/commands/help.py deleted file mode 100644 index 6206631..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/help.py +++ /dev/null @@ -1,41 +0,0 @@ -from optparse import Values -from typing import List - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.exceptions import CommandError - - -class HelpCommand(Command): - """Show help for commands""" - - usage = """ - %prog """ - ignore_require_venv = True - - def run(self, options: Values, args: List[str]) -> int: - from pip._internal.commands import ( - commands_dict, - create_command, - get_similar_commands, - ) - - try: - # 'pip help' with no args is handled by pip.__init__.parseopt() - cmd_name = args[0] # the command we need help for - except IndexError: - return SUCCESS - - if cmd_name not in commands_dict: - guess = get_similar_commands(cmd_name) - - msg = [f'unknown command "{cmd_name}"'] - if guess: - msg.append(f'maybe you meant "{guess}"') - - raise CommandError(" - ".join(msg)) - - command = create_command(cmd_name) - command.parser.print_help() - - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/index.py b/venv/Lib/site-packages/pip/_internal/commands/index.py deleted file mode 100644 index 9d8aae3..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/index.py +++ /dev/null @@ -1,139 +0,0 @@ -import logging -from optparse import Values -from typing import Any, Iterable, List, Optional, Union - -from pip._vendor.packaging.version import LegacyVersion, Version - -from pip._internal.cli import cmdoptions -from pip._internal.cli.req_command import IndexGroupCommand -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.commands.search import print_dist_installation_info -from pip._internal.exceptions import CommandError, DistributionNotFound, PipError -from pip._internal.index.collector import LinkCollector -from pip._internal.index.package_finder import PackageFinder -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.models.target_python import TargetPython -from pip._internal.network.session import PipSession -from pip._internal.utils.misc import write_output - -logger = logging.getLogger(__name__) - - -class IndexCommand(IndexGroupCommand): - """ - Inspect information available from package indexes. 
- """ - - usage = """ - %prog versions - """ - - def add_options(self) -> None: - cmdoptions.add_target_python_options(self.cmd_opts) - - self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) - self.cmd_opts.add_option(cmdoptions.pre()) - self.cmd_opts.add_option(cmdoptions.no_binary()) - self.cmd_opts.add_option(cmdoptions.only_binary()) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - handlers = { - "versions": self.get_available_package_versions, - } - - logger.warning( - "pip index is currently an experimental command. " - "It may be removed/changed in a future release " - "without prior warning." - ) - - # Determine action - if not args or args[0] not in handlers: - logger.error( - "Need an action (%s) to perform.", - ", ".join(sorted(handlers)), - ) - return ERROR - - action = args[0] - - # Error handling happens here, not in the action-handlers. - try: - handlers[action](options, args[1:]) - except PipError as e: - logger.error(e.args[0]) - return ERROR - - return SUCCESS - - def _build_package_finder( - self, - options: Values, - session: PipSession, - target_python: Optional[TargetPython] = None, - ignore_requires_python: Optional[bool] = None, - ) -> PackageFinder: - """ - Create a package finder appropriate to the index command. - """ - link_collector = LinkCollector.create(session, options=options) - - # Pass allow_yanked=False to ignore yanked versions. - selection_prefs = SelectionPreferences( - allow_yanked=False, - allow_all_prereleases=options.pre, - ignore_requires_python=ignore_requires_python, - ) - - return PackageFinder.create( - link_collector=link_collector, - selection_prefs=selection_prefs, - target_python=target_python, - use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled, - ) - - def get_available_package_versions(self, options: Values, args: List[Any]) -> None: - if len(args) != 1: - raise CommandError("You need to specify exactly one argument") - - target_python = cmdoptions.make_target_python(options) - query = args[0] - - with self._build_session(options) as session: - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, - ignore_requires_python=options.ignore_requires_python, - ) - - versions: Iterable[Union[LegacyVersion, Version]] = ( - candidate.version for candidate in finder.find_all_candidates(query) - ) - - if not options.pre: - # Remove prereleases - versions = ( - version for version in versions if not version.is_prerelease - ) - versions = set(versions) - - if not versions: - raise DistributionNotFound( - "No matching distribution found for {}".format(query) - ) - - formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] - latest = formatted_versions[0] - - write_output("{} ({})".format(query, latest)) - write_output("Available versions: {}".format(", ".join(formatted_versions))) - print_dist_installation_info(query, latest) diff --git a/venv/Lib/site-packages/pip/_internal/commands/install.py b/venv/Lib/site-packages/pip/_internal/commands/install.py deleted file mode 100644 index 34e4c2f..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/install.py +++ /dev/null @@ -1,771 +0,0 @@ -import errno -import operator -import os -import shutil -import site -from optparse import SUPPRESS_HELP, Values -from typing import Iterable, List, 
Optional - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cache import WheelCache -from pip._internal.cli import cmdoptions -from pip._internal.cli.cmdoptions import make_target_python -from pip._internal.cli.req_command import ( - RequirementCommand, - warn_if_run_as_root, - with_cleanup, -) -from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.exceptions import CommandError, InstallationError -from pip._internal.locations import get_scheme -from pip._internal.metadata import get_environment -from pip._internal.models.format_control import FormatControl -from pip._internal.operations.check import ConflictDetails, check_install_conflicts -from pip._internal.req import install_given_reqs -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_tracker import get_requirement_tracker -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.distutils_args import parse_distutils_args -from pip._internal.utils.filesystem import test_writable_dir -from pip._internal.utils.logging import getLogger -from pip._internal.utils.misc import ( - ensure_dir, - get_pip_version, - protect_pip_from_modification_on_windows, - write_output, -) -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.virtualenv import ( - running_under_virtualenv, - virtualenv_no_global, -) -from pip._internal.wheel_builder import ( - BinaryAllowedPredicate, - build, - should_build_for_install_command, -) - -logger = getLogger(__name__) - - -def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate: - def check_binary_allowed(req: InstallRequirement) -> bool: - canonical_name = canonicalize_name(req.name or "") - allowed_formats = format_control.get_allowed_formats(canonical_name) - return "binary" in allowed_formats - - return check_binary_allowed - - -class InstallCommand(RequirementCommand): - """ - Install packages from: - - - PyPI (and other indexes) using requirement specifiers. - - VCS project urls. - - Local project directories. - - Local or remote source archives. - - pip also supports installing from "requirements files", which provide - an easy way to specify a whole environment to be installed. - """ - - usage = """ - %prog [options] [package-index-options] ... - %prog [options] -r [package-index-options] ... - %prog [options] [-e] ... - %prog [options] [-e] ... - %prog [options] ...""" - - def add_options(self) -> None: - self.cmd_opts.add_option(cmdoptions.requirements()) - self.cmd_opts.add_option(cmdoptions.constraints()) - self.cmd_opts.add_option(cmdoptions.no_deps()) - self.cmd_opts.add_option(cmdoptions.pre()) - - self.cmd_opts.add_option(cmdoptions.editable()) - self.cmd_opts.add_option( - "-t", - "--target", - dest="target_dir", - metavar="dir", - default=None, - help=( - "Install packages into . " - "By default this will not replace existing files/folders in " - ". Use --upgrade to replace existing packages in " - "with new versions." - ), - ) - cmdoptions.add_target_python_options(self.cmd_opts) - - self.cmd_opts.add_option( - "--user", - dest="use_user_site", - action="store_true", - help=( - "Install to the Python user install directory for your " - "platform. Typically ~/.local/, or %APPDATA%\\Python on " - "Windows. 
(See the Python documentation for site.USER_BASE " - "for full details.)" - ), - ) - self.cmd_opts.add_option( - "--no-user", - dest="use_user_site", - action="store_false", - help=SUPPRESS_HELP, - ) - self.cmd_opts.add_option( - "--root", - dest="root_path", - metavar="dir", - default=None, - help="Install everything relative to this alternate root directory.", - ) - self.cmd_opts.add_option( - "--prefix", - dest="prefix_path", - metavar="dir", - default=None, - help=( - "Installation prefix where lib, bin and other top-level " - "folders are placed" - ), - ) - - self.cmd_opts.add_option(cmdoptions.src()) - - self.cmd_opts.add_option( - "-U", - "--upgrade", - dest="upgrade", - action="store_true", - help=( - "Upgrade all specified packages to the newest available " - "version. The handling of dependencies depends on the " - "upgrade-strategy used." - ), - ) - - self.cmd_opts.add_option( - "--upgrade-strategy", - dest="upgrade_strategy", - default="only-if-needed", - choices=["only-if-needed", "eager"], - help=( - "Determines how dependency upgrading should be handled " - "[default: %default]. " - '"eager" - dependencies are upgraded regardless of ' - "whether the currently installed version satisfies the " - "requirements of the upgraded package(s). " - '"only-if-needed" - are upgraded only when they do not ' - "satisfy the requirements of the upgraded package(s)." - ), - ) - - self.cmd_opts.add_option( - "--force-reinstall", - dest="force_reinstall", - action="store_true", - help="Reinstall all packages even if they are already up-to-date.", - ) - - self.cmd_opts.add_option( - "-I", - "--ignore-installed", - dest="ignore_installed", - action="store_true", - help=( - "Ignore the installed packages, overwriting them. " - "This can break your system if the existing package " - "is of a different version or was installed " - "with a different package manager!" 
- ), - ) - - self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) - self.cmd_opts.add_option(cmdoptions.no_build_isolation()) - self.cmd_opts.add_option(cmdoptions.use_pep517()) - self.cmd_opts.add_option(cmdoptions.no_use_pep517()) - - self.cmd_opts.add_option(cmdoptions.install_options()) - self.cmd_opts.add_option(cmdoptions.global_options()) - - self.cmd_opts.add_option( - "--compile", - action="store_true", - dest="compile", - default=True, - help="Compile Python source files to bytecode", - ) - - self.cmd_opts.add_option( - "--no-compile", - action="store_false", - dest="compile", - help="Do not compile Python source files to bytecode", - ) - - self.cmd_opts.add_option( - "--no-warn-script-location", - action="store_false", - dest="warn_script_location", - default=True, - help="Do not warn when installing scripts outside PATH", - ) - self.cmd_opts.add_option( - "--no-warn-conflicts", - action="store_false", - dest="warn_about_conflicts", - default=True, - help="Do not warn about broken dependencies", - ) - - self.cmd_opts.add_option(cmdoptions.no_binary()) - self.cmd_opts.add_option(cmdoptions.only_binary()) - self.cmd_opts.add_option(cmdoptions.prefer_binary()) - self.cmd_opts.add_option(cmdoptions.require_hashes()) - self.cmd_opts.add_option(cmdoptions.progress_bar()) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, self.cmd_opts) - - @with_cleanup - def run(self, options: Values, args: List[str]) -> int: - if options.use_user_site and options.target_dir is not None: - raise CommandError("Can not combine '--user' and '--target'") - - cmdoptions.check_install_build_global(options) - upgrade_strategy = "to-satisfy-only" - if options.upgrade: - upgrade_strategy = options.upgrade_strategy - - cmdoptions.check_dist_restriction(options, check_target=True) - - install_options = options.install_options or [] - - logger.verbose("Using %s", get_pip_version()) - options.use_user_site = decide_user_install( - options.use_user_site, - prefix_path=options.prefix_path, - target_dir=options.target_dir, - root_path=options.root_path, - isolated_mode=options.isolated_mode, - ) - - target_temp_dir: Optional[TempDirectory] = None - target_temp_dir_path: Optional[str] = None - if options.target_dir: - options.ignore_installed = True - options.target_dir = os.path.abspath(options.target_dir) - if ( - # fmt: off - os.path.exists(options.target_dir) and - not os.path.isdir(options.target_dir) - # fmt: on - ): - raise CommandError( - "Target path exists but is not a directory, will not continue." 
- ) - - # Create a target directory for using with the target option - target_temp_dir = TempDirectory(kind="target") - target_temp_dir_path = target_temp_dir.path - self.enter_context(target_temp_dir) - - global_options = options.global_options or [] - - session = self.get_default_session(options) - - target_python = make_target_python(options) - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, - ignore_requires_python=options.ignore_requires_python, - ) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - - req_tracker = self.enter_context(get_requirement_tracker()) - - directory = TempDirectory( - delete=not options.no_clean, - kind="install", - globally_managed=True, - ) - - try: - reqs = self.get_requirements(args, options, finder, session) - - # Only when installing is it permitted to use PEP 660. - # In other circumstances (pip wheel, pip download) we generate - # regular (i.e. non editable) metadata and wheels. - for req in reqs: - req.permit_editable_wheels = True - - reject_location_related_install_options(reqs, options.install_options) - - preparer = self.make_requirement_preparer( - temp_build_dir=directory, - options=options, - req_tracker=req_tracker, - session=session, - finder=finder, - use_user_site=options.use_user_site, - verbosity=self.verbosity, - ) - resolver = self.make_resolver( - preparer=preparer, - finder=finder, - options=options, - wheel_cache=wheel_cache, - use_user_site=options.use_user_site, - ignore_installed=options.ignore_installed, - ignore_requires_python=options.ignore_requires_python, - force_reinstall=options.force_reinstall, - upgrade_strategy=upgrade_strategy, - use_pep517=options.use_pep517, - ) - - self.trace_basic_info(finder) - - requirement_set = resolver.resolve( - reqs, check_supported_wheels=not options.target_dir - ) - - try: - pip_req = requirement_set.get_requirement("pip") - except KeyError: - modifying_pip = False - else: - # If we're not replacing an already installed pip, - # we're not modifying it. - modifying_pip = pip_req.satisfied_by is None - protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) - - check_binary_allowed = get_check_binary_allowed(finder.format_control) - - reqs_to_build = [ - r - for r in requirement_set.requirements.values() - if should_build_for_install_command(r, check_binary_allowed) - ] - - _, build_failures = build( - reqs_to_build, - wheel_cache=wheel_cache, - verify=True, - build_options=[], - global_options=[], - ) - - # If we're using PEP 517, we cannot do a legacy setup.py install - # so we fail here. - pep517_build_failure_names: List[str] = [ - r.name for r in build_failures if r.use_pep517 # type: ignore - ] - if pep517_build_failure_names: - raise InstallationError( - "Could not build wheels for {}, which is required to " - "install pyproject.toml-based projects".format( - ", ".join(pep517_build_failure_names) - ) - ) - - # For now, we just warn about failures building legacy - # requirements, as we'll fall through to a setup.py install for - # those. - for r in build_failures: - if not r.use_pep517: - r.legacy_install_reason = 8368 - - to_install = resolver.get_installation_order(requirement_set) - - # Check for conflicts in the package set we're installing. 
- conflicts: Optional[ConflictDetails] = None - should_warn_about_conflicts = ( - not options.ignore_dependencies and options.warn_about_conflicts - ) - if should_warn_about_conflicts: - conflicts = self._determine_conflicts(to_install) - - # Don't warn about script install locations if - # --target or --prefix has been specified - warn_script_location = options.warn_script_location - if options.target_dir or options.prefix_path: - warn_script_location = False - - installed = install_given_reqs( - to_install, - install_options, - global_options, - root=options.root_path, - home=target_temp_dir_path, - prefix=options.prefix_path, - warn_script_location=warn_script_location, - use_user_site=options.use_user_site, - pycompile=options.compile, - ) - - lib_locations = get_lib_location_guesses( - user=options.use_user_site, - home=target_temp_dir_path, - root=options.root_path, - prefix=options.prefix_path, - isolated=options.isolated_mode, - ) - env = get_environment(lib_locations) - - installed.sort(key=operator.attrgetter("name")) - items = [] - for result in installed: - item = result.name - try: - installed_dist = env.get_distribution(item) - if installed_dist is not None: - item = f"{item}-{installed_dist.version}" - except Exception: - pass - items.append(item) - - if conflicts is not None: - self._warn_about_conflicts( - conflicts, - resolver_variant=self.determine_resolver_variant(options), - ) - - installed_desc = " ".join(items) - if installed_desc: - write_output( - "Successfully installed %s", - installed_desc, - ) - except OSError as error: - show_traceback = self.verbosity >= 1 - - message = create_os_error_message( - error, - show_traceback, - options.use_user_site, - ) - logger.error(message, exc_info=show_traceback) # noqa - - return ERROR - - if options.target_dir: - assert target_temp_dir - self._handle_target_dir( - options.target_dir, target_temp_dir, options.upgrade - ) - - warn_if_run_as_root() - return SUCCESS - - def _handle_target_dir( - self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool - ) -> None: - ensure_dir(target_dir) - - # Checking both purelib and platlib directories for installed - # packages to be moved to target directory - lib_dir_list = [] - - # Checking both purelib and platlib directories for installed - # packages to be moved to target directory - scheme = get_scheme("", home=target_temp_dir.path) - purelib_dir = scheme.purelib - platlib_dir = scheme.platlib - data_dir = scheme.data - - if os.path.exists(purelib_dir): - lib_dir_list.append(purelib_dir) - if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: - lib_dir_list.append(platlib_dir) - if os.path.exists(data_dir): - lib_dir_list.append(data_dir) - - for lib_dir in lib_dir_list: - for item in os.listdir(lib_dir): - if lib_dir == data_dir: - ddir = os.path.join(data_dir, item) - if any(s.startswith(ddir) for s in lib_dir_list[:-1]): - continue - target_item_dir = os.path.join(target_dir, item) - if os.path.exists(target_item_dir): - if not upgrade: - logger.warning( - "Target directory %s already exists. Specify " - "--upgrade to force replacement.", - target_item_dir, - ) - continue - if os.path.islink(target_item_dir): - logger.warning( - "Target directory %s already exists and is " - "a link. 
pip will not automatically replace " - "links, please remove if replacement is " - "desired.", - target_item_dir, - ) - continue - if os.path.isdir(target_item_dir): - shutil.rmtree(target_item_dir) - else: - os.remove(target_item_dir) - - shutil.move(os.path.join(lib_dir, item), target_item_dir) - - def _determine_conflicts( - self, to_install: List[InstallRequirement] - ) -> Optional[ConflictDetails]: - try: - return check_install_conflicts(to_install) - except Exception: - logger.exception( - "Error while checking for conflicts. Please file an issue on " - "pip's issue tracker: https://github.com/pypa/pip/issues/new" - ) - return None - - def _warn_about_conflicts( - self, conflict_details: ConflictDetails, resolver_variant: str - ) -> None: - package_set, (missing, conflicting) = conflict_details - if not missing and not conflicting: - return - - parts: List[str] = [] - if resolver_variant == "legacy": - parts.append( - "pip's legacy dependency resolver does not consider dependency " - "conflicts when selecting packages. This behaviour is the " - "source of the following dependency conflicts." - ) - else: - assert resolver_variant == "2020-resolver" - parts.append( - "pip's dependency resolver does not currently take into account " - "all the packages that are installed. This behaviour is the " - "source of the following dependency conflicts." - ) - - # NOTE: There is some duplication here, with commands/check.py - for project_name in missing: - version = package_set[project_name][0] - for dependency in missing[project_name]: - message = ( - "{name} {version} requires {requirement}, " - "which is not installed." - ).format( - name=project_name, - version=version, - requirement=dependency[1], - ) - parts.append(message) - - for project_name in conflicting: - version = package_set[project_name][0] - for dep_name, dep_version, req in conflicting[project_name]: - message = ( - "{name} {version} requires {requirement}, but {you} have " - "{dep_name} {dep_version} which is incompatible." - ).format( - name=project_name, - version=version, - requirement=req, - dep_name=dep_name, - dep_version=dep_version, - you=("you" if resolver_variant == "2020-resolver" else "you'll"), - ) - parts.append(message) - - logger.critical("\n".join(parts)) - - -def get_lib_location_guesses( - user: bool = False, - home: Optional[str] = None, - root: Optional[str] = None, - isolated: bool = False, - prefix: Optional[str] = None, -) -> List[str]: - scheme = get_scheme( - "", - user=user, - home=home, - root=root, - isolated=isolated, - prefix=prefix, - ) - return [scheme.purelib, scheme.platlib] - - -def site_packages_writable(root: Optional[str], isolated: bool) -> bool: - return all( - test_writable_dir(d) - for d in set(get_lib_location_guesses(root=root, isolated=isolated)) - ) - - -def decide_user_install( - use_user_site: Optional[bool], - prefix_path: Optional[str] = None, - target_dir: Optional[str] = None, - root_path: Optional[str] = None, - isolated_mode: bool = False, -) -> bool: - """Determine whether to do a user install based on the input options. - - If use_user_site is False, no additional checks are done. - If use_user_site is True, it is checked for compatibility with other - options. - If use_user_site is None, the default behaviour depends on the environment, - which is provided by the other arguments. - """ - # In some cases (config from tox), use_user_site can be set to an integer - # rather than a bool, which 'use_user_site is False' wouldn't catch. 
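# A quick illustration of the point made in the comment above: if a config
# layer (e.g. tox) supplies 0 instead of False, an identity test against
# False misses the value, while the truthiness test used below still treats
# it as an explicit "no user install".
use_user_site = 0
print(use_user_site is False)                           # False -> identity test misses it
print(use_user_site is not None and not use_user_site)  # True  -> truthiness test catches it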
- if (use_user_site is not None) and (not use_user_site): - logger.debug("Non-user install by explicit request") - return False - - if use_user_site: - if prefix_path: - raise CommandError( - "Can not combine '--user' and '--prefix' as they imply " - "different installation locations" - ) - if virtualenv_no_global(): - raise InstallationError( - "Can not perform a '--user' install. User site-packages " - "are not visible in this virtualenv." - ) - logger.debug("User install by explicit request") - return True - - # If we are here, user installs have not been explicitly requested/avoided - assert use_user_site is None - - # user install incompatible with --prefix/--target - if prefix_path or target_dir: - logger.debug("Non-user install due to --prefix or --target option") - return False - - # If user installs are not enabled, choose a non-user install - if not site.ENABLE_USER_SITE: - logger.debug("Non-user install because user site-packages disabled") - return False - - # If we have permission for a non-user install, do that, - # otherwise do a user install. - if site_packages_writable(root=root_path, isolated=isolated_mode): - logger.debug("Non-user install because site-packages writeable") - return False - - logger.info( - "Defaulting to user installation because normal site-packages " - "is not writeable" - ) - return True - - -def reject_location_related_install_options( - requirements: List[InstallRequirement], options: Optional[List[str]] -) -> None: - """If any location-changing --install-option arguments were passed for - requirements or on the command-line, then show a deprecation warning. - """ - - def format_options(option_names: Iterable[str]) -> List[str]: - return ["--{}".format(name.replace("_", "-")) for name in option_names] - - offenders = [] - - for requirement in requirements: - install_options = requirement.install_options - location_options = parse_distutils_args(install_options) - if location_options: - offenders.append( - "{!r} from {}".format( - format_options(location_options.keys()), requirement - ) - ) - - if options: - location_options = parse_distutils_args(options) - if location_options: - offenders.append( - "{!r} from command line".format(format_options(location_options.keys())) - ) - - if not offenders: - return - - raise CommandError( - "Location-changing options found in --install-option: {}." - " This is unsupported, use pip-level options like --user," - " --prefix, --root, and --target instead.".format("; ".join(offenders)) - ) - - -def create_os_error_message( - error: OSError, show_traceback: bool, using_user_site: bool -) -> str: - """Format an error message for an OSError - - It may occur anytime during the execution of the install command. 
- """ - parts = [] - - # Mention the error if we are not going to show a traceback - parts.append("Could not install packages due to an OSError") - if not show_traceback: - parts.append(": ") - parts.append(str(error)) - else: - parts.append(".") - - # Spilt the error indication from a helper message (if any) - parts[-1] += "\n" - - # Suggest useful actions to the user: - # (1) using user site-packages or (2) verifying the permissions - if error.errno == errno.EACCES: - user_option_part = "Consider using the `--user` option" - permissions_part = "Check the permissions" - - if not running_under_virtualenv() and not using_user_site: - parts.extend( - [ - user_option_part, - " or ", - permissions_part.lower(), - ] - ) - else: - parts.append(permissions_part) - parts.append(".\n") - - # Suggest the user to enable Long Paths if path length is - # more than 260 - if ( - WINDOWS - and error.errno == errno.ENOENT - and error.filename - and len(error.filename) > 260 - ): - parts.append( - "HINT: This error might have occurred since " - "this system does not have Windows Long Path " - "support enabled. You can find information on " - "how to enable this at " - "https://pip.pypa.io/warnings/enable-long-paths\n" - ) - - return "".join(parts).strip() + "\n" diff --git a/venv/Lib/site-packages/pip/_internal/commands/list.py b/venv/Lib/site-packages/pip/_internal/commands/list.py deleted file mode 100644 index 57f05e0..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/list.py +++ /dev/null @@ -1,361 +0,0 @@ -import json -import logging -from optparse import Values -from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cli import cmdoptions -from pip._internal.cli.req_command import IndexGroupCommand -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.exceptions import CommandError -from pip._internal.index.collector import LinkCollector -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution, get_environment -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.network.session import PipSession -from pip._internal.utils.compat import stdlib_pkgs -from pip._internal.utils.misc import tabulate, write_output - -if TYPE_CHECKING: - from pip._internal.metadata.base import DistributionVersion - - class _DistWithLatestInfo(BaseDistribution): - """Give the distribution object a couple of extra fields. - - These will be populated during ``get_outdated()``. This is dirty but - makes the rest of the code much cleaner. - """ - - latest_version: DistributionVersion - latest_filetype: str - - _ProcessedDists = Sequence[_DistWithLatestInfo] - - -logger = logging.getLogger(__name__) - - -class ListCommand(IndexGroupCommand): - """ - List installed packages, including editables. - - Packages are listed in a case-insensitive sorted order. 
- """ - - ignore_require_venv = True - usage = """ - %prog [options]""" - - def add_options(self) -> None: - self.cmd_opts.add_option( - "-o", - "--outdated", - action="store_true", - default=False, - help="List outdated packages", - ) - self.cmd_opts.add_option( - "-u", - "--uptodate", - action="store_true", - default=False, - help="List uptodate packages", - ) - self.cmd_opts.add_option( - "-e", - "--editable", - action="store_true", - default=False, - help="List editable projects.", - ) - self.cmd_opts.add_option( - "-l", - "--local", - action="store_true", - default=False, - help=( - "If in a virtualenv that has global access, do not list " - "globally-installed packages." - ), - ) - self.cmd_opts.add_option( - "--user", - dest="user", - action="store_true", - default=False, - help="Only output packages installed in user-site.", - ) - self.cmd_opts.add_option(cmdoptions.list_path()) - self.cmd_opts.add_option( - "--pre", - action="store_true", - default=False, - help=( - "Include pre-release and development versions. By default, " - "pip only finds stable versions." - ), - ) - - self.cmd_opts.add_option( - "--format", - action="store", - dest="list_format", - default="columns", - choices=("columns", "freeze", "json"), - help="Select the output format among: columns (default), freeze, or json", - ) - - self.cmd_opts.add_option( - "--not-required", - action="store_true", - dest="not_required", - help="List packages that are not dependencies of installed packages.", - ) - - self.cmd_opts.add_option( - "--exclude-editable", - action="store_false", - dest="include_editable", - help="Exclude editable package from output.", - ) - self.cmd_opts.add_option( - "--include-editable", - action="store_true", - dest="include_editable", - help="Include editable package from output.", - default=True, - ) - self.cmd_opts.add_option(cmdoptions.list_exclude()) - index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, self.cmd_opts) - - def _build_package_finder( - self, options: Values, session: PipSession - ) -> PackageFinder: - """ - Create a package finder appropriate to this list command. - """ - link_collector = LinkCollector.create(session, options=options) - - # Pass allow_yanked=False to ignore yanked versions. - selection_prefs = SelectionPreferences( - allow_yanked=False, - allow_all_prereleases=options.pre, - ) - - return PackageFinder.create( - link_collector=link_collector, - selection_prefs=selection_prefs, - use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled, - ) - - def run(self, options: Values, args: List[str]) -> int: - if options.outdated and options.uptodate: - raise CommandError("Options --outdated and --uptodate cannot be combined.") - - cmdoptions.check_list_path_option(options) - - skip = set(stdlib_pkgs) - if options.excludes: - skip.update(canonicalize_name(n) for n in options.excludes) - - packages: "_ProcessedDists" = [ - cast("_DistWithLatestInfo", d) - for d in get_environment(options.path).iter_installed_distributions( - local_only=options.local, - user_only=options.user, - editables_only=options.editable, - include_editables=options.include_editable, - skip=skip, - ) - ] - - # get_not_required must be called firstly in order to find and - # filter out all dependencies correctly. Otherwise a package - # can't be identified as requirement because some parent packages - # could be filtered out before. 
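# A toy illustration (not pip's real data model) of why the "not required"
# filter has to run before the outdated/uptodate filters: the dependency
# edges are collected from whatever packages are still in the list, so
# filtering first can make a dependency look unrequired.
packages = {
    "webapp": (False, ["requests"]),  # up to date, depends on requests
    "requests": (True, []),           # outdated, required by webapp
}

# Correct order: compute dependency keys over the full set first.
dep_keys = {dep for _, deps in packages.values() for dep in deps}
print({name for name in packages if name not in dep_keys})  # {'webapp'}

# Wrong order: restrict to outdated packages first, then compute the keys.
outdated = {name: info for name, info in packages.items() if info[0]}
dep_keys_after = {dep for _, deps in outdated.values() for dep in deps}
print({name for name in outdated if name not in dep_keys_after})  # {'requests'} (wrongly "not required")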
- if options.not_required: - packages = self.get_not_required(packages, options) - - if options.outdated: - packages = self.get_outdated(packages, options) - elif options.uptodate: - packages = self.get_uptodate(packages, options) - - self.output_package_listing(packages, options) - return SUCCESS - - def get_outdated( - self, packages: "_ProcessedDists", options: Values - ) -> "_ProcessedDists": - return [ - dist - for dist in self.iter_packages_latest_infos(packages, options) - if dist.latest_version > dist.version - ] - - def get_uptodate( - self, packages: "_ProcessedDists", options: Values - ) -> "_ProcessedDists": - return [ - dist - for dist in self.iter_packages_latest_infos(packages, options) - if dist.latest_version == dist.version - ] - - def get_not_required( - self, packages: "_ProcessedDists", options: Values - ) -> "_ProcessedDists": - dep_keys = { - canonicalize_name(dep.name) - for dist in packages - for dep in (dist.iter_dependencies() or ()) - } - - # Create a set to remove duplicate packages, and cast it to a list - # to keep the return type consistent with get_outdated and - # get_uptodate - return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys}) - - def iter_packages_latest_infos( - self, packages: "_ProcessedDists", options: Values - ) -> Iterator["_DistWithLatestInfo"]: - with self._build_session(options) as session: - finder = self._build_package_finder(options, session) - - def latest_info( - dist: "_DistWithLatestInfo", - ) -> Optional["_DistWithLatestInfo"]: - all_candidates = finder.find_all_candidates(dist.canonical_name) - if not options.pre: - # Remove prereleases - all_candidates = [ - candidate - for candidate in all_candidates - if not candidate.version.is_prerelease - ] - - evaluator = finder.make_candidate_evaluator( - project_name=dist.canonical_name, - ) - best_candidate = evaluator.sort_best_candidate(all_candidates) - if best_candidate is None: - return None - - remote_version = best_candidate.version - if best_candidate.link.is_wheel: - typ = "wheel" - else: - typ = "sdist" - dist.latest_version = remote_version - dist.latest_filetype = typ - return dist - - for dist in map(latest_info, packages): - if dist is not None: - yield dist - - def output_package_listing( - self, packages: "_ProcessedDists", options: Values - ) -> None: - packages = sorted( - packages, - key=lambda dist: dist.canonical_name, - ) - if options.list_format == "columns" and packages: - data, header = format_for_columns(packages, options) - self.output_package_listing_columns(data, header) - elif options.list_format == "freeze": - for dist in packages: - if options.verbose >= 1: - write_output( - "%s==%s (%s)", dist.raw_name, dist.version, dist.location - ) - else: - write_output("%s==%s", dist.raw_name, dist.version) - elif options.list_format == "json": - write_output(format_for_json(packages, options)) - - def output_package_listing_columns( - self, data: List[List[str]], header: List[str] - ) -> None: - # insert the header first: we need to know the size of column names - if len(data) > 0: - data.insert(0, header) - - pkg_strings, sizes = tabulate(data) - - # Create and add a separator. - if len(data) > 0: - pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes))) - - for val in pkg_strings: - write_output(val) - - -def format_for_columns( - pkgs: "_ProcessedDists", options: Values -) -> Tuple[List[List[str]], List[str]]: - """ - Convert the package data into something usable - by output_package_listing_columns. 
- """ - header = ["Package", "Version"] - - running_outdated = options.outdated - if running_outdated: - header.extend(["Latest", "Type"]) - - has_editables = any(x.editable for x in pkgs) - if has_editables: - header.append("Editable project location") - - if options.verbose >= 1: - header.append("Location") - if options.verbose >= 1: - header.append("Installer") - - data = [] - for proj in pkgs: - # if we're working on the 'outdated' list, separate out the - # latest_version and type - row = [proj.raw_name, str(proj.version)] - - if running_outdated: - row.append(str(proj.latest_version)) - row.append(proj.latest_filetype) - - if has_editables: - row.append(proj.editable_project_location or "") - - if options.verbose >= 1: - row.append(proj.location or "") - if options.verbose >= 1: - row.append(proj.installer) - - data.append(row) - - return data, header - - -def format_for_json(packages: "_ProcessedDists", options: Values) -> str: - data = [] - for dist in packages: - info = { - "name": dist.raw_name, - "version": str(dist.version), - } - if options.verbose >= 1: - info["location"] = dist.location or "" - info["installer"] = dist.installer - if options.outdated: - info["latest_version"] = str(dist.latest_version) - info["latest_filetype"] = dist.latest_filetype - editable_project_location = dist.editable_project_location - if editable_project_location: - info["editable_project_location"] = editable_project_location - data.append(info) - return json.dumps(data) diff --git a/venv/Lib/site-packages/pip/_internal/commands/search.py b/venv/Lib/site-packages/pip/_internal/commands/search.py deleted file mode 100644 index 03ed925..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/search.py +++ /dev/null @@ -1,174 +0,0 @@ -import logging -import shutil -import sys -import textwrap -import xmlrpc.client -from collections import OrderedDict -from optparse import Values -from typing import TYPE_CHECKING, Dict, List, Optional - -from pip._vendor.packaging.version import parse as parse_version - -from pip._internal.cli.base_command import Command -from pip._internal.cli.req_command import SessionCommandMixin -from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS -from pip._internal.exceptions import CommandError -from pip._internal.metadata import get_default_environment -from pip._internal.models.index import PyPI -from pip._internal.network.xmlrpc import PipXmlrpcTransport -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import write_output - -if TYPE_CHECKING: - from typing import TypedDict - - class TransformedHit(TypedDict): - name: str - summary: str - versions: List[str] - - -logger = logging.getLogger(__name__) - - -class SearchCommand(Command, SessionCommandMixin): - """Search for PyPI packages whose name or summary contains .""" - - usage = """ - %prog [options] """ - ignore_require_venv = True - - def add_options(self) -> None: - self.cmd_opts.add_option( - "-i", - "--index", - dest="index", - metavar="URL", - default=PyPI.pypi_url, - help="Base URL of Python Package Index (default %default)", - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - if not args: - raise CommandError("Missing required argument (search query).") - query = args - pypi_hits = self.search(query, options) - hits = transform_hits(pypi_hits) - - terminal_width = None - if sys.stdout.isatty(): - terminal_width = shutil.get_terminal_size()[0] - - print_results(hits, 
terminal_width=terminal_width) - if pypi_hits: - return SUCCESS - return NO_MATCHES_FOUND - - def search(self, query: List[str], options: Values) -> List[Dict[str, str]]: - index_url = options.index - - session = self.get_default_session(options) - - transport = PipXmlrpcTransport(index_url, session) - pypi = xmlrpc.client.ServerProxy(index_url, transport) - try: - hits = pypi.search({"name": query, "summary": query}, "or") - except xmlrpc.client.Fault as fault: - message = "XMLRPC request failed [code: {code}]\n{string}".format( - code=fault.faultCode, - string=fault.faultString, - ) - raise CommandError(message) - assert isinstance(hits, list) - return hits - - -def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]: - """ - The list from pypi is really a list of versions. We want a list of - packages with the list of versions stored inline. This converts the - list from pypi into one we can use. - """ - packages: Dict[str, "TransformedHit"] = OrderedDict() - for hit in hits: - name = hit["name"] - summary = hit["summary"] - version = hit["version"] - - if name not in packages.keys(): - packages[name] = { - "name": name, - "summary": summary, - "versions": [version], - } - else: - packages[name]["versions"].append(version) - - # if this is the highest version, replace summary and score - if version == highest_version(packages[name]["versions"]): - packages[name]["summary"] = summary - - return list(packages.values()) - - -def print_dist_installation_info(name: str, latest: str) -> None: - env = get_default_environment() - dist = env.get_distribution(name) - if dist is not None: - with indent_log(): - if dist.version == latest: - write_output("INSTALLED: %s (latest)", dist.version) - else: - write_output("INSTALLED: %s", dist.version) - if parse_version(latest).pre: - write_output( - "LATEST: %s (pre-release; install" - " with `pip install --pre`)", - latest, - ) - else: - write_output("LATEST: %s", latest) - - -def print_results( - hits: List["TransformedHit"], - name_column_width: Optional[int] = None, - terminal_width: Optional[int] = None, -) -> None: - if not hits: - return - if name_column_width is None: - name_column_width = ( - max( - [ - len(hit["name"]) + len(highest_version(hit.get("versions", ["-"]))) - for hit in hits - ] - ) - + 4 - ) - - for hit in hits: - name = hit["name"] - summary = hit["summary"] or "" - latest = highest_version(hit.get("versions", ["-"])) - if terminal_width is not None: - target_width = terminal_width - name_column_width - 5 - if target_width > 10: - # wrap and indent summary to fit terminal - summary_lines = textwrap.wrap(summary, target_width) - summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines) - - name_latest = f"{name} ({latest})" - line = f"{name_latest:{name_column_width}} - {summary}" - try: - write_output(line) - print_dist_installation_info(name, latest) - except UnicodeEncodeError: - pass - - -def highest_version(versions: List[str]) -> str: - return max(versions, key=parse_version) diff --git a/venv/Lib/site-packages/pip/_internal/commands/show.py b/venv/Lib/site-packages/pip/_internal/commands/show.py deleted file mode 100644 index d5540d6..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/show.py +++ /dev/null @@ -1,178 +0,0 @@ -import logging -from optparse import Values -from typing import Iterator, List, NamedTuple, Optional - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR, 
SUCCESS -from pip._internal.metadata import BaseDistribution, get_default_environment -from pip._internal.utils.misc import write_output - -logger = logging.getLogger(__name__) - - -class ShowCommand(Command): - """ - Show information about one or more installed packages. - - The output is in RFC-compliant mail header format. - """ - - usage = """ - %prog [options] ...""" - ignore_require_venv = True - - def add_options(self) -> None: - self.cmd_opts.add_option( - "-f", - "--files", - dest="files", - action="store_true", - default=False, - help="Show the full list of installed files for each package.", - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - if not args: - logger.warning("ERROR: Please provide a package name or names.") - return ERROR - query = args - - results = search_packages_info(query) - if not print_results( - results, list_files=options.files, verbose=options.verbose - ): - return ERROR - return SUCCESS - - -class _PackageInfo(NamedTuple): - name: str - version: str - location: str - requires: List[str] - required_by: List[str] - installer: str - metadata_version: str - classifiers: List[str] - summary: str - homepage: str - author: str - author_email: str - license: str - entry_points: List[str] - files: Optional[List[str]] - - -def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]: - """ - Gather details from installed distributions. Print distribution name, - version, location, and installed files. Installed files requires a - pip generated 'installed-files.txt' in the distributions '.egg-info' - directory. - """ - env = get_default_environment() - - installed = {dist.canonical_name: dist for dist in env.iter_distributions()} - query_names = [canonicalize_name(name) for name in query] - missing = sorted( - [name for name, pkg in zip(query, query_names) if pkg not in installed] - ) - if missing: - logger.warning("Package(s) not found: %s", ", ".join(missing)) - - def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: - return ( - dist.metadata["Name"] or "UNKNOWN" - for dist in installed.values() - if current_dist.canonical_name - in {canonicalize_name(d.name) for d in dist.iter_dependencies()} - ) - - for query_name in query_names: - try: - dist = installed[query_name] - except KeyError: - continue - - requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower) - required_by = sorted(_get_requiring_packages(dist), key=str.lower) - - try: - entry_points_text = dist.read_text("entry_points.txt") - entry_points = entry_points_text.splitlines(keepends=False) - except FileNotFoundError: - entry_points = [] - - files_iter = dist.iter_declared_entries() - if files_iter is None: - files: Optional[List[str]] = None - else: - files = sorted(files_iter) - - metadata = dist.metadata - - yield _PackageInfo( - name=dist.raw_name, - version=str(dist.version), - location=dist.location or "", - requires=requires, - required_by=required_by, - installer=dist.installer, - metadata_version=dist.metadata_version or "", - classifiers=metadata.get_all("Classifier", []), - summary=metadata.get("Summary", ""), - homepage=metadata.get("Home-page", ""), - author=metadata.get("Author", ""), - author_email=metadata.get("Author-email", ""), - license=metadata.get("License", ""), - entry_points=entry_points, - files=files, - ) - - -def print_results( - distributions: Iterator[_PackageInfo], - list_files: bool, - verbose: bool, -) -> bool: - """ - Print the information from 
installed distributions found. - """ - results_printed = False - for i, dist in enumerate(distributions): - results_printed = True - if i > 0: - write_output("---") - - write_output("Name: %s", dist.name) - write_output("Version: %s", dist.version) - write_output("Summary: %s", dist.summary) - write_output("Home-page: %s", dist.homepage) - write_output("Author: %s", dist.author) - write_output("Author-email: %s", dist.author_email) - write_output("License: %s", dist.license) - write_output("Location: %s", dist.location) - write_output("Requires: %s", ", ".join(dist.requires)) - write_output("Required-by: %s", ", ".join(dist.required_by)) - - if verbose: - write_output("Metadata-Version: %s", dist.metadata_version) - write_output("Installer: %s", dist.installer) - write_output("Classifiers:") - for classifier in dist.classifiers: - write_output(" %s", classifier) - write_output("Entry-points:") - for entry in dist.entry_points: - write_output(" %s", entry.strip()) - if list_files: - write_output("Files:") - if dist.files is None: - write_output("Cannot locate RECORD or installed-files.txt") - else: - for line in dist.files: - write_output(" %s", line.strip()) - return results_printed diff --git a/venv/Lib/site-packages/pip/_internal/commands/uninstall.py b/venv/Lib/site-packages/pip/_internal/commands/uninstall.py deleted file mode 100644 index bb9e8e6..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/uninstall.py +++ /dev/null @@ -1,105 +0,0 @@ -import logging -from optparse import Values -from typing import List - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.cli.base_command import Command -from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.exceptions import InstallationError -from pip._internal.req import parse_requirements -from pip._internal.req.constructors import ( - install_req_from_line, - install_req_from_parsed_requirement, -) -from pip._internal.utils.misc import protect_pip_from_modification_on_windows - -logger = logging.getLogger(__name__) - - -class UninstallCommand(Command, SessionCommandMixin): - """ - Uninstall packages. - - pip is able to uninstall most installed packages. Known exceptions are: - - - Pure distutils packages installed with ``python setup.py install``, which - leave behind no metadata to determine what files were installed. - - Script wrappers installed by ``python setup.py develop``. - """ - - usage = """ - %prog [options] ... - %prog [options] -r ...""" - - def add_options(self) -> None: - self.cmd_opts.add_option( - "-r", - "--requirement", - dest="requirements", - action="append", - default=[], - metavar="file", - help=( - "Uninstall all the packages listed in the given requirements " - "file. This option can be used multiple times." 
- ), - ) - self.cmd_opts.add_option( - "-y", - "--yes", - dest="yes", - action="store_true", - help="Don't ask for confirmation of uninstall deletions.", - ) - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options: Values, args: List[str]) -> int: - session = self.get_default_session(options) - - reqs_to_uninstall = {} - for name in args: - req = install_req_from_line( - name, - isolated=options.isolated_mode, - ) - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - else: - logger.warning( - "Invalid requirement: %r ignored -" - " the uninstall command expects named" - " requirements.", - name, - ) - for filename in options.requirements: - for parsed_req in parse_requirements( - filename, options=options, session=session - ): - req = install_req_from_parsed_requirement( - parsed_req, isolated=options.isolated_mode - ) - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - if not reqs_to_uninstall: - raise InstallationError( - f"You must give at least one requirement to {self.name} (see " - f'"pip help {self.name}")' - ) - - protect_pip_from_modification_on_windows( - modifying_pip="pip" in reqs_to_uninstall - ) - - for req in reqs_to_uninstall.values(): - uninstall_pathset = req.uninstall( - auto_confirm=options.yes, - verbose=self.verbosity > 0, - ) - if uninstall_pathset: - uninstall_pathset.commit() - - warn_if_run_as_root() - return SUCCESS diff --git a/venv/Lib/site-packages/pip/_internal/commands/wheel.py b/venv/Lib/site-packages/pip/_internal/commands/wheel.py deleted file mode 100644 index d5b20dc..0000000 --- a/venv/Lib/site-packages/pip/_internal/commands/wheel.py +++ /dev/null @@ -1,178 +0,0 @@ -import logging -import os -import shutil -from optparse import Values -from typing import List - -from pip._internal.cache import WheelCache -from pip._internal.cli import cmdoptions -from pip._internal.cli.req_command import RequirementCommand, with_cleanup -from pip._internal.cli.status_codes import SUCCESS -from pip._internal.exceptions import CommandError -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_tracker import get_requirement_tracker -from pip._internal.utils.misc import ensure_dir, normalize_path -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.wheel_builder import build, should_build_for_wheel_command - -logger = logging.getLogger(__name__) - - -class WheelCommand(RequirementCommand): - """ - Build Wheel archives for your requirements and dependencies. - - Wheel is a built-package format, and offers the advantage of not - recompiling your software during every install. For more details, see the - wheel docs: https://wheel.readthedocs.io/en/latest/ - - Requirements: setuptools>=0.8, and wheel. - - 'pip wheel' uses the bdist_wheel setuptools extension from the wheel - package to build individual wheels. - - """ - - usage = """ - %prog [options] ... - %prog [options] -r ... - %prog [options] [-e] ... - %prog [options] [-e] ... - %prog [options] ...""" - - def add_options(self) -> None: - - self.cmd_opts.add_option( - "-w", - "--wheel-dir", - dest="wheel_dir", - metavar="dir", - default=os.curdir, - help=( - "Build wheels into , where the default is the " - "current working directory." 
- ), - ) - self.cmd_opts.add_option(cmdoptions.no_binary()) - self.cmd_opts.add_option(cmdoptions.only_binary()) - self.cmd_opts.add_option(cmdoptions.prefer_binary()) - self.cmd_opts.add_option(cmdoptions.no_build_isolation()) - self.cmd_opts.add_option(cmdoptions.use_pep517()) - self.cmd_opts.add_option(cmdoptions.no_use_pep517()) - self.cmd_opts.add_option(cmdoptions.constraints()) - self.cmd_opts.add_option(cmdoptions.editable()) - self.cmd_opts.add_option(cmdoptions.requirements()) - self.cmd_opts.add_option(cmdoptions.src()) - self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) - self.cmd_opts.add_option(cmdoptions.no_deps()) - self.cmd_opts.add_option(cmdoptions.progress_bar()) - - self.cmd_opts.add_option( - "--no-verify", - dest="no_verify", - action="store_true", - default=False, - help="Don't verify if built wheel is valid.", - ) - - self.cmd_opts.add_option(cmdoptions.build_options()) - self.cmd_opts.add_option(cmdoptions.global_options()) - - self.cmd_opts.add_option( - "--pre", - action="store_true", - default=False, - help=( - "Include pre-release and development versions. By default, " - "pip only finds stable versions." - ), - ) - - self.cmd_opts.add_option(cmdoptions.require_hashes()) - - index_opts = cmdoptions.make_option_group( - cmdoptions.index_group, - self.parser, - ) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, self.cmd_opts) - - @with_cleanup - def run(self, options: Values, args: List[str]) -> int: - cmdoptions.check_install_build_global(options) - - session = self.get_default_session(options) - - finder = self._build_package_finder(options, session) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - - options.wheel_dir = normalize_path(options.wheel_dir) - ensure_dir(options.wheel_dir) - - req_tracker = self.enter_context(get_requirement_tracker()) - - directory = TempDirectory( - delete=not options.no_clean, - kind="wheel", - globally_managed=True, - ) - - reqs = self.get_requirements(args, options, finder, session) - - preparer = self.make_requirement_preparer( - temp_build_dir=directory, - options=options, - req_tracker=req_tracker, - session=session, - finder=finder, - download_dir=options.wheel_dir, - use_user_site=False, - verbosity=self.verbosity, - ) - - resolver = self.make_resolver( - preparer=preparer, - finder=finder, - options=options, - wheel_cache=wheel_cache, - ignore_requires_python=options.ignore_requires_python, - use_pep517=options.use_pep517, - ) - - self.trace_basic_info(finder) - - requirement_set = resolver.resolve(reqs, check_supported_wheels=True) - - reqs_to_build: List[InstallRequirement] = [] - for req in requirement_set.requirements.values(): - if req.is_wheel: - preparer.save_linked_requirement(req) - elif should_build_for_wheel_command(req): - reqs_to_build.append(req) - - # build wheels - build_successes, build_failures = build( - reqs_to_build, - wheel_cache=wheel_cache, - verify=(not options.no_verify), - build_options=options.build_options or [], - global_options=options.global_options or [], - ) - for req in build_successes: - assert req.link and req.link.is_wheel - assert req.local_file_path - # copy from cache to target directory - try: - shutil.copy(req.local_file_path, options.wheel_dir) - except OSError as e: - logger.warning( - "Building wheel for %s failed: %s", - req.name, - e, - ) - build_failures.append(req) - if len(build_failures) != 0: - raise CommandError("Failed to build one or more wheels") - - return SUCCESS diff --git 
a/venv/Lib/site-packages/pip/_internal/configuration.py b/venv/Lib/site-packages/pip/_internal/configuration.py deleted file mode 100644 index a8092d1..0000000 --- a/venv/Lib/site-packages/pip/_internal/configuration.py +++ /dev/null @@ -1,366 +0,0 @@ -"""Configuration management setup - -Some terminology: -- name - As written in config files. -- value - Value associated with a name -- key - Name combined with it's section (section.name) -- variant - A single word describing where the configuration key-value pair came from -""" - -import configparser -import locale -import os -import sys -from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple - -from pip._internal.exceptions import ( - ConfigurationError, - ConfigurationFileCouldNotBeLoaded, -) -from pip._internal.utils import appdirs -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.logging import getLogger -from pip._internal.utils.misc import ensure_dir, enum - -RawConfigParser = configparser.RawConfigParser # Shorthand -Kind = NewType("Kind", str) - -CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf" -ENV_NAMES_IGNORED = "version", "help" - -# The kinds of configurations there are. -kinds = enum( - USER="user", # User Specific - GLOBAL="global", # System Wide - SITE="site", # [Virtual] Environment Specific - ENV="env", # from PIP_CONFIG_FILE - ENV_VAR="env-var", # from Environment Variables -) -OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR -VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE - -logger = getLogger(__name__) - - -# NOTE: Maybe use the optionx attribute to normalize keynames. -def _normalize_name(name: str) -> str: - """Make a name consistent regardless of source (environment or file)""" - name = name.lower().replace("_", "-") - if name.startswith("--"): - name = name[2:] # only prefer long opts - return name - - -def _disassemble_key(name: str) -> List[str]: - if "." not in name: - error_message = ( - "Key does not contain dot separated section and key. " - "Perhaps you wanted to use 'global.{}' instead?" - ).format(name) - raise ConfigurationError(error_message) - return name.split(".", 1) - - -def get_configuration_files() -> Dict[Kind, List[str]]: - global_config_files = [ - os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip") - ] - - site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) - legacy_config_file = os.path.join( - os.path.expanduser("~"), - "pip" if WINDOWS else ".pip", - CONFIG_BASENAME, - ) - new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME) - return { - kinds.GLOBAL: global_config_files, - kinds.SITE: [site_config_file], - kinds.USER: [legacy_config_file, new_config_file], - } - - -class Configuration: - """Handles management of configuration. - - Provides an interface to accessing and managing configuration files. - - This class converts provides an API that takes "section.key-name" style - keys and stores the value associated with it as "key-name" under the - section "section". - - This allows for a clean interface wherein the both the section and the - key-name are preserved in an easy to manage form in the configuration files - and the data stored is also nice. 
- """ - - def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None: - super().__init__() - - if load_only is not None and load_only not in VALID_LOAD_ONLY: - raise ConfigurationError( - "Got invalid value for load_only - should be one of {}".format( - ", ".join(map(repr, VALID_LOAD_ONLY)) - ) - ) - self.isolated = isolated - self.load_only = load_only - - # Because we keep track of where we got the data from - self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = { - variant: [] for variant in OVERRIDE_ORDER - } - self._config: Dict[Kind, Dict[str, Any]] = { - variant: {} for variant in OVERRIDE_ORDER - } - self._modified_parsers: List[Tuple[str, RawConfigParser]] = [] - - def load(self) -> None: - """Loads configuration from configuration files and environment""" - self._load_config_files() - if not self.isolated: - self._load_environment_vars() - - def get_file_to_edit(self) -> Optional[str]: - """Returns the file with highest priority in configuration""" - assert self.load_only is not None, "Need to be specified a file to be editing" - - try: - return self._get_parser_to_modify()[0] - except IndexError: - return None - - def items(self) -> Iterable[Tuple[str, Any]]: - """Returns key-value pairs like dict.items() representing the loaded - configuration - """ - return self._dictionary.items() - - def get_value(self, key: str) -> Any: - """Get a value from the configuration.""" - try: - return self._dictionary[key] - except KeyError: - raise ConfigurationError(f"No such key - {key}") - - def set_value(self, key: str, value: Any) -> None: - """Modify a value in the configuration.""" - self._ensure_have_load_only() - - assert self.load_only - fname, parser = self._get_parser_to_modify() - - if parser is not None: - section, name = _disassemble_key(key) - - # Modify the parser and the configuration - if not parser.has_section(section): - parser.add_section(section) - parser.set(section, name, value) - - self._config[self.load_only][key] = value - self._mark_as_modified(fname, parser) - - def unset_value(self, key: str) -> None: - """Unset a value in the configuration.""" - self._ensure_have_load_only() - - assert self.load_only - if key not in self._config[self.load_only]: - raise ConfigurationError(f"No such key - {key}") - - fname, parser = self._get_parser_to_modify() - - if parser is not None: - section, name = _disassemble_key(key) - if not ( - parser.has_section(section) and parser.remove_option(section, name) - ): - # The option was not removed. - raise ConfigurationError( - "Fatal Internal error [id=1]. Please report as a bug." - ) - - # The section may be empty after the option was removed. - if not parser.items(section): - parser.remove_section(section) - self._mark_as_modified(fname, parser) - - del self._config[self.load_only][key] - - def save(self) -> None: - """Save the current in-memory state.""" - self._ensure_have_load_only() - - for fname, parser in self._modified_parsers: - logger.info("Writing to %s", fname) - - # Ensure directory exists. - ensure_dir(os.path.dirname(fname)) - - with open(fname, "w") as f: - parser.write(f) - - # - # Private routines - # - - def _ensure_have_load_only(self) -> None: - if self.load_only is None: - raise ConfigurationError("Needed a specific file to be modifying.") - logger.debug("Will be working with %s variant only", self.load_only) - - @property - def _dictionary(self) -> Dict[str, Any]: - """A dictionary representing the loaded configuration.""" - # NOTE: Dictionaries are not populated if not loaded. 
So, conditionals - # are not needed here. - retval = {} - - for variant in OVERRIDE_ORDER: - retval.update(self._config[variant]) - - return retval - - def _load_config_files(self) -> None: - """Loads configuration from configuration files""" - config_files = dict(self.iter_config_files()) - if config_files[kinds.ENV][0:1] == [os.devnull]: - logger.debug( - "Skipping loading configuration files due to " - "environment's PIP_CONFIG_FILE being os.devnull" - ) - return - - for variant, files in config_files.items(): - for fname in files: - # If there's specific variant set in `load_only`, load only - # that variant, not the others. - if self.load_only is not None and variant != self.load_only: - logger.debug("Skipping file '%s' (variant: %s)", fname, variant) - continue - - parser = self._load_file(variant, fname) - - # Keeping track of the parsers used - self._parsers[variant].append((fname, parser)) - - def _load_file(self, variant: Kind, fname: str) -> RawConfigParser: - logger.verbose("For variant '%s', will try loading '%s'", variant, fname) - parser = self._construct_parser(fname) - - for section in parser.sections(): - items = parser.items(section) - self._config[variant].update(self._normalized_keys(section, items)) - - return parser - - def _construct_parser(self, fname: str) -> RawConfigParser: - parser = configparser.RawConfigParser() - # If there is no such file, don't bother reading it but create the - # parser anyway, to hold the data. - # Doing this is useful when modifying and saving files, where we don't - # need to construct a parser. - if os.path.exists(fname): - locale_encoding = locale.getpreferredencoding(False) - try: - parser.read(fname, encoding=locale_encoding) - except UnicodeDecodeError: - # See https://github.com/pypa/pip/issues/4963 - raise ConfigurationFileCouldNotBeLoaded( - reason=f"contains invalid {locale_encoding} characters", - fname=fname, - ) - except configparser.Error as error: - # See https://github.com/pypa/pip/issues/4893 - raise ConfigurationFileCouldNotBeLoaded(error=error) - return parser - - def _load_environment_vars(self) -> None: - """Loads configuration from environment variables""" - self._config[kinds.ENV_VAR].update( - self._normalized_keys(":env:", self.get_environ_vars()) - ) - - def _normalized_keys( - self, section: str, items: Iterable[Tuple[str, Any]] - ) -> Dict[str, Any]: - """Normalizes items to construct a dictionary with normalized keys. - - This routine is where the names become keys and are made the same - regardless of source - configuration files or environment. - """ - normalized = {} - for name, val in items: - key = section + "." + _normalize_name(name) - normalized[key] = val - return normalized - - def get_environ_vars(self) -> Iterable[Tuple[str, str]]: - """Returns a generator with all environmental vars with prefix PIP_""" - for key, val in os.environ.items(): - if key.startswith("PIP_"): - name = key[4:].lower() - if name not in ENV_NAMES_IGNORED: - yield name, val - - # XXX: This is patched in the tests. - def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: - """Yields variant and configuration files associated with it. - - This should be treated like items of a dictionary. 
- """ - # SMELL: Move the conditions out of this function - - # environment variables have the lowest priority - config_file = os.environ.get("PIP_CONFIG_FILE", None) - if config_file is not None: - yield kinds.ENV, [config_file] - else: - yield kinds.ENV, [] - - config_files = get_configuration_files() - - # at the base we have any global configuration - yield kinds.GLOBAL, config_files[kinds.GLOBAL] - - # per-user configuration next - should_load_user_config = not self.isolated and not ( - config_file and os.path.exists(config_file) - ) - if should_load_user_config: - # The legacy config file is overridden by the new config file - yield kinds.USER, config_files[kinds.USER] - - # finally virtualenv configuration first trumping others - yield kinds.SITE, config_files[kinds.SITE] - - def get_values_in_config(self, variant: Kind) -> Dict[str, Any]: - """Get values present in a config file""" - return self._config[variant] - - def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]: - # Determine which parser to modify - assert self.load_only - parsers = self._parsers[self.load_only] - if not parsers: - # This should not happen if everything works correctly. - raise ConfigurationError( - "Fatal Internal error [id=2]. Please report as a bug." - ) - - # Use the highest priority parser. - return parsers[-1] - - # XXX: This is patched in the tests. - def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None: - file_parser_tuple = (fname, parser) - if file_parser_tuple not in self._modified_parsers: - self._modified_parsers.append(file_parser_tuple) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self._dictionary!r})" diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__init__.py b/venv/Lib/site-packages/pip/_internal/distributions/__init__.py deleted file mode 100644 index 9a89a83..0000000 --- a/venv/Lib/site-packages/pip/_internal/distributions/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -from pip._internal.distributions.base import AbstractDistribution -from pip._internal.distributions.sdist import SourceDistribution -from pip._internal.distributions.wheel import WheelDistribution -from pip._internal.req.req_install import InstallRequirement - - -def make_distribution_for_install_requirement( - install_req: InstallRequirement, -) -> AbstractDistribution: - """Returns a Distribution for the given InstallRequirement""" - # Editable requirements will always be source distributions. They use the - # legacy logic until we create a modern standard for them. 
- if install_req.editable: - return SourceDistribution(install_req) - - # If it's a wheel, it's a WheelDistribution - if install_req.is_wheel: - return WheelDistribution(install_req) - - # Otherwise, a SourceDistribution - return SourceDistribution(install_req) diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 7995837..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc deleted file mode 100644 index 8c4f740..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc deleted file mode 100644 index e937afb..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc deleted file mode 100644 index 5a83f7a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index d906d02..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/distributions/base.py b/venv/Lib/site-packages/pip/_internal/distributions/base.py deleted file mode 100644 index 149fff5..0000000 --- a/venv/Lib/site-packages/pip/_internal/distributions/base.py +++ /dev/null @@ -1,36 +0,0 @@ -import abc - -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata.base import BaseDistribution -from pip._internal.req import InstallRequirement - - -class AbstractDistribution(metaclass=abc.ABCMeta): - """A base class for handling installable artifacts. - - The requirements for anything installable are as follows: - - - we must be able to determine the requirement name - (or we can't correctly handle the non-upgrade case). - - - for packages with setup requirements, we must also be able - to determine their requirements without installing additional - packages (for the same reason as run-time dependencies) - - - we must be able to create a Distribution object exposing the - above metadata. 
- """ - - def __init__(self, req: InstallRequirement) -> None: - super().__init__() - self.req = req - - @abc.abstractmethod - def get_metadata_distribution(self) -> BaseDistribution: - raise NotImplementedError() - - @abc.abstractmethod - def prepare_distribution_metadata( - self, finder: PackageFinder, build_isolation: bool - ) -> None: - raise NotImplementedError() diff --git a/venv/Lib/site-packages/pip/_internal/distributions/installed.py b/venv/Lib/site-packages/pip/_internal/distributions/installed.py deleted file mode 100644 index be5962f..0000000 --- a/venv/Lib/site-packages/pip/_internal/distributions/installed.py +++ /dev/null @@ -1,20 +0,0 @@ -from pip._internal.distributions.base import AbstractDistribution -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution - - -class InstalledDistribution(AbstractDistribution): - """Represents an installed package. - - This does not need any preparation as the required information has already - been computed. - """ - - def get_metadata_distribution(self) -> BaseDistribution: - assert self.req.satisfied_by is not None, "not actually installed" - return self.req.satisfied_by - - def prepare_distribution_metadata( - self, finder: PackageFinder, build_isolation: bool - ) -> None: - pass diff --git a/venv/Lib/site-packages/pip/_internal/distributions/sdist.py b/venv/Lib/site-packages/pip/_internal/distributions/sdist.py deleted file mode 100644 index bdaf403..0000000 --- a/venv/Lib/site-packages/pip/_internal/distributions/sdist.py +++ /dev/null @@ -1,127 +0,0 @@ -import logging -from typing import Iterable, Set, Tuple - -from pip._internal.build_env import BuildEnvironment -from pip._internal.distributions.base import AbstractDistribution -from pip._internal.exceptions import InstallationError -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution -from pip._internal.utils.subprocess import runner_with_spinner_message - -logger = logging.getLogger(__name__) - - -class SourceDistribution(AbstractDistribution): - """Represents a source distribution. - - The preparation step for these needs metadata for the packages to be - generated, either using PEP 517 or using the legacy `setup.py egg_info`. - """ - - def get_metadata_distribution(self) -> BaseDistribution: - return self.req.get_dist() - - def prepare_distribution_metadata( - self, finder: PackageFinder, build_isolation: bool - ) -> None: - # Load pyproject.toml, to determine whether PEP 517 is to be used - self.req.load_pyproject_toml() - - # Set up the build isolation, if this requirement should be isolated - should_isolate = self.req.use_pep517 and build_isolation - if should_isolate: - # Setup an isolated environment and install the build backend static - # requirements in it. - self._prepare_build_backend(finder) - # Check that if the requirement is editable, it either supports PEP 660 or - # has a setup.py or a setup.cfg. This cannot be done earlier because we need - # to setup the build backend to verify it supports build_editable, nor can - # it be done later, because we want to avoid installing build requirements - # needlessly. Doing it here also works around setuptools generating - # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory - # without setup.py nor setup.cfg. - self.req.isolated_editable_sanity_check() - # Install the dynamic build requirements. 
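# For context, a minimal sketch of where these dynamic build requirements
# come from: the PEP 517 hook get_requires_for_build_wheel. This assumes a
# setuptools-based project with the current working directory at the project
# root and calls the hook in-process; pip instead runs it in a subprocess
# inside the isolated build environment, as the surrounding code shows.
import setuptools.build_meta as build_backend

extra_reqs = build_backend.get_requires_for_build_wheel()
print(extra_reqs)  # typically [] or ['wheel'], depending on the setuptools version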
- self._install_build_reqs(finder) - - self.req.prepare_metadata() - - def _prepare_build_backend(self, finder: PackageFinder) -> None: - # Isolate in a BuildEnvironment and install the build-time - # requirements. - pyproject_requires = self.req.pyproject_requires - assert pyproject_requires is not None - - self.req.build_env = BuildEnvironment() - self.req.build_env.install_requirements( - finder, pyproject_requires, "overlay", kind="build dependencies" - ) - conflicting, missing = self.req.build_env.check_requirements( - self.req.requirements_to_check - ) - if conflicting: - self._raise_conflicts("PEP 517/518 supported requirements", conflicting) - if missing: - logger.warning( - "Missing build requirements in pyproject.toml for %s.", - self.req, - ) - logger.warning( - "The project does not specify a build backend, and " - "pip cannot fall back to setuptools without %s.", - " and ".join(map(repr, sorted(missing))), - ) - - def _get_build_requires_wheel(self) -> Iterable[str]: - with self.req.build_env: - runner = runner_with_spinner_message("Getting requirements to build wheel") - backend = self.req.pep517_backend - assert backend is not None - with backend.subprocess_runner(runner): - return backend.get_requires_for_build_wheel() - - def _get_build_requires_editable(self) -> Iterable[str]: - with self.req.build_env: - runner = runner_with_spinner_message( - "Getting requirements to build editable" - ) - backend = self.req.pep517_backend - assert backend is not None - with backend.subprocess_runner(runner): - return backend.get_requires_for_build_editable() - - def _install_build_reqs(self, finder: PackageFinder) -> None: - # Install any extra build dependencies that the backend requests. - # This must be done in a second pass, as the pyproject.toml - # dependencies must be installed before we can call the backend. - if ( - self.req.editable - and self.req.permit_editable_wheels - and self.req.supports_pyproject_editable() - ): - build_reqs = self._get_build_requires_editable() - else: - build_reqs = self._get_build_requires_wheel() - conflicting, missing = self.req.build_env.check_requirements(build_reqs) - if conflicting: - self._raise_conflicts("the backend dependencies", conflicting) - self.req.build_env.install_requirements( - finder, missing, "normal", kind="backend dependencies" - ) - - def _raise_conflicts( - self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]] - ) -> None: - format_string = ( - "Some build dependencies for {requirement} " - "conflict with {conflicting_with}: {description}." - ) - error_message = format_string.format( - requirement=self.req, - conflicting_with=conflicting_with, - description=", ".join( - f"{installed} is incompatible with {wanted}" - for installed, wanted in sorted(conflicting_reqs) - ), - ) - raise InstallationError(error_message) diff --git a/venv/Lib/site-packages/pip/_internal/distributions/wheel.py b/venv/Lib/site-packages/pip/_internal/distributions/wheel.py deleted file mode 100644 index 340b0f3..0000000 --- a/venv/Lib/site-packages/pip/_internal/distributions/wheel.py +++ /dev/null @@ -1,31 +0,0 @@ -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.distributions.base import AbstractDistribution -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import ( - BaseDistribution, - FilesystemWheel, - get_wheel_distribution, -) - - -class WheelDistribution(AbstractDistribution): - """Represents a wheel distribution. 
- - This does not need any preparation as wheels can be directly unpacked. - """ - - def get_metadata_distribution(self) -> BaseDistribution: - """Loads the metadata from the wheel file into memory and returns a - Distribution that uses it, not relying on the wheel file or - requirement. - """ - assert self.req.local_file_path, "Set as part of preparation during download" - assert self.req.name, "Wheels are never unnamed" - wheel = FilesystemWheel(self.req.local_file_path) - return get_wheel_distribution(wheel, canonicalize_name(self.req.name)) - - def prepare_distribution_metadata( - self, finder: PackageFinder, build_isolation: bool - ) -> None: - pass diff --git a/venv/Lib/site-packages/pip/_internal/exceptions.py b/venv/Lib/site-packages/pip/_internal/exceptions.py deleted file mode 100644 index 97b9612..0000000 --- a/venv/Lib/site-packages/pip/_internal/exceptions.py +++ /dev/null @@ -1,658 +0,0 @@ -"""Exceptions used throughout package. - -This module MUST NOT try to import from anything within `pip._internal` to -operate. This is expected to be importable from any/all files within the -subpackage and, thus, should not depend on them. -""" - -import configparser -import re -from itertools import chain, groupby, repeat -from typing import TYPE_CHECKING, Dict, List, Optional, Union - -from pip._vendor.requests.models import Request, Response -from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult -from pip._vendor.rich.markup import escape -from pip._vendor.rich.text import Text - -if TYPE_CHECKING: - from hashlib import _Hash - from typing import Literal - - from pip._internal.metadata import BaseDistribution - from pip._internal.req.req_install import InstallRequirement - - -# -# Scaffolding -# -def _is_kebab_case(s: str) -> bool: - return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None - - -def _prefix_with_indent( - s: Union[Text, str], - console: Console, - *, - prefix: str, - indent: str, -) -> Text: - if isinstance(s, Text): - text = s - else: - text = console.render_str(s) - - return console.render_str(prefix, overflow="ignore") + console.render_str( - f"\n{indent}", overflow="ignore" - ).join(text.split(allow_blank=True)) - - -class PipError(Exception): - """The base pip error.""" - - -class DiagnosticPipError(PipError): - """An error, that presents diagnostic information to the user. - - This contains a bunch of logic, to enable pretty presentation of our error - messages. Each error gets a unique reference. Each error can also include - additional context, a hint and/or a note -- which are presented with the - main error message in a consistent style. - - This is adapted from the error output styling in `sphinx-theme-builder`. - """ - - reference: str - - def __init__( - self, - *, - kind: 'Literal["error", "warning"]' = "error", - reference: Optional[str] = None, - message: Union[str, Text], - context: Optional[Union[str, Text]], - hint_stmt: Optional[Union[str, Text]], - note_stmt: Optional[Union[str, Text]] = None, - link: Optional[str] = None, - ) -> None: - # Ensure a proper reference is provided. - if reference is None: - assert hasattr(self, "reference"), "error reference not provided!" - reference = self.reference - assert _is_kebab_case(reference), "error reference must be kebab-case!" 
- - self.kind = kind - self.reference = reference - - self.message = message - self.context = context - - self.note_stmt = note_stmt - self.hint_stmt = hint_stmt - - self.link = link - - super().__init__(f"<{self.__class__.__name__}: {self.reference}>") - - def __repr__(self) -> str: - return ( - f"<{self.__class__.__name__}(" - f"reference={self.reference!r}, " - f"message={self.message!r}, " - f"context={self.context!r}, " - f"note_stmt={self.note_stmt!r}, " - f"hint_stmt={self.hint_stmt!r}" - ")>" - ) - - def __rich_console__( - self, - console: Console, - options: ConsoleOptions, - ) -> RenderResult: - colour = "red" if self.kind == "error" else "yellow" - - yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]" - yield "" - - if not options.ascii_only: - # Present the main message, with relevant context indented. - if self.context is not None: - yield _prefix_with_indent( - self.message, - console, - prefix=f"[{colour}]×[/] ", - indent=f"[{colour}]│[/] ", - ) - yield _prefix_with_indent( - self.context, - console, - prefix=f"[{colour}]╰─>[/] ", - indent=f"[{colour}] [/] ", - ) - else: - yield _prefix_with_indent( - self.message, - console, - prefix="[red]×[/] ", - indent=" ", - ) - else: - yield self.message - if self.context is not None: - yield "" - yield self.context - - if self.note_stmt is not None or self.hint_stmt is not None: - yield "" - - if self.note_stmt is not None: - yield _prefix_with_indent( - self.note_stmt, - console, - prefix="[magenta bold]note[/]: ", - indent=" ", - ) - if self.hint_stmt is not None: - yield _prefix_with_indent( - self.hint_stmt, - console, - prefix="[cyan bold]hint[/]: ", - indent=" ", - ) - - if self.link is not None: - yield "" - yield f"Link: {self.link}" - - -# -# Actual Errors -# -class ConfigurationError(PipError): - """General exception in configuration""" - - -class InstallationError(PipError): - """General exception during installation""" - - -class UninstallationError(PipError): - """General exception during uninstallation""" - - -class MissingPyProjectBuildRequires(DiagnosticPipError): - """Raised when pyproject.toml has `build-system`, but no `build-system.requires`.""" - - reference = "missing-pyproject-build-system-requires" - - def __init__(self, *, package: str) -> None: - super().__init__( - message=f"Can not process {escape(package)}", - context=Text( - "This package has an invalid pyproject.toml file.\n" - "The [build-system] table is missing the mandatory `requires` key." - ), - note_stmt="This is an issue with the package mentioned above, not pip.", - hint_stmt=Text("See PEP 518 for the detailed specification."), - ) - - -class InvalidPyProjectBuildRequires(DiagnosticPipError): - """Raised when pyproject.toml an invalid `build-system.requires`.""" - - reference = "invalid-pyproject-build-system-requires" - - def __init__(self, *, package: str, reason: str) -> None: - super().__init__( - message=f"Can not process {escape(package)}", - context=Text( - "This package has an invalid `build-system.requires` key in " - f"pyproject.toml.\n{reason}" - ), - note_stmt="This is an issue with the package mentioned above, not pip.", - hint_stmt=Text("See PEP 518 for the detailed specification."), - ) - - -class NoneMetadataError(PipError): - """Raised when accessing a Distribution's "METADATA" or "PKG-INFO". - - This signifies an inconsistency, when the Distribution claims to have - the metadata file (if not, raise ``FileNotFoundError`` instead), but is - not actually able to produce its content. 
This may be due to permission - errors. - """ - - def __init__( - self, - dist: "BaseDistribution", - metadata_name: str, - ) -> None: - """ - :param dist: A Distribution object. - :param metadata_name: The name of the metadata being accessed - (can be "METADATA" or "PKG-INFO"). - """ - self.dist = dist - self.metadata_name = metadata_name - - def __str__(self) -> str: - # Use `dist` in the error message because its stringification - # includes more information, like the version and location. - return "None {} metadata found for distribution: {}".format( - self.metadata_name, - self.dist, - ) - - -class UserInstallationInvalid(InstallationError): - """A --user install is requested on an environment without user site.""" - - def __str__(self) -> str: - return "User base directory is not specified" - - -class InvalidSchemeCombination(InstallationError): - def __str__(self) -> str: - before = ", ".join(str(a) for a in self.args[:-1]) - return f"Cannot set {before} and {self.args[-1]} together" - - -class DistributionNotFound(InstallationError): - """Raised when a distribution cannot be found to satisfy a requirement""" - - -class RequirementsFileParseError(InstallationError): - """Raised when a general error occurs parsing a requirements file line.""" - - -class BestVersionAlreadyInstalled(PipError): - """Raised when the most up-to-date version of a package is already - installed.""" - - -class BadCommand(PipError): - """Raised when virtualenv or a command is not found""" - - -class CommandError(PipError): - """Raised when there is an error in command-line arguments""" - - -class PreviousBuildDirError(PipError): - """Raised when there's a previous conflicting build directory""" - - -class NetworkConnectionError(PipError): - """HTTP connection error""" - - def __init__( - self, error_msg: str, response: Response = None, request: Request = None - ) -> None: - """ - Initialize NetworkConnectionError with `request` and `response` - objects. - """ - self.response = response - self.request = request - self.error_msg = error_msg - if ( - self.response is not None - and not self.request - and hasattr(response, "request") - ): - self.request = self.response.request - super().__init__(error_msg, response, request) - - def __str__(self) -> str: - return str(self.error_msg) - - -class InvalidWheelFilename(InstallationError): - """Invalid wheel filename.""" - - -class UnsupportedWheel(InstallationError): - """Unsupported wheel.""" - - -class InvalidWheel(InstallationError): - """Invalid (e.g. corrupt) wheel.""" - - def __init__(self, location: str, name: str): - self.location = location - self.name = name - - def __str__(self) -> str: - return f"Wheel '{self.name}' located at {self.location} is invalid." - - -class MetadataInconsistent(InstallationError): - """Built metadata contains inconsistent information. - - This is raised when the metadata contains values (e.g. name and version) - that do not match the information previously obtained from sdist filename - or user-supplied ``#egg=`` value. 
- """ - - def __init__( - self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str - ) -> None: - self.ireq = ireq - self.field = field - self.f_val = f_val - self.m_val = m_val - - def __str__(self) -> str: - template = ( - "Requested {} has inconsistent {}: " - "filename has {!r}, but metadata has {!r}" - ) - return template.format(self.ireq, self.field, self.f_val, self.m_val) - - -class LegacyInstallFailure(DiagnosticPipError): - """Error occurred while executing `setup.py install`""" - - reference = "legacy-install-failure" - - def __init__(self, package_details: str) -> None: - super().__init__( - message="Encountered error while trying to install package.", - context=package_details, - hint_stmt="See above for output from the failure.", - note_stmt="This is an issue with the package mentioned above, not pip.", - ) - - -class InstallationSubprocessError(DiagnosticPipError, InstallationError): - """A subprocess call failed.""" - - reference = "subprocess-exited-with-error" - - def __init__( - self, - *, - command_description: str, - exit_code: int, - output_lines: Optional[List[str]], - ) -> None: - if output_lines is None: - output_prompt = Text("See above for output.") - else: - output_prompt = ( - Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n") - + Text("".join(output_lines)) - + Text.from_markup(R"[red]\[end of output][/]") - ) - - super().__init__( - message=( - f"[green]{escape(command_description)}[/] did not run successfully.\n" - f"exit code: {exit_code}" - ), - context=output_prompt, - hint_stmt=None, - note_stmt=( - "This error originates from a subprocess, and is likely not a " - "problem with pip." - ), - ) - - self.command_description = command_description - self.exit_code = exit_code - - def __str__(self) -> str: - return f"{self.command_description} exited with {self.exit_code}" - - -class MetadataGenerationFailed(InstallationSubprocessError, InstallationError): - reference = "metadata-generation-failed" - - def __init__( - self, - *, - package_details: str, - ) -> None: - super(InstallationSubprocessError, self).__init__( - message="Encountered error while generating package metadata.", - context=escape(package_details), - hint_stmt="See above for details.", - note_stmt="This is an issue with the package mentioned above, not pip.", - ) - - def __str__(self) -> str: - return "metadata generation failed" - - -class HashErrors(InstallationError): - """Multiple HashError instances rolled into one for reporting""" - - def __init__(self) -> None: - self.errors: List["HashError"] = [] - - def append(self, error: "HashError") -> None: - self.errors.append(error) - - def __str__(self) -> str: - lines = [] - self.errors.sort(key=lambda e: e.order) - for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): - lines.append(cls.head) - lines.extend(e.body() for e in errors_of_cls) - if lines: - return "\n".join(lines) - return "" - - def __bool__(self) -> bool: - return bool(self.errors) - - -class HashError(InstallationError): - """ - A failure to verify a package against known-good hashes - - :cvar order: An int sorting hash exception classes by difficulty of - recovery (lower being harder), so the user doesn't bother fretting - about unpinned packages when he has deeper issues, like VCS - dependencies, to deal with. Also keeps error reports in a - deterministic order. - :cvar head: A section heading for display above potentially many - exceptions of this kind - :ivar req: The InstallRequirement that triggered this error. 
This is - pasted on after the exception is instantiated, because it's not - typically available earlier. - - """ - - req: Optional["InstallRequirement"] = None - head = "" - order: int = -1 - - def body(self) -> str: - """Return a summary of me for display under the heading. - - This default implementation simply prints a description of the - triggering requirement. - - :param req: The InstallRequirement that provoked this error, with - its link already populated by the resolver's _populate_link(). - - """ - return f" {self._requirement_name()}" - - def __str__(self) -> str: - return f"{self.head}\n{self.body()}" - - def _requirement_name(self) -> str: - """Return a description of the requirement that triggered me. - - This default implementation returns long description of the req, with - line numbers - - """ - return str(self.req) if self.req else "unknown package" - - -class VcsHashUnsupported(HashError): - """A hash was provided for a version-control-system-based requirement, but - we don't have a method for hashing those.""" - - order = 0 - head = ( - "Can't verify hashes for these requirements because we don't " - "have a way to hash version control repositories:" - ) - - -class DirectoryUrlHashUnsupported(HashError): - """A hash was provided for a version-control-system-based requirement, but - we don't have a method for hashing those.""" - - order = 1 - head = ( - "Can't verify hashes for these file:// requirements because they " - "point to directories:" - ) - - -class HashMissing(HashError): - """A hash was needed for a requirement but is absent.""" - - order = 2 - head = ( - "Hashes are required in --require-hashes mode, but they are " - "missing from some requirements. Here is a list of those " - "requirements along with the hashes their downloaded archives " - "actually had. Add lines like these to your requirements files to " - "prevent tampering. (If you did not enable --require-hashes " - "manually, note that it turns on automatically when any package " - "has a hash.)" - ) - - def __init__(self, gotten_hash: str) -> None: - """ - :param gotten_hash: The hash of the (possibly malicious) archive we - just downloaded - """ - self.gotten_hash = gotten_hash - - def body(self) -> str: - # Dodge circular import. - from pip._internal.utils.hashes import FAVORITE_HASH - - package = None - if self.req: - # In the case of URL-based requirements, display the original URL - # seen in the requirements file rather than the package name, - # so the output can be directly copied into the requirements file. - package = ( - self.req.original_link - if self.req.original_link - # In case someone feeds something downright stupid - # to InstallRequirement's constructor. - else getattr(self.req, "req", None) - ) - return " {} --hash={}:{}".format( - package or "unknown package", FAVORITE_HASH, self.gotten_hash - ) - - -class HashUnpinned(HashError): - """A requirement had a hash specified but was not pinned to a specific - version.""" - - order = 3 - head = ( - "In --require-hashes mode, all requirements must have their " - "versions pinned with ==. These do not:" - ) - - -class HashMismatch(HashError): - """ - Distribution file hash values don't match. - - :ivar package_name: The name of the package that triggered the hash - mismatch. Feel free to write to this after the exception is raise to - improve its error message. - - """ - - order = 4 - head = ( - "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS " - "FILE. 
If you have updated the package versions, please update " - "the hashes. Otherwise, examine the package contents carefully; " - "someone may have tampered with them." - ) - - def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None: - """ - :param allowed: A dict of algorithm names pointing to lists of allowed - hex digests - :param gots: A dict of algorithm names pointing to hashes we - actually got from the files under suspicion - """ - self.allowed = allowed - self.gots = gots - - def body(self) -> str: - return " {}:\n{}".format(self._requirement_name(), self._hash_comparison()) - - def _hash_comparison(self) -> str: - """ - Return a comparison of actual and expected hash values. - - Example:: - - Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde - or 123451234512345123451234512345123451234512345 - Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef - - """ - - def hash_then_or(hash_name: str) -> "chain[str]": - # For now, all the decent hashes have 6-char names, so we can get - # away with hard-coding space literals. - return chain([hash_name], repeat(" or")) - - lines: List[str] = [] - for hash_name, expecteds in self.allowed.items(): - prefix = hash_then_or(hash_name) - lines.extend( - (" Expected {} {}".format(next(prefix), e)) for e in expecteds - ) - lines.append( - " Got {}\n".format(self.gots[hash_name].hexdigest()) - ) - return "\n".join(lines) - - -class UnsupportedPythonVersion(InstallationError): - """Unsupported python version according to Requires-Python package - metadata.""" - - -class ConfigurationFileCouldNotBeLoaded(ConfigurationError): - """When there are errors while loading a configuration file""" - - def __init__( - self, - reason: str = "could not be loaded", - fname: Optional[str] = None, - error: Optional[configparser.Error] = None, - ) -> None: - super().__init__(error) - self.reason = reason - self.fname = fname - self.error = error - - def __str__(self) -> str: - if self.fname is not None: - message_part = f" in {self.fname}." 
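The hash exceptions above back pip's `--require-hashes` mode, where every requirement carries one or more `--hash=sha256:...` pins and a downloaded archive is rejected on mismatch. A small self-contained sketch of the underlying check (illustrative only, not pip's API), assuming a downloaded archive path and a set of allowed sha256 hex digests:

# Self-contained sketch of the check behind --require-hashes (illustrative, not pip's API).
import hashlib
from typing import Set


def check_archive_hash(path: str, allowed_sha256: Set[str]) -> None:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    got = digest.hexdigest()
    if got not in allowed_sha256:
        # Mirrors the "Expected ... / Got ..." comparison produced by HashMismatch above.
        expected = "\n             or ".join(sorted(allowed_sha256))
        raise ValueError(
            f"Hash mismatch for {path}:\n"
            f"    Expected sha256 {expected}\n"
            f"         Got        {got}"
        )

In a requirements file the corresponding pin looks like, for example, `somepackage==1.0 --hash=sha256:<digest>`; `HashMissing` above prints exactly that form so it can be pasted back into the file.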
- else: - assert self.error is not None - message_part = f".\n{self.error}\n" - return f"Configuration file {self.reason}{message_part}" diff --git a/venv/Lib/site-packages/pip/_internal/index/__init__.py b/venv/Lib/site-packages/pip/_internal/index/__init__.py deleted file mode 100644 index 7a17b7b..0000000 --- a/venv/Lib/site-packages/pip/_internal/index/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -"""Index interaction code -""" diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 2123085..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc deleted file mode 100644 index 7268ff3..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc deleted file mode 100644 index cbd4f60..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc deleted file mode 100644 index b650f8a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/index/collector.py b/venv/Lib/site-packages/pip/_internal/index/collector.py deleted file mode 100644 index e6e9469..0000000 --- a/venv/Lib/site-packages/pip/_internal/index/collector.py +++ /dev/null @@ -1,610 +0,0 @@ -""" -The main purpose of this module is to expose LinkCollector.collect_sources(). -""" - -import cgi -import collections -import functools -import itertools -import logging -import os -import re -import urllib.parse -import urllib.request -import xml.etree.ElementTree -from html.parser import HTMLParser -from optparse import Values -from typing import ( - TYPE_CHECKING, - Callable, - Dict, - Iterable, - List, - MutableMapping, - NamedTuple, - Optional, - Sequence, - Tuple, - Union, -) - -from pip._vendor import html5lib, requests -from pip._vendor.requests import Response -from pip._vendor.requests.exceptions import RetryError, SSLError - -from pip._internal.exceptions import NetworkConnectionError -from pip._internal.models.link import Link -from pip._internal.models.search_scope import SearchScope -from pip._internal.network.session import PipSession -from pip._internal.network.utils import raise_for_status -from pip._internal.utils.filetypes import is_archive_file -from pip._internal.utils.misc import pairwise, redact_auth_from_url -from pip._internal.vcs import vcs - -from .sources import CandidatesFromPage, LinkSource, build_source - -if TYPE_CHECKING: - from typing import Protocol -else: - Protocol = object - -logger = logging.getLogger(__name__) - -HTMLElement = xml.etree.ElementTree.Element -ResponseHeaders = MutableMapping[str, str] - - -def _match_vcs_scheme(url: str) -> Optional[str]: - """Look for VCS schemes in the URL. 
- - Returns the matched VCS scheme, or None if there's no match. - """ - for scheme in vcs.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in "+:": - return scheme - return None - - -class _NotHTML(Exception): - def __init__(self, content_type: str, request_desc: str) -> None: - super().__init__(content_type, request_desc) - self.content_type = content_type - self.request_desc = request_desc - - -def _ensure_html_header(response: Response) -> None: - """Check the Content-Type header to ensure the response contains HTML. - - Raises `_NotHTML` if the content type is not text/html. - """ - content_type = response.headers.get("Content-Type", "") - if not content_type.lower().startswith("text/html"): - raise _NotHTML(content_type, response.request.method) - - -class _NotHTTP(Exception): - pass - - -def _ensure_html_response(url: str, session: PipSession) -> None: - """Send a HEAD request to the URL, and ensure the response contains HTML. - - Raises `_NotHTTP` if the URL is not available for a HEAD request, or - `_NotHTML` if the content type is not text/html. - """ - scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url) - if scheme not in {"http", "https"}: - raise _NotHTTP() - - resp = session.head(url, allow_redirects=True) - raise_for_status(resp) - - _ensure_html_header(resp) - - -def _get_html_response(url: str, session: PipSession) -> Response: - """Access an HTML page with GET, and return the response. - - This consists of three parts: - - 1. If the URL looks suspiciously like an archive, send a HEAD first to - check the Content-Type is HTML, to avoid downloading a large file. - Raise `_NotHTTP` if the content type cannot be determined, or - `_NotHTML` if it is not HTML. - 2. Actually perform the request. Raise HTTP exceptions on network failures. - 3. Check the Content-Type header to make sure we got HTML, and raise - `_NotHTML` otherwise. - """ - if is_archive_file(Link(url).filename): - _ensure_html_response(url, session=session) - - logger.debug("Getting page %s", redact_auth_from_url(url)) - - resp = session.get( - url, - headers={ - "Accept": "text/html", - # We don't want to blindly returned cached data for - # /simple/, because authors generally expecting that - # twine upload && pip install will function, but if - # they've done a pip install in the last ~10 minutes - # it won't. Thus by setting this to zero we will not - # blindly use any cached data, however the benefit of - # using max-age=0 instead of no-cache, is that we will - # still support conditional requests, so we will still - # minimize traffic sent in cases where the page hasn't - # changed at all, we will just always incur the round - # trip for the conditional GET now instead of only - # once per 10 minutes. - # For more information, please see pypa/pip#5670. - "Cache-Control": "max-age=0", - }, - ) - raise_for_status(resp) - - # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement of an url. Unless we issue a HEAD request on every - # url we cannot know ahead of time for sure if something is HTML - # or not. However we can check after we've downloaded it. 
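`_get_html_response` above avoids downloading a large archive by mistake: for archive-looking URLs it sends a HEAD request first, then GETs the page with `Cache-Control: max-age=0` so caches are revalidated while conditional requests stay possible. A bare-bones sketch of the same flow using the standalone `requests` package (an assumption; pip uses its own vendored session), simplified to always perform the HEAD check:

# Bare-bones index-page fetch with requests (simplified: always HEAD-check first).
import requests


def get_index_html(url: str) -> str:
    # Cheap HEAD request so a large archive is not downloaded just to learn it isn't HTML.
    head = requests.head(url, allow_redirects=True, timeout=10)
    head.raise_for_status()
    if not head.headers.get("Content-Type", "").lower().startswith("text/html"):
        raise ValueError(f"{url} did not return text/html")

    # max-age=0 avoids stale caches while still allowing cheap conditional requests.
    resp = requests.get(
        url,
        headers={"Accept": "text/html", "Cache-Control": "max-age=0"},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.text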
- _ensure_html_header(resp) - - return resp - - -def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]: - """Determine if we have any encoding information in our headers.""" - if headers and "Content-Type" in headers: - content_type, params = cgi.parse_header(headers["Content-Type"]) - if "charset" in params: - return params["charset"] - return None - - -def _determine_base_url(document: HTMLElement, page_url: str) -> str: - """Determine the HTML document's base URL. - - This looks for a ```` tag in the HTML document. If present, its href - attribute denotes the base URL of anchor tags in the document. If there is - no such tag (or if it does not have a valid href attribute), the HTML - file's URL is used as the base URL. - - :param document: An HTML document representation. The current - implementation expects the result of ``html5lib.parse()``. - :param page_url: The URL of the HTML document. - - TODO: Remove when `html5lib` is dropped. - """ - for base in document.findall(".//base"): - href = base.get("href") - if href is not None: - return href - return page_url - - -def _clean_url_path_part(part: str) -> str: - """ - Clean a "part" of a URL path (i.e. after splitting on "@" characters). - """ - # We unquote prior to quoting to make sure nothing is double quoted. - return urllib.parse.quote(urllib.parse.unquote(part)) - - -def _clean_file_url_path(part: str) -> str: - """ - Clean the first part of a URL path that corresponds to a local - filesystem path (i.e. the first part after splitting on "@" characters). - """ - # We unquote prior to quoting to make sure nothing is double quoted. - # Also, on Windows the path part might contain a drive letter which - # should not be quoted. On Linux where drive letters do not - # exist, the colon should be quoted. We rely on urllib.request - # to do the right thing here. - return urllib.request.pathname2url(urllib.request.url2pathname(part)) - - -# percent-encoded: / -_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) - - -def _clean_url_path(path: str, is_local_path: bool) -> str: - """ - Clean the path portion of a URL. - """ - if is_local_path: - clean_func = _clean_file_url_path - else: - clean_func = _clean_url_path_part - - # Split on the reserved characters prior to cleaning so that - # revision strings in VCS URLs are properly preserved. - parts = _reserved_chars_re.split(path) - - cleaned_parts = [] - for to_clean, reserved in pairwise(itertools.chain(parts, [""])): - cleaned_parts.append(clean_func(to_clean)) - # Normalize %xx escapes (e.g. %2f -> %2F) - cleaned_parts.append(reserved.upper()) - - return "".join(cleaned_parts) - - -def _clean_link(url: str) -> str: - """ - Make sure a link is fully quoted. - For example, if ' ' occurs in the URL, it will be replaced with "%20", - and without double-quoting other characters. - """ - # Split the URL into parts according to the general structure - # `scheme://netloc/path;parameters?query#fragment`. - result = urllib.parse.urlparse(url) - # If the netloc is empty, then the URL refers to a local filesystem path. - is_local_path = not result.netloc - path = _clean_url_path(result.path, is_local_path=is_local_path) - return urllib.parse.urlunparse(result._replace(path=path)) - - -def _create_link_from_element( - element_attribs: Dict[str, Optional[str]], - page_url: str, - base_url: str, -) -> Optional[Link]: - """ - Convert an anchor element's attributes in a simple repository page to a Link. 
- """ - href = element_attribs.get("href") - if not href: - return None - - url = _clean_link(urllib.parse.urljoin(base_url, href)) - pyrequire = element_attribs.get("data-requires-python") - yanked_reason = element_attribs.get("data-yanked") - - link = Link( - url, - comes_from=page_url, - requires_python=pyrequire, - yanked_reason=yanked_reason, - ) - - return link - - -class CacheablePageContent: - def __init__(self, page: "HTMLPage") -> None: - assert page.cache_link_parsing - self.page = page - - def __eq__(self, other: object) -> bool: - return isinstance(other, type(self)) and self.page.url == other.page.url - - def __hash__(self) -> int: - return hash(self.page.url) - - -class ParseLinks(Protocol): - def __call__( - self, page: "HTMLPage", use_deprecated_html5lib: bool - ) -> Iterable[Link]: - ... - - -def with_cached_html_pages(fn: ParseLinks) -> ParseLinks: - """ - Given a function that parses an Iterable[Link] from an HTMLPage, cache the - function's result (keyed by CacheablePageContent), unless the HTMLPage - `page` has `page.cache_link_parsing == False`. - """ - - @functools.lru_cache(maxsize=None) - def wrapper( - cacheable_page: CacheablePageContent, use_deprecated_html5lib: bool - ) -> List[Link]: - return list(fn(cacheable_page.page, use_deprecated_html5lib)) - - @functools.wraps(fn) - def wrapper_wrapper(page: "HTMLPage", use_deprecated_html5lib: bool) -> List[Link]: - if page.cache_link_parsing: - return wrapper(CacheablePageContent(page), use_deprecated_html5lib) - return list(fn(page, use_deprecated_html5lib)) - - return wrapper_wrapper - - -def _parse_links_html5lib(page: "HTMLPage") -> Iterable[Link]: - """ - Parse an HTML document, and yield its anchor elements as Link objects. - - TODO: Remove when `html5lib` is dropped. - """ - document = html5lib.parse( - page.content, - transport_encoding=page.encoding, - namespaceHTMLElements=False, - ) - - url = page.url - base_url = _determine_base_url(document, url) - for anchor in document.findall(".//a"): - link = _create_link_from_element( - anchor.attrib, - page_url=url, - base_url=base_url, - ) - if link is None: - continue - yield link - - -@with_cached_html_pages -def parse_links(page: "HTMLPage", use_deprecated_html5lib: bool) -> Iterable[Link]: - """ - Parse an HTML document, and yield its anchor elements as Link objects. - """ - - if use_deprecated_html5lib: - yield from _parse_links_html5lib(page) - return - - parser = HTMLLinkParser(page.url) - encoding = page.encoding or "utf-8" - parser.feed(page.content.decode(encoding)) - - url = page.url - base_url = parser.base_url or url - for anchor in parser.anchors: - link = _create_link_from_element( - anchor, - page_url=url, - base_url=base_url, - ) - if link is None: - continue - yield link - - -class HTMLPage: - """Represents one page, along with its URL""" - - def __init__( - self, - content: bytes, - encoding: Optional[str], - url: str, - cache_link_parsing: bool = True, - ) -> None: - """ - :param encoding: the encoding to decode the given content. - :param url: the URL from which the HTML was downloaded. - :param cache_link_parsing: whether links parsed from this page's url - should be cached. PyPI index urls should - have this set to False, for example. 
- """ - self.content = content - self.encoding = encoding - self.url = url - self.cache_link_parsing = cache_link_parsing - - def __str__(self) -> str: - return redact_auth_from_url(self.url) - - -class HTMLLinkParser(HTMLParser): - """ - HTMLParser that keeps the first base HREF and a list of all anchor - elements' attributes. - """ - - def __init__(self, url: str) -> None: - super().__init__(convert_charrefs=True) - - self.url: str = url - self.base_url: Optional[str] = None - self.anchors: List[Dict[str, Optional[str]]] = [] - - def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: - if tag == "base" and self.base_url is None: - href = self.get_href(attrs) - if href is not None: - self.base_url = href - elif tag == "a": - self.anchors.append(dict(attrs)) - - def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]: - for name, value in attrs: - if name == "href": - return value - return None - - -def _handle_get_page_fail( - link: Link, - reason: Union[str, Exception], - meth: Optional[Callable[..., None]] = None, -) -> None: - if meth is None: - meth = logger.debug - meth("Could not fetch URL %s: %s - skipping", link, reason) - - -def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage: - encoding = _get_encoding_from_headers(response.headers) - return HTMLPage( - response.content, - encoding=encoding, - url=response.url, - cache_link_parsing=cache_link_parsing, - ) - - -def _get_html_page( - link: Link, session: Optional[PipSession] = None -) -> Optional["HTMLPage"]: - if session is None: - raise TypeError( - "_get_html_page() missing 1 required keyword argument: 'session'" - ) - - url = link.url.split("#", 1)[0] - - # Check for VCS schemes that do not support lookup as web pages. - vcs_scheme = _match_vcs_scheme(url) - if vcs_scheme: - logger.warning( - "Cannot look at %s URL %s because it does not support lookup as web pages.", - vcs_scheme, - link, - ) - return None - - # Tack index.html onto file:// URLs that point to directories - scheme, _, path, _, _, _ = urllib.parse.urlparse(url) - if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)): - # add trailing slash if not present so urljoin doesn't trim - # final segment - if not url.endswith("/"): - url += "/" - url = urllib.parse.urljoin(url, "index.html") - logger.debug(" file: URL is directory, getting %s", url) - - try: - resp = _get_html_response(url, session=session) - except _NotHTTP: - logger.warning( - "Skipping page %s because it looks like an archive, and cannot " - "be checked by a HTTP HEAD request.", - link, - ) - except _NotHTML as exc: - logger.warning( - "Skipping page %s because the %s request got Content-Type: %s." 
- "The only supported Content-Type is text/html", - link, - exc.request_desc, - exc.content_type, - ) - except NetworkConnectionError as exc: - _handle_get_page_fail(link, exc) - except RetryError as exc: - _handle_get_page_fail(link, exc) - except SSLError as exc: - reason = "There was a problem confirming the ssl certificate: " - reason += str(exc) - _handle_get_page_fail(link, reason, meth=logger.info) - except requests.ConnectionError as exc: - _handle_get_page_fail(link, f"connection error: {exc}") - except requests.Timeout: - _handle_get_page_fail(link, "timed out") - else: - return _make_html_page(resp, cache_link_parsing=link.cache_link_parsing) - return None - - -class CollectedSources(NamedTuple): - find_links: Sequence[Optional[LinkSource]] - index_urls: Sequence[Optional[LinkSource]] - - -class LinkCollector: - - """ - Responsible for collecting Link objects from all configured locations, - making network requests as needed. - - The class's main method is its collect_sources() method. - """ - - def __init__( - self, - session: PipSession, - search_scope: SearchScope, - ) -> None: - self.search_scope = search_scope - self.session = session - - @classmethod - def create( - cls, - session: PipSession, - options: Values, - suppress_no_index: bool = False, - ) -> "LinkCollector": - """ - :param session: The Session to use to make requests. - :param suppress_no_index: Whether to ignore the --no-index option - when constructing the SearchScope object. - """ - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index and not suppress_no_index: - logger.debug( - "Ignoring indexes: %s", - ",".join(redact_auth_from_url(url) for url in index_urls), - ) - index_urls = [] - - # Make sure find_links is a list before passing to create(). - find_links = options.find_links or [] - - search_scope = SearchScope.create( - find_links=find_links, - index_urls=index_urls, - ) - link_collector = LinkCollector( - session=session, - search_scope=search_scope, - ) - return link_collector - - @property - def find_links(self) -> List[str]: - return self.search_scope.find_links - - def fetch_page(self, location: Link) -> Optional[HTMLPage]: - """ - Fetch an HTML page containing package links. - """ - return _get_html_page(location, session=self.session) - - def collect_sources( - self, - project_name: str, - candidates_from_page: CandidatesFromPage, - ) -> CollectedSources: - # The OrderedDict calls deduplicate sources by URL. 
- index_url_sources = collections.OrderedDict( - build_source( - loc, - candidates_from_page=candidates_from_page, - page_validator=self.session.is_secure_origin, - expand_dir=False, - cache_link_parsing=False, - ) - for loc in self.search_scope.get_index_urls_locations(project_name) - ).values() - find_links_sources = collections.OrderedDict( - build_source( - loc, - candidates_from_page=candidates_from_page, - page_validator=self.session.is_secure_origin, - expand_dir=True, - cache_link_parsing=True, - ) - for loc in self.find_links - ).values() - - if logger.isEnabledFor(logging.DEBUG): - lines = [ - f"* {s.link}" - for s in itertools.chain(find_links_sources, index_url_sources) - if s is not None and s.link is not None - ] - lines = [ - f"{len(lines)} location(s) to search " - f"for versions of {project_name}:" - ] + lines - logger.debug("\n".join(lines)) - - return CollectedSources( - find_links=list(find_links_sources), - index_urls=list(index_url_sources), - ) diff --git a/venv/Lib/site-packages/pip/_internal/index/package_finder.py b/venv/Lib/site-packages/pip/_internal/index/package_finder.py deleted file mode 100644 index 223d06d..0000000 --- a/venv/Lib/site-packages/pip/_internal/index/package_finder.py +++ /dev/null @@ -1,1004 +0,0 @@ -"""Routines related to PyPI, indexes""" - -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import functools -import itertools -import logging -import re -from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union - -from pip._vendor.packaging import specifiers -from pip._vendor.packaging.tags import Tag -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.packaging.version import _BaseVersion -from pip._vendor.packaging.version import parse as parse_version - -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, - DistributionNotFound, - InvalidWheelFilename, - UnsupportedWheel, -) -from pip._internal.index.collector import LinkCollector, parse_links -from pip._internal.models.candidate import InstallationCandidate -from pip._internal.models.format_control import FormatControl -from pip._internal.models.link import Link -from pip._internal.models.search_scope import SearchScope -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.models.target_python import TargetPython -from pip._internal.models.wheel import Wheel -from pip._internal.req import InstallRequirement -from pip._internal.utils._log import getLogger -from pip._internal.utils.filetypes import WHEEL_EXTENSION -from pip._internal.utils.hashes import Hashes -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import build_netloc -from pip._internal.utils.packaging import check_requires_python -from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS - -__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] - - -logger = getLogger(__name__) - -BuildTag = Union[Tuple[()], Tuple[int, str]] -CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag] - - -def _check_link_requires_python( - link: Link, - version_info: Tuple[int, int, int], - ignore_requires_python: bool = False, -) -> bool: - """ - Return whether the given Python version is compatible with a link's - "Requires-Python" value. - - :param version_info: A 3-tuple of ints representing the Python - major-minor-micro version to check. 
- :param ignore_requires_python: Whether to ignore the "Requires-Python" - value if the given Python version isn't compatible. - """ - try: - is_compatible = check_requires_python( - link.requires_python, - version_info=version_info, - ) - except specifiers.InvalidSpecifier: - logger.debug( - "Ignoring invalid Requires-Python (%r) for link: %s", - link.requires_python, - link, - ) - else: - if not is_compatible: - version = ".".join(map(str, version_info)) - if not ignore_requires_python: - logger.verbose( - "Link requires a different Python (%s not in: %r): %s", - version, - link.requires_python, - link, - ) - return False - - logger.debug( - "Ignoring failed Requires-Python check (%s not in: %r) for link: %s", - version, - link.requires_python, - link, - ) - - return True - - -class LinkEvaluator: - - """ - Responsible for evaluating links for a particular project. - """ - - _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$") - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - def __init__( - self, - project_name: str, - canonical_name: str, - formats: FrozenSet[str], - target_python: TargetPython, - allow_yanked: bool, - ignore_requires_python: Optional[bool] = None, - ) -> None: - """ - :param project_name: The user supplied package name. - :param canonical_name: The canonical package name. - :param formats: The formats allowed for this package. Should be a set - with 'binary' or 'source' or both in it. - :param target_python: The target Python interpreter to use when - evaluating link compatibility. This is used, for example, to - check wheel compatibility, as well as when checking the Python - version, e.g. the Python version embedded in a link filename - (or egg fragment) and against an HTML link's optional PEP 503 - "data-requires-python" attribute. - :param allow_yanked: Whether files marked as yanked (in the sense - of PEP 592) are permitted to be candidates for install. - :param ignore_requires_python: Whether to ignore incompatible - PEP 503 "data-requires-python" values in HTML links. Defaults - to False. - """ - if ignore_requires_python is None: - ignore_requires_python = False - - self._allow_yanked = allow_yanked - self._canonical_name = canonical_name - self._ignore_requires_python = ignore_requires_python - self._formats = formats - self._target_python = target_python - - self.project_name = project_name - - def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: - """ - Determine whether a link is a candidate for installation. - - :return: A tuple (is_candidate, result), where `result` is (1) a - version string if `is_candidate` is True, and (2) if - `is_candidate` is False, an optional string to log the reason - the link fails to qualify. 
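`_check_link_requires_python` above compares the running interpreter against a link's `data-requires-python` specifier and, like pip, treats an invalid specifier as compatible rather than failing. The same check can be sketched directly with `packaging.specifiers` (assuming the standalone `packaging` distribution rather than pip's vendored copy):

# Sketch of the Requires-Python compatibility check using packaging.specifiers.
import sys
from typing import Optional, Tuple

from packaging.specifiers import InvalidSpecifier, SpecifierSet


def supports_python(
    requires_python: Optional[str],
    version_info: Tuple[int, int, int] = sys.version_info[:3],
) -> bool:
    if not requires_python:
        return True  # no constraint declared on the link
    try:
        spec = SpecifierSet(requires_python)
    except InvalidSpecifier:
        return True  # mirror pip: log and ignore malformed values rather than fail
    return ".".join(map(str, version_info)) in spec


# supports_python(">=3.7,<3.12", (3, 9, 13)) -> True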
- """ - version = None - if link.is_yanked and not self._allow_yanked: - reason = link.yanked_reason or "" - return (False, f"yanked for reason: {reason}") - - if link.egg_fragment: - egg_info = link.egg_fragment - ext = link.ext - else: - egg_info, ext = link.splitext() - if not ext: - return (False, "not a file") - if ext not in SUPPORTED_EXTENSIONS: - return (False, f"unsupported archive format: {ext}") - if "binary" not in self._formats and ext == WHEEL_EXTENSION: - reason = "No binaries permitted for {}".format(self.project_name) - return (False, reason) - if "macosx10" in link.path and ext == ".zip": - return (False, "macosx10 one") - if ext == WHEEL_EXTENSION: - try: - wheel = Wheel(link.filename) - except InvalidWheelFilename: - return (False, "invalid wheel filename") - if canonicalize_name(wheel.name) != self._canonical_name: - reason = "wrong project name (not {})".format(self.project_name) - return (False, reason) - - supported_tags = self._target_python.get_tags() - if not wheel.supported(supported_tags): - # Include the wheel's tags in the reason string to - # simplify troubleshooting compatibility issues. - file_tags = wheel.get_formatted_file_tags() - reason = ( - "none of the wheel's tags ({}) are compatible " - "(run pip debug --verbose to show compatible tags)".format( - ", ".join(file_tags) - ) - ) - return (False, reason) - - version = wheel.version - - # This should be up by the self.ok_binary check, but see issue 2700. - if "source" not in self._formats and ext != WHEEL_EXTENSION: - reason = f"No sources permitted for {self.project_name}" - return (False, reason) - - if not version: - version = _extract_version_from_fragment( - egg_info, - self._canonical_name, - ) - if not version: - reason = f"Missing project version for {self.project_name}" - return (False, reason) - - match = self._py_version_re.search(version) - if match: - version = version[: match.start()] - py_version = match.group(1) - if py_version != self._target_python.py_version: - return (False, "Python version is incorrect") - - supports_python = _check_link_requires_python( - link, - version_info=self._target_python.py_version_info, - ignore_requires_python=self._ignore_requires_python, - ) - if not supports_python: - # Return None for the reason text to suppress calling - # _log_skipped_link(). - return (False, None) - - logger.debug("Found link %s, version: %s", link, version) - - return (True, version) - - -def filter_unallowed_hashes( - candidates: List[InstallationCandidate], - hashes: Hashes, - project_name: str, -) -> List[InstallationCandidate]: - """ - Filter out candidates whose hashes aren't allowed, and return a new - list of candidates. - - If at least one candidate has an allowed hash, then all candidates with - either an allowed hash or no hash specified are returned. Otherwise, - the given candidates are returned. - - Including the candidates with no hash specified when there is a match - allows a warning to be logged if there is a more preferred candidate - with no hash specified. Returning all candidates in the case of no - matches lets pip report the hash of the candidate that would otherwise - have been installed (e.g. permitting the user to more easily update - their requirements file with the desired hash). - """ - if not hashes: - logger.debug( - "Given no hashes to check %s links for project %r: " - "discarding no candidates", - len(candidates), - project_name, - ) - # Make sure we're not returning back the given value. 
- return list(candidates) - - matches_or_no_digest = [] - # Collect the non-matches for logging purposes. - non_matches = [] - match_count = 0 - for candidate in candidates: - link = candidate.link - if not link.has_hash: - pass - elif link.is_hash_allowed(hashes=hashes): - match_count += 1 - else: - non_matches.append(candidate) - continue - - matches_or_no_digest.append(candidate) - - if match_count: - filtered = matches_or_no_digest - else: - # Make sure we're not returning back the given value. - filtered = list(candidates) - - if len(filtered) == len(candidates): - discard_message = "discarding no candidates" - else: - discard_message = "discarding {} non-matches:\n {}".format( - len(non_matches), - "\n ".join(str(candidate.link) for candidate in non_matches), - ) - - logger.debug( - "Checked %s links for project %r against %s hashes " - "(%s matches, %s no digest): %s", - len(candidates), - project_name, - hashes.digest_count, - match_count, - len(matches_or_no_digest) - match_count, - discard_message, - ) - - return filtered - - -class CandidatePreferences: - - """ - Encapsulates some of the preferences for filtering and sorting - InstallationCandidate objects. - """ - - def __init__( - self, - prefer_binary: bool = False, - allow_all_prereleases: bool = False, - ) -> None: - """ - :param allow_all_prereleases: Whether to allow all pre-releases. - """ - self.allow_all_prereleases = allow_all_prereleases - self.prefer_binary = prefer_binary - - -class BestCandidateResult: - """A collection of candidates, returned by `PackageFinder.find_best_candidate`. - - This class is only intended to be instantiated by CandidateEvaluator's - `compute_best_candidate()` method. - """ - - def __init__( - self, - candidates: List[InstallationCandidate], - applicable_candidates: List[InstallationCandidate], - best_candidate: Optional[InstallationCandidate], - ) -> None: - """ - :param candidates: A sequence of all available candidates found. - :param applicable_candidates: The applicable candidates. - :param best_candidate: The most preferred candidate found, or None - if no applicable candidates were found. - """ - assert set(applicable_candidates) <= set(candidates) - - if best_candidate is None: - assert not applicable_candidates - else: - assert best_candidate in applicable_candidates - - self._applicable_candidates = applicable_candidates - self._candidates = candidates - - self.best_candidate = best_candidate - - def iter_all(self) -> Iterable[InstallationCandidate]: - """Iterate through all candidates.""" - return iter(self._candidates) - - def iter_applicable(self) -> Iterable[InstallationCandidate]: - """Iterate through the applicable candidates.""" - return iter(self._applicable_candidates) - - -class CandidateEvaluator: - - """ - Responsible for filtering and sorting candidates for installation based - on what tags are valid. - """ - - @classmethod - def create( - cls, - project_name: str, - target_python: Optional[TargetPython] = None, - prefer_binary: bool = False, - allow_all_prereleases: bool = False, - specifier: Optional[specifiers.BaseSpecifier] = None, - hashes: Optional[Hashes] = None, - ) -> "CandidateEvaluator": - """Create a CandidateEvaluator object. - - :param target_python: The target Python interpreter to use when - checking compatibility. If None (the default), a TargetPython - object will be constructed from the running Python. - :param specifier: An optional object implementing `filter` - (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable - versions. 
- :param hashes: An optional collection of allowed hashes. - """ - if target_python is None: - target_python = TargetPython() - if specifier is None: - specifier = specifiers.SpecifierSet() - - supported_tags = target_python.get_tags() - - return cls( - project_name=project_name, - supported_tags=supported_tags, - specifier=specifier, - prefer_binary=prefer_binary, - allow_all_prereleases=allow_all_prereleases, - hashes=hashes, - ) - - def __init__( - self, - project_name: str, - supported_tags: List[Tag], - specifier: specifiers.BaseSpecifier, - prefer_binary: bool = False, - allow_all_prereleases: bool = False, - hashes: Optional[Hashes] = None, - ) -> None: - """ - :param supported_tags: The PEP 425 tags supported by the target - Python in order of preference (most preferred first). - """ - self._allow_all_prereleases = allow_all_prereleases - self._hashes = hashes - self._prefer_binary = prefer_binary - self._project_name = project_name - self._specifier = specifier - self._supported_tags = supported_tags - # Since the index of the tag in the _supported_tags list is used - # as a priority, precompute a map from tag to index/priority to be - # used in wheel.find_most_preferred_tag. - self._wheel_tag_preferences = { - tag: idx for idx, tag in enumerate(supported_tags) - } - - def get_applicable_candidates( - self, - candidates: List[InstallationCandidate], - ) -> List[InstallationCandidate]: - """ - Return the applicable candidates from a list of candidates. - """ - # Using None infers from the specifier instead. - allow_prereleases = self._allow_all_prereleases or None - specifier = self._specifier - versions = { - str(v) - for v in specifier.filter( - # We turn the version object into a str here because otherwise - # when we're debundled but setuptools isn't, Python will see - # packaging.version.Version and - # pkg_resources._vendor.packaging.version.Version as different - # types. This way we'll use a str as a common data interchange - # format. If we stop using the pkg_resources provided specifier - # and start using our own, we can drop the cast to str(). - (str(c.version) for c in candidates), - prereleases=allow_prereleases, - ) - } - - # Again, converting version to str to deal with debundling. - applicable_candidates = [c for c in candidates if str(c.version) in versions] - - filtered_applicable_candidates = filter_unallowed_hashes( - candidates=applicable_candidates, - hashes=self._hashes, - project_name=self._project_name, - ) - - return sorted(filtered_applicable_candidates, key=self._sort_key) - - def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: - """ - Function to pass as the `key` argument to a call to sorted() to sort - InstallationCandidates by preference. - - Returns a tuple such that tuples sorting as greater using Python's - default comparison operator are more preferred. - - The preference is as follows: - - First and foremost, candidates with allowed (matching) hashes are - always preferred over candidates without matching hashes. This is - because e.g. if the only candidate with an allowed hash is yanked, - we still want to use that candidate. - - Second, excepting hash considerations, candidates that have been - yanked (in the sense of PEP 592) are always less preferred than - candidates that haven't been yanked. Then: - - If not finding wheels, they are sorted by version only. - If finding wheels, then the sort order is by version, then: - 1. existing installs - 2. wheels ordered via Wheel.support_index_min(self._supported_tags) - 3. 
source archives - If prefer_binary was set, then all wheels are sorted above sources. - - Note: it was considered to embed this logic into the Link - comparison operators, but then different sdist links - with the same version, would have to be considered equal - """ - valid_tags = self._supported_tags - support_num = len(valid_tags) - build_tag: BuildTag = () - binary_preference = 0 - link = candidate.link - if link.is_wheel: - # can raise InvalidWheelFilename - wheel = Wheel(link.filename) - try: - pri = -( - wheel.find_most_preferred_tag( - valid_tags, self._wheel_tag_preferences - ) - ) - except ValueError: - raise UnsupportedWheel( - "{} is not a supported wheel for this platform. It " - "can't be sorted.".format(wheel.filename) - ) - if self._prefer_binary: - binary_preference = 1 - if wheel.build_tag is not None: - match = re.match(r"^(\d+)(.*)$", wheel.build_tag) - build_tag_groups = match.groups() - build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) - else: # sdist - pri = -(support_num) - has_allowed_hash = int(link.is_hash_allowed(self._hashes)) - yank_value = -1 * int(link.is_yanked) # -1 for yanked. - return ( - has_allowed_hash, - yank_value, - binary_preference, - candidate.version, - pri, - build_tag, - ) - - def sort_best_candidate( - self, - candidates: List[InstallationCandidate], - ) -> Optional[InstallationCandidate]: - """ - Return the best candidate per the instance's sort order, or None if - no candidate is acceptable. - """ - if not candidates: - return None - best_candidate = max(candidates, key=self._sort_key) - return best_candidate - - def compute_best_candidate( - self, - candidates: List[InstallationCandidate], - ) -> BestCandidateResult: - """ - Compute and return a `BestCandidateResult` instance. - """ - applicable_candidates = self.get_applicable_candidates(candidates) - - best_candidate = self.sort_best_candidate(applicable_candidates) - - return BestCandidateResult( - candidates, - applicable_candidates=applicable_candidates, - best_candidate=best_candidate, - ) - - -class PackageFinder: - """This finds packages. - - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links. - """ - - def __init__( - self, - link_collector: LinkCollector, - target_python: TargetPython, - allow_yanked: bool, - use_deprecated_html5lib: bool, - format_control: Optional[FormatControl] = None, - candidate_prefs: Optional[CandidatePreferences] = None, - ignore_requires_python: Optional[bool] = None, - ) -> None: - """ - This constructor is primarily meant to be used by the create() class - method and from tests. - - :param format_control: A FormatControl object, used to control - the selection of source packages / binary packages when consulting - the index and links. - :param candidate_prefs: Options to use when creating a - CandidateEvaluator object. - """ - if candidate_prefs is None: - candidate_prefs = CandidatePreferences() - - format_control = format_control or FormatControl(set(), set()) - - self._allow_yanked = allow_yanked - self._candidate_prefs = candidate_prefs - self._ignore_requires_python = ignore_requires_python - self._link_collector = link_collector - self._target_python = target_python - self._use_deprecated_html5lib = use_deprecated_html5lib - - self.format_control = format_control - - # These are boring links that have already been logged somehow. 
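`_sort_key` above ranks candidates by building a tuple so that plain tuple comparison does the work: an allowed hash beats everything, then not-yanked beats yanked, then the optional binary preference, then version, then wheel tag priority and build tag. A toy version of the same idea with hypothetical candidate fields (not pip's classes):

# Toy ranking of installation candidates via a comparison tuple.
# The Candidate fields below are hypothetical stand-ins, not pip's classes.
from dataclasses import dataclass
from typing import Tuple

from packaging.version import Version


@dataclass
class Candidate:
    version: Version
    is_wheel: bool
    is_yanked: bool
    hash_allowed: bool
    tag_priority: int  # index into the supported-tags list; lower = better (wheels only)


def sort_key(c: Candidate, prefer_binary: bool = False) -> Tuple:
    return (
        int(c.hash_allowed),                 # candidates with an allowed hash always win
        -int(c.is_yanked),                   # yanked candidates sort last
        int(prefer_binary and c.is_wheel),   # optionally put all wheels above sdists
        c.version,                           # then newest version
        -c.tag_priority if c.is_wheel else float("-inf"),  # better wheel tag beats sdist
    )


# best = max(candidates, key=sort_key)

Encoding the preference as a tuple keeps the whole policy in one place and lets `max(candidates, key=sort_key)` pick the winner, which is what `sort_best_candidate` above does.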
- self._logged_links: Set[Link] = set() - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - @classmethod - def create( - cls, - link_collector: LinkCollector, - selection_prefs: SelectionPreferences, - target_python: Optional[TargetPython] = None, - *, - use_deprecated_html5lib: bool, - ) -> "PackageFinder": - """Create a PackageFinder. - - :param selection_prefs: The candidate selection preferences, as a - SelectionPreferences object. - :param target_python: The target Python interpreter to use when - checking compatibility. If None (the default), a TargetPython - object will be constructed from the running Python. - """ - if target_python is None: - target_python = TargetPython() - - candidate_prefs = CandidatePreferences( - prefer_binary=selection_prefs.prefer_binary, - allow_all_prereleases=selection_prefs.allow_all_prereleases, - ) - - return cls( - candidate_prefs=candidate_prefs, - link_collector=link_collector, - target_python=target_python, - allow_yanked=selection_prefs.allow_yanked, - format_control=selection_prefs.format_control, - ignore_requires_python=selection_prefs.ignore_requires_python, - use_deprecated_html5lib=use_deprecated_html5lib, - ) - - @property - def target_python(self) -> TargetPython: - return self._target_python - - @property - def search_scope(self) -> SearchScope: - return self._link_collector.search_scope - - @search_scope.setter - def search_scope(self, search_scope: SearchScope) -> None: - self._link_collector.search_scope = search_scope - - @property - def find_links(self) -> List[str]: - return self._link_collector.find_links - - @property - def index_urls(self) -> List[str]: - return self.search_scope.index_urls - - @property - def trusted_hosts(self) -> Iterable[str]: - for host_port in self._link_collector.session.pip_trusted_origins: - yield build_netloc(*host_port) - - @property - def allow_all_prereleases(self) -> bool: - return self._candidate_prefs.allow_all_prereleases - - def set_allow_all_prereleases(self) -> None: - self._candidate_prefs.allow_all_prereleases = True - - @property - def prefer_binary(self) -> bool: - return self._candidate_prefs.prefer_binary - - def set_prefer_binary(self) -> None: - self._candidate_prefs.prefer_binary = True - - def make_link_evaluator(self, project_name: str) -> LinkEvaluator: - canonical_name = canonicalize_name(project_name) - formats = self.format_control.get_allowed_formats(canonical_name) - - return LinkEvaluator( - project_name=project_name, - canonical_name=canonical_name, - formats=formats, - target_python=self._target_python, - allow_yanked=self._allow_yanked, - ignore_requires_python=self._ignore_requires_python, - ) - - def _sort_links(self, links: Iterable[Link]) -> List[Link]: - """ - Returns elements of links in order, non-egg links first, egg links - second, while eliminating duplicates - """ - eggs, no_eggs = [], [] - seen: Set[Link] = set() - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _log_skipped_link(self, link: Link, reason: str) -> None: - if link not in self._logged_links: - # Put the link at the end so the reason is more visible and because - # the link string is usually very long. 
- logger.debug("Skipping link: %s: %s", reason, link) - self._logged_links.add(link) - - def get_install_candidate( - self, link_evaluator: LinkEvaluator, link: Link - ) -> Optional[InstallationCandidate]: - """ - If the link is a candidate for install, convert it to an - InstallationCandidate and return it. Otherwise, return None. - """ - is_candidate, result = link_evaluator.evaluate_link(link) - if not is_candidate: - if result: - self._log_skipped_link(link, reason=result) - return None - - return InstallationCandidate( - name=link_evaluator.project_name, - link=link, - version=result, - ) - - def evaluate_links( - self, link_evaluator: LinkEvaluator, links: Iterable[Link] - ) -> List[InstallationCandidate]: - """ - Convert links that are candidates to InstallationCandidate objects. - """ - candidates = [] - for link in self._sort_links(links): - candidate = self.get_install_candidate(link_evaluator, link) - if candidate is not None: - candidates.append(candidate) - - return candidates - - def process_project_url( - self, project_url: Link, link_evaluator: LinkEvaluator - ) -> List[InstallationCandidate]: - logger.debug( - "Fetching project page and analyzing links: %s", - project_url, - ) - html_page = self._link_collector.fetch_page(project_url) - if html_page is None: - return [] - - page_links = list(parse_links(html_page, self._use_deprecated_html5lib)) - - with indent_log(): - package_links = self.evaluate_links( - link_evaluator, - links=page_links, - ) - - return package_links - - @functools.lru_cache(maxsize=None) - def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]: - """Find all available InstallationCandidate for project_name - - This checks index_urls and find_links. - All versions found are returned as an InstallationCandidate list. - - See LinkEvaluator.evaluate_link() for details on which files - are accepted. 
- """ - link_evaluator = self.make_link_evaluator(project_name) - - collected_sources = self._link_collector.collect_sources( - project_name=project_name, - candidates_from_page=functools.partial( - self.process_project_url, - link_evaluator=link_evaluator, - ), - ) - - page_candidates_it = itertools.chain.from_iterable( - source.page_candidates() - for sources in collected_sources - for source in sources - if source is not None - ) - page_candidates = list(page_candidates_it) - - file_links_it = itertools.chain.from_iterable( - source.file_links() - for sources in collected_sources - for source in sources - if source is not None - ) - file_candidates = self.evaluate_links( - link_evaluator, - sorted(file_links_it, reverse=True), - ) - - if logger.isEnabledFor(logging.DEBUG) and file_candidates: - paths = [] - for candidate in file_candidates: - assert candidate.link.url # we need to have a URL - try: - paths.append(candidate.link.file_path) - except Exception: - paths.append(candidate.link.url) # it's not a local file - - logger.debug("Local files found: %s", ", ".join(paths)) - - # This is an intentional priority ordering - return file_candidates + page_candidates - - def make_candidate_evaluator( - self, - project_name: str, - specifier: Optional[specifiers.BaseSpecifier] = None, - hashes: Optional[Hashes] = None, - ) -> CandidateEvaluator: - """Create a CandidateEvaluator object to use.""" - candidate_prefs = self._candidate_prefs - return CandidateEvaluator.create( - project_name=project_name, - target_python=self._target_python, - prefer_binary=candidate_prefs.prefer_binary, - allow_all_prereleases=candidate_prefs.allow_all_prereleases, - specifier=specifier, - hashes=hashes, - ) - - @functools.lru_cache(maxsize=None) - def find_best_candidate( - self, - project_name: str, - specifier: Optional[specifiers.BaseSpecifier] = None, - hashes: Optional[Hashes] = None, - ) -> BestCandidateResult: - """Find matches for the given project and specifier. - - :param specifier: An optional object implementing `filter` - (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable - versions. - - :return: A `BestCandidateResult` instance. - """ - candidates = self.find_all_candidates(project_name) - candidate_evaluator = self.make_candidate_evaluator( - project_name=project_name, - specifier=specifier, - hashes=hashes, - ) - return candidate_evaluator.compute_best_candidate(candidates) - - def find_requirement( - self, req: InstallRequirement, upgrade: bool - ) -> Optional[InstallationCandidate]: - """Try to find a Link matching req - - Expects req, an InstallRequirement and upgrade, a boolean - Returns a InstallationCandidate if found, - Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise - """ - hashes = req.hashes(trust_internet=False) - best_candidate_result = self.find_best_candidate( - req.name, - specifier=req.specifier, - hashes=hashes, - ) - best_candidate = best_candidate_result.best_candidate - - installed_version: Optional[_BaseVersion] = None - if req.satisfied_by is not None: - installed_version = req.satisfied_by.version - - def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: - # This repeated parse_version and str() conversion is needed to - # handle different vendoring sources from pip and pkg_resources. - # If we stop using the pkg_resources provided specifier and start - # using our own, we can drop the cast to str(). 
- return ( - ", ".join( - sorted( - {str(c.version) for c in cand_iter}, - key=parse_version, - ) - ) - or "none" - ) - - if installed_version is None and best_candidate is None: - logger.critical( - "Could not find a version that satisfies the requirement %s " - "(from versions: %s)", - req, - _format_versions(best_candidate_result.iter_all()), - ) - - raise DistributionNotFound( - "No matching distribution found for {}".format(req) - ) - - best_installed = False - if installed_version and ( - best_candidate is None or best_candidate.version <= installed_version - ): - best_installed = True - - if not upgrade and installed_version is not None: - if best_installed: - logger.debug( - "Existing installed version (%s) is most up-to-date and " - "satisfies requirement", - installed_version, - ) - else: - logger.debug( - "Existing installed version (%s) satisfies requirement " - "(most up-to-date version is %s)", - installed_version, - best_candidate.version, - ) - return None - - if best_installed: - # We have an existing version, and its the best version - logger.debug( - "Installed version (%s) is most up-to-date (past versions: %s)", - installed_version, - _format_versions(best_candidate_result.iter_applicable()), - ) - raise BestVersionAlreadyInstalled - - logger.debug( - "Using version %s (newest of versions: %s)", - best_candidate.version, - _format_versions(best_candidate_result.iter_applicable()), - ) - return best_candidate - - -def _find_name_version_sep(fragment: str, canonical_name: str) -> int: - """Find the separator's index based on the package's canonical name. - - :param fragment: A + filename "fragment" (stem) or - egg fragment. - :param canonical_name: The package's canonical name. - - This function is needed since the canonicalized name does not necessarily - have the same length as the egg info's name part. An example:: - - >>> fragment = 'foo__bar-1.0' - >>> canonical_name = 'foo-bar' - >>> _find_name_version_sep(fragment, canonical_name) - 8 - """ - # Project name and version must be separated by one single dash. Find all - # occurrences of dashes; if the string in front of it matches the canonical - # name, this is the one separating the name and version parts. - for i, c in enumerate(fragment): - if c != "-": - continue - if canonicalize_name(fragment[:i]) == canonical_name: - return i - raise ValueError(f"{fragment} does not match {canonical_name}") - - -def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]: - """Parse the version string from a + filename - "fragment" (stem) or egg fragment. - - :param fragment: The string to parse. E.g. foo-2.1 - :param canonical_name: The canonicalized name of the package this - belongs to. 
- """ - try: - version_start = _find_name_version_sep(fragment, canonical_name) + 1 - except ValueError: - return None - version = fragment[version_start:] - if not version: - return None - return version diff --git a/venv/Lib/site-packages/pip/_internal/index/sources.py b/venv/Lib/site-packages/pip/_internal/index/sources.py deleted file mode 100644 index eec3f12..0000000 --- a/venv/Lib/site-packages/pip/_internal/index/sources.py +++ /dev/null @@ -1,224 +0,0 @@ -import logging -import mimetypes -import os -import pathlib -from typing import Callable, Iterable, Optional, Tuple - -from pip._internal.models.candidate import InstallationCandidate -from pip._internal.models.link import Link -from pip._internal.utils.urls import path_to_url, url_to_path -from pip._internal.vcs import is_url - -logger = logging.getLogger(__name__) - -FoundCandidates = Iterable[InstallationCandidate] -FoundLinks = Iterable[Link] -CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]] -PageValidator = Callable[[Link], bool] - - -class LinkSource: - @property - def link(self) -> Optional[Link]: - """Returns the underlying link, if there's one.""" - raise NotImplementedError() - - def page_candidates(self) -> FoundCandidates: - """Candidates found by parsing an archive listing HTML file.""" - raise NotImplementedError() - - def file_links(self) -> FoundLinks: - """Links found by specifying archives directly.""" - raise NotImplementedError() - - -def _is_html_file(file_url: str) -> bool: - return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" - - -class _FlatDirectorySource(LinkSource): - """Link source specified by ``--find-links=``. - - This looks the content of the directory, and returns: - - * ``page_candidates``: Links listed on each HTML file in the directory. - * ``file_candidates``: Archives in the directory. - """ - - def __init__( - self, - candidates_from_page: CandidatesFromPage, - path: str, - ) -> None: - self._candidates_from_page = candidates_from_page - self._path = pathlib.Path(os.path.realpath(path)) - - @property - def link(self) -> Optional[Link]: - return None - - def page_candidates(self) -> FoundCandidates: - for path in self._path.iterdir(): - url = path_to_url(str(path)) - if not _is_html_file(url): - continue - yield from self._candidates_from_page(Link(url)) - - def file_links(self) -> FoundLinks: - for path in self._path.iterdir(): - url = path_to_url(str(path)) - if _is_html_file(url): - continue - yield Link(url) - - -class _LocalFileSource(LinkSource): - """``--find-links=`` or ``--[extra-]index-url=``. - - If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to - the option, it is converted to a URL first. This returns: - - * ``page_candidates``: Links listed on an HTML file. - * ``file_candidates``: The non-HTML file. - """ - - def __init__( - self, - candidates_from_page: CandidatesFromPage, - link: Link, - ) -> None: - self._candidates_from_page = candidates_from_page - self._link = link - - @property - def link(self) -> Optional[Link]: - return self._link - - def page_candidates(self) -> FoundCandidates: - if not _is_html_file(self._link.url): - return - yield from self._candidates_from_page(self._link) - - def file_links(self) -> FoundLinks: - if _is_html_file(self._link.url): - return - yield self._link - - -class _RemoteFileSource(LinkSource): - """``--find-links=`` or ``--[extra-]index-url=``. - - This returns: - - * ``page_candidates``: Links listed on an HTML file. - * ``file_candidates``: The non-HTML file. 
- """ - - def __init__( - self, - candidates_from_page: CandidatesFromPage, - page_validator: PageValidator, - link: Link, - ) -> None: - self._candidates_from_page = candidates_from_page - self._page_validator = page_validator - self._link = link - - @property - def link(self) -> Optional[Link]: - return self._link - - def page_candidates(self) -> FoundCandidates: - if not self._page_validator(self._link): - return - yield from self._candidates_from_page(self._link) - - def file_links(self) -> FoundLinks: - yield self._link - - -class _IndexDirectorySource(LinkSource): - """``--[extra-]index-url=``. - - This is treated like a remote URL; ``candidates_from_page`` contains logic - for this by appending ``index.html`` to the link. - """ - - def __init__( - self, - candidates_from_page: CandidatesFromPage, - link: Link, - ) -> None: - self._candidates_from_page = candidates_from_page - self._link = link - - @property - def link(self) -> Optional[Link]: - return self._link - - def page_candidates(self) -> FoundCandidates: - yield from self._candidates_from_page(self._link) - - def file_links(self) -> FoundLinks: - return () - - -def build_source( - location: str, - *, - candidates_from_page: CandidatesFromPage, - page_validator: PageValidator, - expand_dir: bool, - cache_link_parsing: bool, -) -> Tuple[Optional[str], Optional[LinkSource]]: - - path: Optional[str] = None - url: Optional[str] = None - if os.path.exists(location): # Is a local path. - url = path_to_url(location) - path = location - elif location.startswith("file:"): # A file: URL. - url = location - path = url_to_path(location) - elif is_url(location): - url = location - - if url is None: - msg = ( - "Location '%s' is ignored: " - "it is either a non-existing path or lacks a specific scheme." - ) - logger.warning(msg, location) - return (None, None) - - if path is None: - source: LinkSource = _RemoteFileSource( - candidates_from_page=candidates_from_page, - page_validator=page_validator, - link=Link(url, cache_link_parsing=cache_link_parsing), - ) - return (url, source) - - if os.path.isdir(path): - if expand_dir: - source = _FlatDirectorySource( - candidates_from_page=candidates_from_page, - path=path, - ) - else: - source = _IndexDirectorySource( - candidates_from_page=candidates_from_page, - link=Link(url, cache_link_parsing=cache_link_parsing), - ) - return (url, source) - elif os.path.isfile(path): - source = _LocalFileSource( - candidates_from_page=candidates_from_page, - link=Link(url, cache_link_parsing=cache_link_parsing), - ) - return (url, source) - logger.warning( - "Location '%s' is ignored: it is neither a file nor a directory.", - location, - ) - return (url, None) diff --git a/venv/Lib/site-packages/pip/_internal/locations/__init__.py b/venv/Lib/site-packages/pip/_internal/locations/__init__.py deleted file mode 100644 index ac0c166..0000000 --- a/venv/Lib/site-packages/pip/_internal/locations/__init__.py +++ /dev/null @@ -1,520 +0,0 @@ -import functools -import logging -import os -import pathlib -import sys -import sysconfig -from typing import Any, Dict, Iterator, List, Optional, Tuple - -from pip._internal.models.scheme import SCHEME_KEYS, Scheme -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.deprecation import deprecated -from pip._internal.utils.virtualenv import running_under_virtualenv - -from . 
import _distutils, _sysconfig -from .base import ( - USER_CACHE_DIR, - get_major_minor_version, - get_src_prefix, - is_osx_framework, - site_packages, - user_site, -) - -__all__ = [ - "USER_CACHE_DIR", - "get_bin_prefix", - "get_bin_user", - "get_major_minor_version", - "get_platlib", - "get_prefixed_libs", - "get_purelib", - "get_scheme", - "get_src_prefix", - "site_packages", - "user_site", -] - - -logger = logging.getLogger(__name__) - - -_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib") - -_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10) - - -def _should_use_sysconfig() -> bool: - """This function determines the value of _USE_SYSCONFIG. - - By default, pip uses sysconfig on Python 3.10+. - But Python distributors can override this decision by setting: - sysconfig._PIP_USE_SYSCONFIG = True / False - Rationale in https://github.com/pypa/pip/issues/10647 - - This is a function for testability, but should be constant during any one - run. - """ - return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT)) - - -_USE_SYSCONFIG = _should_use_sysconfig() - -# Be noisy about incompatibilities if this platforms "should" be using -# sysconfig, but is explicitly opting out and using distutils instead. -if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG: - _MISMATCH_LEVEL = logging.WARNING -else: - _MISMATCH_LEVEL = logging.DEBUG - - -def _looks_like_bpo_44860() -> bool: - """The resolution to bpo-44860 will change this incorrect platlib. - - See . - """ - from distutils.command.install import INSTALL_SCHEMES # type: ignore - - try: - unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"] - except KeyError: - return False - return unix_user_platlib == "$usersite" - - -def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool: - platlib = scheme["platlib"] - if "/$platlibdir/" in platlib: - platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/") - if "/lib64/" not in platlib: - return False - unpatched = platlib.replace("/lib64/", "/lib/") - return unpatched.replace("$platbase/", "$base/") == scheme["purelib"] - - -@functools.lru_cache(maxsize=None) -def _looks_like_red_hat_lib() -> bool: - """Red Hat patches platlib in unix_prefix and unix_home, but not purelib. - - This is the only way I can see to tell a Red Hat-patched Python. - """ - from distutils.command.install import INSTALL_SCHEMES # type: ignore - - return all( - k in INSTALL_SCHEMES - and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k]) - for k in ("unix_prefix", "unix_home") - ) - - -@functools.lru_cache(maxsize=None) -def _looks_like_debian_scheme() -> bool: - """Debian adds two additional schemes.""" - from distutils.command.install import INSTALL_SCHEMES # type: ignore - - return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES - - -@functools.lru_cache(maxsize=None) -def _looks_like_red_hat_scheme() -> bool: - """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``. - - Red Hat's ``00251-change-user-install-location.patch`` changes the install - command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is - (fortunately?) done quite unconditionally, so we create a default command - object without any configuration to detect this. 
- """ - from distutils.command.install import install - from distutils.dist import Distribution - - cmd: Any = install(Distribution()) - cmd.finalize_options() - return ( - cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local" - and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local" - ) - - -@functools.lru_cache(maxsize=None) -def _looks_like_slackware_scheme() -> bool: - """Slackware patches sysconfig but fails to patch distutils and site. - - Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib - path, but does not do the same to the site module. - """ - if user_site is None: # User-site not available. - return False - try: - paths = sysconfig.get_paths(scheme="posix_user", expand=False) - except KeyError: # User-site not available. - return False - return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site - - -@functools.lru_cache(maxsize=None) -def _looks_like_msys2_mingw_scheme() -> bool: - """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. - - However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is - likely going to be included in their 3.10 release, so we ignore the warning. - See msys2/MINGW-packages#9319. - - MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, - and is missing the final ``"site-packages"``. - """ - paths = sysconfig.get_paths("nt", expand=False) - return all( - "Lib" not in p and "lib" in p and not p.endswith("site-packages") - for p in (paths[key] for key in ("platlib", "purelib")) - ) - - -def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]: - ldversion = sysconfig.get_config_var("LDVERSION") - abiflags: str = getattr(sys, "abiflags", None) - - # LDVERSION does not end with sys.abiflags. Just return the path unchanged. - if not ldversion or not abiflags or not ldversion.endswith(abiflags): - yield from parts - return - - # Strip sys.abiflags from LDVERSION-based path components. - for part in parts: - if part.endswith(ldversion): - part = part[: (0 - len(abiflags))] - yield part - - -@functools.lru_cache(maxsize=None) -def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None: - issue_url = "https://github.com/pypa/pip/issues/10151" - message = ( - "Value for %s does not match. 
Please report this to <%s>" - "\ndistutils: %s" - "\nsysconfig: %s" - ) - logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new) - - -def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: - if old == new: - return False - _warn_mismatched(old, new, key=key) - return True - - -@functools.lru_cache(maxsize=None) -def _log_context( - *, - user: bool = False, - home: Optional[str] = None, - root: Optional[str] = None, - prefix: Optional[str] = None, -) -> None: - parts = [ - "Additional context:", - "user = %r", - "home = %r", - "root = %r", - "prefix = %r", - ] - - logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix) - - -def get_scheme( - dist_name: str, - user: bool = False, - home: Optional[str] = None, - root: Optional[str] = None, - isolated: bool = False, - prefix: Optional[str] = None, -) -> Scheme: - new = _sysconfig.get_scheme( - dist_name, - user=user, - home=home, - root=root, - isolated=isolated, - prefix=prefix, - ) - if _USE_SYSCONFIG: - return new - - old = _distutils.get_scheme( - dist_name, - user=user, - home=home, - root=root, - isolated=isolated, - prefix=prefix, - ) - - warning_contexts = [] - for k in SCHEME_KEYS: - old_v = pathlib.Path(getattr(old, k)) - new_v = pathlib.Path(getattr(new, k)) - - if old_v == new_v: - continue - - # distutils incorrectly put PyPy packages under ``site-packages/python`` - # in the ``posix_home`` scheme, but PyPy devs said they expect the - # directory name to be ``pypy`` instead. So we treat this as a bug fix - # and not warn about it. See bpo-43307 and python/cpython#24628. - skip_pypy_special_case = ( - sys.implementation.name == "pypy" - and home is not None - and k in ("platlib", "purelib") - and old_v.parent == new_v.parent - and old_v.name.startswith("python") - and new_v.name.startswith("pypy") - ) - if skip_pypy_special_case: - continue - - # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in - # the ``include`` value, but distutils's ``headers`` does. We'll let - # CPython decide whether this is a bug or feature. See bpo-43948. - skip_osx_framework_user_special_case = ( - user - and is_osx_framework() - and k == "headers" - and old_v.parent.parent == new_v.parent - and old_v.parent.name.startswith("python") - ) - if skip_osx_framework_user_special_case: - continue - - # On Red Hat and derived Linux distributions, distutils is patched to - # use "lib64" instead of "lib" for platlib. - if k == "platlib" and _looks_like_red_hat_lib(): - continue - - # On Python 3.9+, sysconfig's posix_user scheme sets platlib against - # sys.platlibdir, but distutils's unix_user incorrectly coninutes - # using the same $usersite for both platlib and purelib. This creates a - # mismatch when sys.platlibdir is not "lib". - skip_bpo_44860 = ( - user - and k == "platlib" - and not WINDOWS - and sys.version_info >= (3, 9) - and _PLATLIBDIR != "lib" - and _looks_like_bpo_44860() - ) - if skip_bpo_44860: - continue - - # Slackware incorrectly patches posix_user to use lib64 instead of lib, - # but not usersite to match the location. - skip_slackware_user_scheme = ( - user - and k in ("platlib", "purelib") - and not WINDOWS - and _looks_like_slackware_scheme() - ) - if skip_slackware_user_scheme: - continue - - # Both Debian and Red Hat patch Python to place the system site under - # /usr/local instead of /usr. Debian also places lib in dist-packages - # instead of site-packages, but the /usr/local check should cover it. 
- skip_linux_system_special_case = ( - not (user or home or prefix or running_under_virtualenv()) - and old_v.parts[1:3] == ("usr", "local") - and len(new_v.parts) > 1 - and new_v.parts[1] == "usr" - and (len(new_v.parts) < 3 or new_v.parts[2] != "local") - and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme()) - ) - if skip_linux_system_special_case: - continue - - # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in - # the "pythonX.Y" part of the path, but distutils does. - skip_sysconfig_abiflag_bug = ( - sys.version_info < (3, 8) - and not WINDOWS - and k in ("headers", "platlib", "purelib") - and tuple(_fix_abiflags(old_v.parts)) == new_v.parts - ) - if skip_sysconfig_abiflag_bug: - continue - - # MSYS2 MINGW's sysconfig patch does not include the "site-packages" - # part of the path. This is incorrect and will be fixed in MSYS. - skip_msys2_mingw_bug = ( - WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme() - ) - if skip_msys2_mingw_bug: - continue - - # CPython's POSIX install script invokes pip (via ensurepip) against the - # interpreter located in the source tree, not the install site. This - # triggers special logic in sysconfig that's not present in distutils. - # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194 - skip_cpython_build = ( - sysconfig.is_python_build(check_home=True) - and not WINDOWS - and k in ("headers", "include", "platinclude") - ) - if skip_cpython_build: - continue - - warning_contexts.append((old_v, new_v, f"scheme.{k}")) - - if not warning_contexts: - return old - - # Check if this path mismatch is caused by distutils config files. Those - # files will no longer work once we switch to sysconfig, so this raises a - # deprecation message for them. - default_old = _distutils.distutils_scheme( - dist_name, - user, - home, - root, - isolated, - prefix, - ignore_config_files=True, - ) - if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS): - deprecated( - reason=( - "Configuring installation scheme with distutils config files " - "is deprecated and will no longer work in the near future. If you " - "are using a Homebrew or Linuxbrew Python, please see discussion " - "at https://github.com/Homebrew/homebrew-core/issues/76621" - ), - replacement=None, - gone_in=None, - ) - return old - - # Post warnings about this mismatch so user can report them back. - for old_v, new_v, key in warning_contexts: - _warn_mismatched(old_v, new_v, key=key) - _log_context(user=user, home=home, root=root, prefix=prefix) - - return old - - -def get_bin_prefix() -> str: - new = _sysconfig.get_bin_prefix() - if _USE_SYSCONFIG: - return new - - old = _distutils.get_bin_prefix() - if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): - _log_context() - return old - - -def get_bin_user() -> str: - return _sysconfig.get_scheme("", user=True).scripts - - -def _looks_like_deb_system_dist_packages(value: str) -> bool: - """Check if the value is Debian's APT-controlled dist-packages. - - Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the - default package path controlled by APT, but does not patch ``sysconfig`` to - do the same. This is similar to the bug worked around in ``get_scheme()``, - but here the default is ``deb_system`` instead of ``unix_local``. Ultimately - we can't do anything about this Debian bug, and this detection allows us to - skip the warning when needed. 
- """ - if not _looks_like_debian_scheme(): - return False - if value == "/usr/lib/python3/dist-packages": - return True - return False - - -def get_purelib() -> str: - """Return the default pure-Python lib location.""" - new = _sysconfig.get_purelib() - if _USE_SYSCONFIG: - return new - - old = _distutils.get_purelib() - if _looks_like_deb_system_dist_packages(old): - return old - if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): - _log_context() - return old - - -def get_platlib() -> str: - """Return the default platform-shared lib location.""" - new = _sysconfig.get_platlib() - if _USE_SYSCONFIG: - return new - - old = _distutils.get_platlib() - if _looks_like_deb_system_dist_packages(old): - return old - if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): - _log_context() - return old - - -def _deduplicated(v1: str, v2: str) -> List[str]: - """Deduplicate values from a list.""" - if v1 == v2: - return [v1] - return [v1, v2] - - -def _looks_like_apple_library(path: str) -> bool: - """Apple patches sysconfig to *always* look under */Library/Python*.""" - if sys.platform[:6] != "darwin": - return False - return path == f"/Library/Python/{get_major_minor_version()}/site-packages" - - -def get_prefixed_libs(prefix: str) -> List[str]: - """Return the lib locations under ``prefix``.""" - new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix) - if _USE_SYSCONFIG: - return _deduplicated(new_pure, new_plat) - - old_pure, old_plat = _distutils.get_prefixed_libs(prefix) - old_lib_paths = _deduplicated(old_pure, old_plat) - - # Apple's Python (shipped with Xcode and Command Line Tools) hard-code - # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will - # cause serious build isolation bugs when Apple starts shipping 3.10 because - # pip will install build backends to the wrong location. This tells users - # who is at fault so Apple may notice it and fix the issue in time. - if all(_looks_like_apple_library(p) for p in old_lib_paths): - deprecated( - reason=( - "Python distributed by Apple's Command Line Tools incorrectly " - "patches sysconfig to always point to '/Library/Python'. This " - "will cause build isolation to operate incorrectly on Python " - "3.10 or later. Please help report this to Apple so they can " - "fix this. 
https://developer.apple.com/bug-reporting/" - ), - replacement=None, - gone_in=None, - ) - return old_lib_paths - - warned = [ - _warn_if_mismatch( - pathlib.Path(old_pure), - pathlib.Path(new_pure), - key="prefixed-purelib", - ), - _warn_if_mismatch( - pathlib.Path(old_plat), - pathlib.Path(new_plat), - key="prefixed-platlib", - ), - ] - if any(warned): - _log_context(prefix=prefix) - - return old_lib_paths diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index e28b0b6..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc deleted file mode 100644 index 6ebf97a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc deleted file mode 100644 index f7ec380..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc deleted file mode 100644 index 3b32113..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/locations/_distutils.py b/venv/Lib/site-packages/pip/_internal/locations/_distutils.py deleted file mode 100644 index 2ec79e6..0000000 --- a/venv/Lib/site-packages/pip/_internal/locations/_distutils.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Locations where we look for configs, install stuff, etc""" - -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import logging -import os -import sys -from distutils.cmd import Command as DistutilsCommand -from distutils.command.install import SCHEME_KEYS -from distutils.command.install import install as distutils_install_command -from distutils.sysconfig import get_python_lib -from typing import Dict, List, Optional, Tuple, Union, cast - -from pip._internal.models.scheme import Scheme -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.virtualenv import running_under_virtualenv - -from .base import get_major_minor_version - -logger = logging.getLogger(__name__) - - -def distutils_scheme( - dist_name: str, - user: bool = False, - home: str = None, - root: str = None, - isolated: bool = False, - prefix: str = None, - *, - ignore_config_files: bool = False, -) -> Dict[str, str]: - """ - Return a distutils install scheme - """ - from distutils.dist import Distribution - - dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name} - if isolated: - dist_args["script_args"] = ["--no-user-cfg"] - - d = Distribution(dist_args) - if not ignore_config_files: - try: - d.parse_config_files() - except UnicodeDecodeError: - # Typeshed does not include find_config_files() for some reason. 
- paths = d.find_config_files() # type: ignore - logger.warning( - "Ignore distutils configs in %s due to encoding errors.", - ", ".join(os.path.basename(p) for p in paths), - ) - obj: Optional[DistutilsCommand] = None - obj = d.get_command_obj("install", create=True) - assert obj is not None - i = cast(distutils_install_command, obj) - # NOTE: setting user or home has the side-effect of creating the home dir - # or user base for installations during finalize_options() - # ideally, we'd prefer a scheme class that has no side-effects. - assert not (user and prefix), f"user={user} prefix={prefix}" - assert not (home and prefix), f"home={home} prefix={prefix}" - i.user = user or i.user - if user or home: - i.prefix = "" - i.prefix = prefix or i.prefix - i.home = home or i.home - i.root = root or i.root - i.finalize_options() - - scheme = {} - for key in SCHEME_KEYS: - scheme[key] = getattr(i, "install_" + key) - - # install_lib specified in setup.cfg should install *everything* - # into there (i.e. it takes precedence over both purelib and - # platlib). Note, i.install_lib is *always* set after - # finalize_options(); we only want to override here if the user - # has explicitly requested it hence going back to the config - if "install_lib" in d.get_option_dict("install"): - scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) - - if running_under_virtualenv(): - if home: - prefix = home - elif user: - prefix = i.install_userbase # type: ignore - else: - prefix = i.prefix - scheme["headers"] = os.path.join( - prefix, - "include", - "site", - f"python{get_major_minor_version()}", - dist_name, - ) - - if root is not None: - path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1] - scheme["headers"] = os.path.join(root, path_no_drive[1:]) - - return scheme - - -def get_scheme( - dist_name: str, - user: bool = False, - home: Optional[str] = None, - root: Optional[str] = None, - isolated: bool = False, - prefix: Optional[str] = None, -) -> Scheme: - """ - Get the "scheme" corresponding to the input parameters. The distutils - documentation provides the context for the available schemes: - https://docs.python.org/3/install/index.html#alternate-installation - - :param dist_name: the name of the package to retrieve the scheme for, used - in the headers scheme path - :param user: indicates to use the "user" scheme - :param home: indicates to use the "home" scheme and provides the base - directory for the same - :param root: root under which other directories are re-based - :param isolated: equivalent to --no-user-cfg, i.e. do not consider - ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for - scheme paths - :param prefix: indicates to use the "prefix" scheme and provides the - base directory for the same - """ - scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix) - return Scheme( - platlib=scheme["platlib"], - purelib=scheme["purelib"], - headers=scheme["headers"], - scripts=scheme["scripts"], - data=scheme["data"], - ) - - -def get_bin_prefix() -> str: - # XXX: In old virtualenv versions, sys.prefix can contain '..' components, - # so we need to call normpath to eliminate them. - prefix = os.path.normpath(sys.prefix) - if WINDOWS: - bin_py = os.path.join(prefix, "Scripts") - # buildout uses 'bin' on Windows too? 
- if not os.path.exists(bin_py): - bin_py = os.path.join(prefix, "bin") - return bin_py - # Forcing to use /usr/local/bin for standard macOS framework installs - # Also log to ~/Library/Logs/ for use with the Console.app log viewer - if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/": - return "/usr/local/bin" - return os.path.join(prefix, "bin") - - -def get_purelib() -> str: - return get_python_lib(plat_specific=False) - - -def get_platlib() -> str: - return get_python_lib(plat_specific=True) - - -def get_prefixed_libs(prefix: str) -> Tuple[str, str]: - return ( - get_python_lib(plat_specific=False, prefix=prefix), - get_python_lib(plat_specific=True, prefix=prefix), - ) diff --git a/venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py b/venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py deleted file mode 100644 index 5e141aa..0000000 --- a/venv/Lib/site-packages/pip/_internal/locations/_sysconfig.py +++ /dev/null @@ -1,219 +0,0 @@ -import distutils.util # FIXME: For change_root. -import logging -import os -import sys -import sysconfig -import typing - -from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid -from pip._internal.models.scheme import SCHEME_KEYS, Scheme -from pip._internal.utils.virtualenv import running_under_virtualenv - -from .base import get_major_minor_version, is_osx_framework - -logger = logging.getLogger(__name__) - - -# Notes on _infer_* functions. -# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no -# way to ask things like "what is the '_prefix' scheme on this platform". These -# functions try to answer that with some heuristics while accounting for ad-hoc -# platforms not covered by CPython's default sysconfig implementation. If the -# ad-hoc implementation does not fully implement sysconfig, we'll fall back to -# a POSIX scheme. - -_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) - -_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None) - - -def _should_use_osx_framework_prefix() -> bool: - """Check for Apple's ``osx_framework_library`` scheme. - - Python distributed by Apple's Command Line Tools has this special scheme - that's used when: - - * This is a framework build. - * We are installing into the system prefix. - - This does not account for ``pip install --prefix`` (also means we're not - installing to the system prefix), which should use ``posix_prefix``, but - logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But - since ``prefix`` is not available for ``sysconfig.get_default_scheme()``, - which is the stdlib replacement for ``_infer_prefix()``, presumably Apple - wouldn't be able to magically switch between ``osx_framework_library`` and - ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library`` - means its behavior is consistent whether we use the stdlib implementation - or our own, and we deal with this special case in ``get_scheme()`` instead. - """ - return ( - "osx_framework_library" in _AVAILABLE_SCHEMES - and not running_under_virtualenv() - and is_osx_framework() - ) - - -def _infer_prefix() -> str: - """Try to find a prefix scheme for the current platform. - - This tries: - - * A special ``osx_framework_library`` for Python distributed by Apple's - Command Line Tools, when not running in a virtual environment. - * Implementation + OS, used by PyPy on Windows (``pypy_nt``). - * Implementation without OS, used by PyPy on POSIX (``pypy``). 
- * OS + "prefix", used by CPython on POSIX (``posix_prefix``). - * Just the OS name, used by CPython on Windows (``nt``). - - If none of the above works, fall back to ``posix_prefix``. - """ - if _PREFERRED_SCHEME_API: - return _PREFERRED_SCHEME_API("prefix") - if _should_use_osx_framework_prefix(): - return "osx_framework_library" - implementation_suffixed = f"{sys.implementation.name}_{os.name}" - if implementation_suffixed in _AVAILABLE_SCHEMES: - return implementation_suffixed - if sys.implementation.name in _AVAILABLE_SCHEMES: - return sys.implementation.name - suffixed = f"{os.name}_prefix" - if suffixed in _AVAILABLE_SCHEMES: - return suffixed - if os.name in _AVAILABLE_SCHEMES: # On Windows, prefx is just called "nt". - return os.name - return "posix_prefix" - - -def _infer_user() -> str: - """Try to find a user scheme for the current platform.""" - if _PREFERRED_SCHEME_API: - return _PREFERRED_SCHEME_API("user") - if is_osx_framework() and not running_under_virtualenv(): - suffixed = "osx_framework_user" - else: - suffixed = f"{os.name}_user" - if suffixed in _AVAILABLE_SCHEMES: - return suffixed - if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable. - raise UserInstallationInvalid() - return "posix_user" - - -def _infer_home() -> str: - """Try to find a home for the current platform.""" - if _PREFERRED_SCHEME_API: - return _PREFERRED_SCHEME_API("home") - suffixed = f"{os.name}_home" - if suffixed in _AVAILABLE_SCHEMES: - return suffixed - return "posix_home" - - -# Update these keys if the user sets a custom home. -_HOME_KEYS = [ - "installed_base", - "base", - "installed_platbase", - "platbase", - "prefix", - "exec_prefix", -] -if sysconfig.get_config_var("userbase") is not None: - _HOME_KEYS.append("userbase") - - -def get_scheme( - dist_name: str, - user: bool = False, - home: typing.Optional[str] = None, - root: typing.Optional[str] = None, - isolated: bool = False, - prefix: typing.Optional[str] = None, -) -> Scheme: - """ - Get the "scheme" corresponding to the input parameters. - - :param dist_name: the name of the package to retrieve the scheme for, used - in the headers scheme path - :param user: indicates to use the "user" scheme - :param home: indicates to use the "home" scheme - :param root: root under which other directories are re-based - :param isolated: ignored, but kept for distutils compatibility (where - this controls whether the user-site pydistutils.cfg is honored) - :param prefix: indicates to use the "prefix" scheme and provides the - base directory for the same - """ - if user and prefix: - raise InvalidSchemeCombination("--user", "--prefix") - if home and prefix: - raise InvalidSchemeCombination("--home", "--prefix") - - if home is not None: - scheme_name = _infer_home() - elif user: - scheme_name = _infer_user() - else: - scheme_name = _infer_prefix() - - # Special case: When installing into a custom prefix, use posix_prefix - # instead of osx_framework_library. See _should_use_osx_framework_prefix() - # docstring for details. - if prefix is not None and scheme_name == "osx_framework_library": - scheme_name = "posix_prefix" - - if home is not None: - variables = {k: home for k in _HOME_KEYS} - elif prefix is not None: - variables = {k: prefix for k in _HOME_KEYS} - else: - variables = {} - - paths = sysconfig.get_paths(scheme=scheme_name, vars=variables) - - # Logic here is very arbitrary, we're doing it for compatibility, don't ask. - # 1. Pip historically uses a special header path in virtual environments. - # 2. 
If the distribution name is not known, distutils uses 'UNKNOWN'. We - # only do the same when not running in a virtual environment because - # pip's historical header path logic (see point 1) did not do this. - if running_under_virtualenv(): - if user: - base = variables.get("userbase", sys.prefix) - else: - base = variables.get("base", sys.prefix) - python_xy = f"python{get_major_minor_version()}" - paths["include"] = os.path.join(base, "include", "site", python_xy) - elif not dist_name: - dist_name = "UNKNOWN" - - scheme = Scheme( - platlib=paths["platlib"], - purelib=paths["purelib"], - headers=os.path.join(paths["include"], dist_name), - scripts=paths["scripts"], - data=paths["data"], - ) - if root is not None: - for key in SCHEME_KEYS: - value = distutils.util.change_root(root, getattr(scheme, key)) - setattr(scheme, key, value) - return scheme - - -def get_bin_prefix() -> str: - # Forcing to use /usr/local/bin for standard macOS framework installs. - if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": - return "/usr/local/bin" - return sysconfig.get_paths()["scripts"] - - -def get_purelib() -> str: - return sysconfig.get_paths()["purelib"] - - -def get_platlib() -> str: - return sysconfig.get_paths()["platlib"] - - -def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]: - paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix}) - return (paths["purelib"], paths["platlib"]) diff --git a/venv/Lib/site-packages/pip/_internal/locations/base.py b/venv/Lib/site-packages/pip/_internal/locations/base.py deleted file mode 100644 index 86dad4a..0000000 --- a/venv/Lib/site-packages/pip/_internal/locations/base.py +++ /dev/null @@ -1,52 +0,0 @@ -import functools -import os -import site -import sys -import sysconfig -import typing - -from pip._internal.utils import appdirs -from pip._internal.utils.virtualenv import running_under_virtualenv - -# Application Directories -USER_CACHE_DIR = appdirs.user_cache_dir("pip") - -# FIXME doesn't account for venv linked to global site-packages -site_packages: typing.Optional[str] = sysconfig.get_path("purelib") - - -def get_major_minor_version() -> str: - """ - Return the major-minor version of the current Python as a string, e.g. - "3.7" or "3.10". - """ - return "{}.{}".format(*sys.version_info) - - -def get_src_prefix() -> str: - if running_under_virtualenv(): - src_prefix = os.path.join(sys.prefix, "src") - else: - # FIXME: keep src in cwd for now (it is not a temporary folder) - try: - src_prefix = os.path.join(os.getcwd(), "src") - except OSError: - # In case the current working directory has been renamed or deleted - sys.exit("The folder you are executing pip from can no longer be found.") - - # under macOS + virtualenv sys.prefix is not properly resolved - # it is something like /path/to/python/bin/.. - return os.path.abspath(src_prefix) - - -try: - # Use getusersitepackages if this is present, as it ensures that the - # value is initialised properly. 
- user_site: typing.Optional[str] = site.getusersitepackages() -except AttributeError: - user_site = site.USER_SITE - - -@functools.lru_cache(maxsize=None) -def is_osx_framework() -> bool: - return bool(sysconfig.get_config_var("PYTHONFRAMEWORK")) diff --git a/venv/Lib/site-packages/pip/_internal/main.py b/venv/Lib/site-packages/pip/_internal/main.py deleted file mode 100644 index 33c6d24..0000000 --- a/venv/Lib/site-packages/pip/_internal/main.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import List, Optional - - -def main(args: Optional[List[str]] = None) -> int: - """This is preserved for old console scripts that may still be referencing - it. - - For additional details, see https://github.com/pypa/pip/issues/7498. - """ - from pip._internal.utils.entrypoints import _wrapper - - return _wrapper(args) diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__init__.py b/venv/Lib/site-packages/pip/_internal/metadata/__init__.py deleted file mode 100644 index cc037c1..0000000 --- a/venv/Lib/site-packages/pip/_internal/metadata/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -from typing import List, Optional - -from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel - -__all__ = [ - "BaseDistribution", - "BaseEnvironment", - "FilesystemWheel", - "MemoryWheel", - "Wheel", - "get_default_environment", - "get_environment", - "get_wheel_distribution", -] - - -def get_default_environment() -> BaseEnvironment: - """Get the default representation for the current environment. - - This returns an Environment instance from the chosen backend. The default - Environment instance should be built from ``sys.path`` and may use caching - to share instance state accorss calls. - """ - from .pkg_resources import Environment - - return Environment.default() - - -def get_environment(paths: Optional[List[str]]) -> BaseEnvironment: - """Get a representation of the environment specified by ``paths``. - - This returns an Environment instance from the chosen backend based on the - given import paths. The backend must build a fresh instance representing - the state of installed distributions when this function is called. - """ - from .pkg_resources import Environment - - return Environment.from_paths(paths) - - -def get_directory_distribution(directory: str) -> BaseDistribution: - """Get the distribution metadata representation in the specified directory. - - This returns a Distribution instance from the chosen backend based on - the given on-disk ``.dist-info`` directory. - """ - from .pkg_resources import Distribution - - return Distribution.from_directory(directory) - - -def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution: - """Get the representation of the specified wheel's distribution metadata. - - This returns a Distribution instance from the chosen backend based on - the given wheel's ``.dist-info`` directory. - - :param canonical_name: Normalized project name of the given wheel. 
- """ - from .pkg_resources import Distribution - - return Distribution.from_wheel(wheel, canonical_name) diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 8df578e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc deleted file mode 100644 index ec2df9f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc deleted file mode 100644 index f4592c8..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/metadata/base.py b/venv/Lib/site-packages/pip/_internal/metadata/base.py deleted file mode 100644 index 1a5a781..0000000 --- a/venv/Lib/site-packages/pip/_internal/metadata/base.py +++ /dev/null @@ -1,546 +0,0 @@ -import csv -import email.message -import json -import logging -import pathlib -import re -import zipfile -from typing import ( - IO, - TYPE_CHECKING, - Collection, - Container, - Iterable, - Iterator, - List, - Optional, - Tuple, - Union, -) - -from pip._vendor.packaging.requirements import Requirement -from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet -from pip._vendor.packaging.utils import NormalizedName -from pip._vendor.packaging.version import LegacyVersion, Version - -from pip._internal.exceptions import NoneMetadataError -from pip._internal.locations import site_packages, user_site -from pip._internal.models.direct_url import ( - DIRECT_URL_METADATA_NAME, - DirectUrl, - DirectUrlValidationError, -) -from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. -from pip._internal.utils.egg_link import ( - egg_link_path_from_location, - egg_link_path_from_sys_path, -) -from pip._internal.utils.misc import is_local, normalize_path -from pip._internal.utils.urls import url_to_path - -if TYPE_CHECKING: - from typing import Protocol -else: - Protocol = object - -DistributionVersion = Union[LegacyVersion, Version] - -InfoPath = Union[str, pathlib.PurePosixPath] - -logger = logging.getLogger(__name__) - - -class BaseEntryPoint(Protocol): - @property - def name(self) -> str: - raise NotImplementedError() - - @property - def value(self) -> str: - raise NotImplementedError() - - @property - def group(self) -> str: - raise NotImplementedError() - - -def _convert_installed_files_path( - entry: Tuple[str, ...], - info: Tuple[str, ...], -) -> str: - """Convert a legacy installed-files.txt path into modern RECORD path. - - The legacy format stores paths relative to the info directory, while the - modern format stores paths relative to the package root, e.g. the - site-packages directory. - - :param entry: Path parts of the installed-files.txt entry. - :param info: Path parts of the egg-info directory relative to package root. - :returns: The converted entry. 
- - For best compatibility with symlinks, this does not use ``abspath()`` or - ``Path.resolve()``, but tries to work with path parts: - - 1. While ``entry`` starts with ``..``, remove the equal amounts of parts - from ``info``; if ``info`` is empty, start appending ``..`` instead. - 2. Join the two directly. - """ - while entry and entry[0] == "..": - if not info or info[-1] == "..": - info += ("..",) - else: - info = info[:-1] - entry = entry[1:] - return str(pathlib.Path(*info, *entry)) - - -class BaseDistribution(Protocol): - def __repr__(self) -> str: - return f"{self.raw_name} {self.version} ({self.location})" - - def __str__(self) -> str: - return f"{self.raw_name} {self.version}" - - @property - def location(self) -> Optional[str]: - """Where the distribution is loaded from. - - A string value is not necessarily a filesystem path, since distributions - can be loaded from other sources, e.g. arbitrary zip archives. ``None`` - means the distribution is created in-memory. - - Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If - this is a symbolic link, we want to preserve the relative path between - it and files in the distribution. - """ - raise NotImplementedError() - - @property - def editable_project_location(self) -> Optional[str]: - """The project location for editable distributions. - - This is the directory where pyproject.toml or setup.py is located. - None if the distribution is not installed in editable mode. - """ - # TODO: this property is relatively costly to compute, memoize it ? - direct_url = self.direct_url - if direct_url: - if direct_url.is_local_editable(): - return url_to_path(direct_url.url) - else: - # Search for an .egg-link file by walking sys.path, as it was - # done before by dist_is_editable(). - egg_link_path = egg_link_path_from_sys_path(self.raw_name) - if egg_link_path: - # TODO: get project location from second line of egg_link file - # (https://github.com/pypa/pip/issues/10243) - return self.location - return None - - @property - def installed_location(self) -> Optional[str]: - """The distribution's "installed" location. - - This should generally be a ``site-packages`` directory. This is - usually ``dist.location``, except for legacy develop-installed packages, - where ``dist.location`` is the source code location, and this is where - the ``.egg-link`` file is. - - The returned location is normalized (in particular, with symlinks removed). - """ - egg_link = egg_link_path_from_location(self.raw_name) - if egg_link: - location = egg_link - elif self.location: - location = self.location - else: - return None - return normalize_path(location) - - @property - def info_location(self) -> Optional[str]: - """Location of the .[egg|dist]-info directory or file. - - Similarly to ``location``, a string value is not necessarily a - filesystem path. ``None`` means the distribution is created in-memory. - - For a modern .dist-info installation on disk, this should be something - like ``{location}/{raw_name}-{version}.dist-info``. - - Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If - this is a symbolic link, we want to preserve the relative path between - it and other files in the distribution. - """ - raise NotImplementedError() - - @property - def installed_by_distutils(self) -> bool: - """Whether this distribution is installed with legacy distutils format. - - A distribution installed with "raw" distutils not patched by setuptools - uses one single file at ``info_location`` to store metadata. 
We need to - treat this specially on uninstallation. - """ - info_location = self.info_location - if not info_location: - return False - return pathlib.Path(info_location).is_file() - - @property - def installed_as_egg(self) -> bool: - """Whether this distribution is installed as an egg. - - This usually indicates the distribution was installed by (older versions - of) easy_install. - """ - location = self.location - if not location: - return False - return location.endswith(".egg") - - @property - def installed_with_setuptools_egg_info(self) -> bool: - """Whether this distribution is installed with the ``.egg-info`` format. - - This usually indicates the distribution was installed with setuptools - with an old pip version or with ``single-version-externally-managed``. - - Note that this ensures the metadata store is a directory. distutils can - also install an ``.egg-info``, but as a file, not a directory. This - property is *False* for that case. Also see ``installed_by_distutils``. - """ - info_location = self.info_location - if not info_location: - return False - if not info_location.endswith(".egg-info"): - return False - return pathlib.Path(info_location).is_dir() - - @property - def installed_with_dist_info(self) -> bool: - """Whether this distribution is installed with the "modern format". - - This indicates a "modern" installation, e.g. storing metadata in the - ``.dist-info`` directory. This applies to installations made by - setuptools (but through pip, not directly), or anything using the - standardized build backend interface (PEP 517). - """ - info_location = self.info_location - if not info_location: - return False - if not info_location.endswith(".dist-info"): - return False - return pathlib.Path(info_location).is_dir() - - @property - def canonical_name(self) -> NormalizedName: - raise NotImplementedError() - - @property - def version(self) -> DistributionVersion: - raise NotImplementedError() - - @property - def setuptools_filename(self) -> str: - """Convert a project name to its setuptools-compatible filename. - - This is a copy of ``pkg_resources.to_filename()`` for compatibility. - """ - return self.raw_name.replace("-", "_") - - @property - def direct_url(self) -> Optional[DirectUrl]: - """Obtain a DirectUrl from this distribution. - - Returns None if the distribution has no `direct_url.json` metadata, - or if `direct_url.json` is invalid. - """ - try: - content = self.read_text(DIRECT_URL_METADATA_NAME) - except FileNotFoundError: - return None - try: - return DirectUrl.from_json(content) - except ( - UnicodeDecodeError, - json.JSONDecodeError, - DirectUrlValidationError, - ) as e: - logger.warning( - "Error parsing %s for %s: %s", - DIRECT_URL_METADATA_NAME, - self.canonical_name, - e, - ) - return None - - @property - def installer(self) -> str: - try: - installer_text = self.read_text("INSTALLER") - except (OSError, ValueError, NoneMetadataError): - return "" # Fail silently if the installer file cannot be read. - for line in installer_text.splitlines(): - cleaned_line = line.strip() - if cleaned_line: - return cleaned_line - return "" - - @property - def editable(self) -> bool: - return bool(self.editable_project_location) - - @property - def local(self) -> bool: - """If distribution is installed in the current virtual environment. - - Always True if we're not in a virtualenv.
- """ - if self.installed_location is None: - return False - return is_local(self.installed_location) - - @property - def in_usersite(self) -> bool: - if self.installed_location is None or user_site is None: - return False - return self.installed_location.startswith(normalize_path(user_site)) - - @property - def in_site_packages(self) -> bool: - if self.installed_location is None or site_packages is None: - return False - return self.installed_location.startswith(normalize_path(site_packages)) - - def is_file(self, path: InfoPath) -> bool: - """Check whether an entry in the info directory is a file.""" - raise NotImplementedError() - - def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: - """Iterate through a directory in the info directory. - - Each item yielded would be a path relative to the info directory. - - :raise FileNotFoundError: If ``name`` does not exist in the directory. - :raise NotADirectoryError: If ``name`` does not point to a directory. - """ - raise NotImplementedError() - - def read_text(self, path: InfoPath) -> str: - """Read a file in the info directory. - - :raise FileNotFoundError: If ``name`` does not exist in the directory. - :raise NoneMetadataError: If ``name`` exists in the info directory, but - cannot be read. - """ - raise NotImplementedError() - - def iter_entry_points(self) -> Iterable[BaseEntryPoint]: - raise NotImplementedError() - - @property - def metadata(self) -> email.message.Message: - """Metadata of distribution parsed from e.g. METADATA or PKG-INFO. - - This should return an empty message if the metadata file is unavailable. - - :raises NoneMetadataError: If the metadata file is available, but does - not contain valid metadata. - """ - raise NotImplementedError() - - @property - def metadata_version(self) -> Optional[str]: - """Value of "Metadata-Version:" in distribution metadata, if available.""" - return self.metadata.get("Metadata-Version") - - @property - def raw_name(self) -> str: - """Value of "Name:" in distribution metadata.""" - # The metadata should NEVER be missing the Name: key, but if it somehow - # does, fall back to the known canonical name. - return self.metadata.get("Name", self.canonical_name) - - @property - def requires_python(self) -> SpecifierSet: - """Value of "Requires-Python:" in distribution metadata. - - If the key does not exist or contains an invalid value, an empty - SpecifierSet should be returned. - """ - value = self.metadata.get("Requires-Python") - if value is None: - return SpecifierSet() - try: - # Convert to str to satisfy the type checker; this can be a Header object. - spec = SpecifierSet(str(value)) - except InvalidSpecifier as e: - message = "Package %r has an invalid Requires-Python: %s" - logger.warning(message, self.raw_name, e) - return SpecifierSet() - return spec - - def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: - """Dependencies of this distribution. - - For modern .dist-info distributions, this is the collection of - "Requires-Dist:" entries in distribution metadata. - """ - raise NotImplementedError() - - def iter_provided_extras(self) -> Iterable[str]: - """Extras provided by this distribution. - - For modern .dist-info distributions, this is the collection of - "Provides-Extra:" entries in distribution metadata. 
- """ - raise NotImplementedError() - - def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]: - try: - text = self.read_text("RECORD") - except FileNotFoundError: - return None - # This extra Path-str cast normalizes entries. - return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines())) - - def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]: - try: - text = self.read_text("installed-files.txt") - except FileNotFoundError: - return None - paths = (p for p in text.splitlines(keepends=False) if p) - root = self.location - info = self.info_location - if root is None or info is None: - return paths - try: - info_rel = pathlib.Path(info).relative_to(root) - except ValueError: # info is not relative to root. - return paths - if not info_rel.parts: # info *is* root. - return paths - return ( - _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts) - for p in paths - ) - - def iter_declared_entries(self) -> Optional[Iterator[str]]: - """Iterate through file entires declared in this distribution. - - For modern .dist-info distributions, this is the files listed in the - ``RECORD`` metadata file. For legacy setuptools distributions, this - comes from ``installed-files.txt``, with entries normalized to be - compatible with the format used by ``RECORD``. - - :return: An iterator for listed entries, or None if the distribution - contains neither ``RECORD`` nor ``installed-files.txt``. - """ - return ( - self._iter_declared_entries_from_record() - or self._iter_declared_entries_from_legacy() - ) - - -class BaseEnvironment: - """An environment containing distributions to introspect.""" - - @classmethod - def default(cls) -> "BaseEnvironment": - raise NotImplementedError() - - @classmethod - def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment": - raise NotImplementedError() - - def get_distribution(self, name: str) -> Optional["BaseDistribution"]: - """Given a requirement name, return the installed distributions. - - The name may not be normalized. The implementation must canonicalize - it for lookup. - """ - raise NotImplementedError() - - def _iter_distributions(self) -> Iterator["BaseDistribution"]: - """Iterate through installed distributions. - - This function should be implemented by subclass, but never called - directly. Use the public ``iter_distribution()`` instead, which - implements additional logic to make sure the distributions are valid. - """ - raise NotImplementedError() - - def iter_distributions(self) -> Iterator["BaseDistribution"]: - """Iterate through installed distributions.""" - for dist in self._iter_distributions(): - # Make sure the distribution actually comes from a valid Python - # packaging distribution. Pip's AdjacentTempDirectory leaves folders - # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The - # valid project name pattern is taken from PEP 508. - project_name_valid = re.match( - r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", - dist.canonical_name, - flags=re.IGNORECASE, - ) - if not project_name_valid: - logger.warning( - "Ignoring invalid distribution %s (%s)", - dist.canonical_name, - dist.location, - ) - continue - yield dist - - def iter_installed_distributions( - self, - local_only: bool = True, - skip: Container[str] = stdlib_pkgs, - include_editables: bool = True, - editables_only: bool = False, - user_only: bool = False, - ) -> Iterator[BaseDistribution]: - """Return a list of installed distributions. 
- - :param local_only: If True (default), only return installations - local to the current virtualenv, if in a virtualenv. - :param skip: An iterable of canonicalized project names to ignore; - defaults to ``stdlib_pkgs``. - :param include_editables: If False, don't report editables. - :param editables_only: If True, only report editables. - :param user_only: If True, only report installations in the user - site directory. - """ - it = self.iter_distributions() - if local_only: - it = (d for d in it if d.local) - if not include_editables: - it = (d for d in it if not d.editable) - if editables_only: - it = (d for d in it if d.editable) - if user_only: - it = (d for d in it if d.in_usersite) - return (d for d in it if d.canonical_name not in skip) - - -class Wheel(Protocol): - location: str - - def as_zipfile(self) -> zipfile.ZipFile: - raise NotImplementedError() - - -class FilesystemWheel(Wheel): - def __init__(self, location: str) -> None: - self.location = location - - def as_zipfile(self) -> zipfile.ZipFile: - return zipfile.ZipFile(self.location, allowZip64=True) - - -class MemoryWheel(Wheel): - def __init__(self, location: str, stream: IO[bytes]) -> None: - self.location = location - self.stream = stream - - def as_zipfile(self) -> zipfile.ZipFile: - return zipfile.ZipFile(self.stream, allowZip64=True) diff --git a/venv/Lib/site-packages/pip/_internal/metadata/pkg_resources.py b/venv/Lib/site-packages/pip/_internal/metadata/pkg_resources.py deleted file mode 100644 index d39f0ba..0000000 --- a/venv/Lib/site-packages/pip/_internal/metadata/pkg_resources.py +++ /dev/null @@ -1,256 +0,0 @@ -import email.message -import email.parser -import logging -import os -import pathlib -import zipfile -from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional - -from pip._vendor import pkg_resources -from pip._vendor.packaging.requirements import Requirement -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name -from pip._vendor.packaging.version import parse as parse_version - -from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel -from pip._internal.utils.misc import display_path -from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file - -from .base import ( - BaseDistribution, - BaseEntryPoint, - BaseEnvironment, - DistributionVersion, - InfoPath, - Wheel, -) - -logger = logging.getLogger(__name__) - - -class EntryPoint(NamedTuple): - name: str - value: str - group: str - - -class WheelMetadata: - """IMetadataProvider that reads metadata files from a dictionary. - - This also maps metadata decoding exceptions to our internal exception type. - """ - - def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None: - self._metadata = metadata - self._wheel_name = wheel_name - - def has_metadata(self, name: str) -> bool: - return name in self._metadata - - def get_metadata(self, name: str) -> str: - try: - return self._metadata[name].decode() - except UnicodeDecodeError as e: - # Augment the default error with the origin of the file. 
- raise UnsupportedWheel( - f"Error decoding metadata for {self._wheel_name}: {e} in {name} file" - ) - - def get_metadata_lines(self, name: str) -> Iterable[str]: - return pkg_resources.yield_lines(self.get_metadata(name)) - - def metadata_isdir(self, name: str) -> bool: - return False - - def metadata_listdir(self, name: str) -> List[str]: - return [] - - def run_script(self, script_name: str, namespace: str) -> None: - pass - - -class Distribution(BaseDistribution): - def __init__(self, dist: pkg_resources.Distribution) -> None: - self._dist = dist - - @classmethod - def from_directory(cls, directory: str) -> "Distribution": - dist_dir = directory.rstrip(os.sep) - - # Build a PathMetadata object, from path to metadata. :wink: - base_dir, dist_dir_name = os.path.split(dist_dir) - metadata = pkg_resources.PathMetadata(base_dir, dist_dir) - - # Determine the correct Distribution object type. - if dist_dir.endswith(".egg-info"): - dist_cls = pkg_resources.Distribution - dist_name = os.path.splitext(dist_dir_name)[0] - else: - assert dist_dir.endswith(".dist-info") - dist_cls = pkg_resources.DistInfoDistribution - dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] - - dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata) - return cls(dist) - - @classmethod - def from_wheel(cls, wheel: Wheel, name: str) -> "Distribution": - """Load the distribution from a given wheel. - - :raises InvalidWheel: Whenever loading of the wheel causes a - :py:exc:`zipfile.BadZipFile` exception to be thrown. - :raises UnsupportedWheel: If the wheel is a valid zip, but malformed - internally. - """ - try: - with wheel.as_zipfile() as zf: - info_dir, _ = parse_wheel(zf, name) - metadata_text = { - path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path) - for path in zf.namelist() - if path.startswith(f"{info_dir}/") - } - except zipfile.BadZipFile as e: - raise InvalidWheel(wheel.location, name) from e - except UnsupportedWheel as e: - raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") - dist = pkg_resources.DistInfoDistribution( - location=wheel.location, - metadata=WheelMetadata(metadata_text, wheel.location), - project_name=name, - ) - return cls(dist) - - @property - def location(self) -> Optional[str]: - return self._dist.location - - @property - def info_location(self) -> Optional[str]: - return self._dist.egg_info - - @property - def installed_by_distutils(self) -> bool: - # A distutils-installed distribution is provided by FileMetadata. This - # provider has a "path" attribute not present anywhere else. Not the - # best introspection logic, but pip has been doing this for a long time. 
- try: - return bool(self._dist._provider.path) - except AttributeError: - return False - - @property - def canonical_name(self) -> NormalizedName: - return canonicalize_name(self._dist.project_name) - - @property - def version(self) -> DistributionVersion: - return parse_version(self._dist.version) - - def is_file(self, path: InfoPath) -> bool: - return self._dist.has_metadata(str(path)) - - def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: - name = str(path) - if not self._dist.has_metadata(name): - raise FileNotFoundError(name) - if not self._dist.isdir(name): - raise NotADirectoryError(name) - for child in self._dist.metadata_listdir(name): - yield pathlib.PurePosixPath(path, child) - - def read_text(self, path: InfoPath) -> str: - name = str(path) - if not self._dist.has_metadata(name): - raise FileNotFoundError(name) - content = self._dist.get_metadata(name) - if content is None: - raise NoneMetadataError(self, name) - return content - - def iter_entry_points(self) -> Iterable[BaseEntryPoint]: - for group, entries in self._dist.get_entry_map().items(): - for name, entry_point in entries.items(): - name, _, value = str(entry_point).partition("=") - yield EntryPoint(name=name.strip(), value=value.strip(), group=group) - - @property - def metadata(self) -> email.message.Message: - """ - :raises NoneMetadataError: if the distribution reports `has_metadata()` - True but `get_metadata()` returns None. - """ - if isinstance(self._dist, pkg_resources.DistInfoDistribution): - metadata_name = "METADATA" - else: - metadata_name = "PKG-INFO" - try: - metadata = self.read_text(metadata_name) - except FileNotFoundError: - if self.location: - displaying_path = display_path(self.location) - else: - displaying_path = repr(self.location) - logger.warning("No metadata found in %s", displaying_path) - metadata = "" - feed_parser = email.parser.FeedParser() - feed_parser.feed(metadata) - return feed_parser.close() - - def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: - if extras: # pkg_resources raises on invalid extras, so we sanitize. - extras = frozenset(extras).intersection(self._dist.extras) - return self._dist.requires(extras) - - def iter_provided_extras(self) -> Iterable[str]: - return self._dist.extras - - -class Environment(BaseEnvironment): - def __init__(self, ws: pkg_resources.WorkingSet) -> None: - self._ws = ws - - @classmethod - def default(cls) -> BaseEnvironment: - return cls(pkg_resources.working_set) - - @classmethod - def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: - return cls(pkg_resources.WorkingSet(paths)) - - def _search_distribution(self, name: str) -> Optional[BaseDistribution]: - """Find a distribution matching the ``name`` in the environment. - - This searches from *all* distributions available in the environment, to - match the behavior of ``pkg_resources.get_distribution()``. - """ - canonical_name = canonicalize_name(name) - for dist in self.iter_distributions(): - if dist.canonical_name == canonical_name: - return dist - return None - - def get_distribution(self, name: str) -> Optional[BaseDistribution]: - # Search the distribution by looking through the working set. - dist = self._search_distribution(name) - if dist: - return dist - - # If distribution could not be found, call working_set.require to - # update the working set, and try to find the distribution again. - # This might happen for e.g. when you install a package twice, once - # using setup.py develop and again using setup.py install. 
Now when - # running pip uninstall twice, the package gets removed from the - # working set in the first uninstall, so we have to populate the - # working set again so that pip knows about it and the packages gets - # picked up and is successfully uninstalled the second time too. - try: - # We didn't pass in any version specifiers, so this can never - # raise pkg_resources.VersionConflict. - self._ws.require(name) - except pkg_resources.DistributionNotFound: - return None - return self._search_distribution(name) - - def _iter_distributions(self) -> Iterator[BaseDistribution]: - for dist in self._ws: - yield Distribution(dist) diff --git a/venv/Lib/site-packages/pip/_internal/models/__init__.py b/venv/Lib/site-packages/pip/_internal/models/__init__.py deleted file mode 100644 index 7855226..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -"""A package that contains models that represent entities. -""" diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index f98b68b..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc deleted file mode 100644 index b94d818..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc deleted file mode 100644 index 2dbd8b2..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc deleted file mode 100644 index 664d731..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc deleted file mode 100644 index 9c58517..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc deleted file mode 100644 index 68cd567..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc deleted file mode 100644 index 918de24..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc deleted file mode 100644 index 
b93ddf3..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc deleted file mode 100644 index 945d215..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc deleted file mode 100644 index 347d561..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index 3766e97..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/models/candidate.py b/venv/Lib/site-packages/pip/_internal/models/candidate.py deleted file mode 100644 index a4963ae..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/candidate.py +++ /dev/null @@ -1,34 +0,0 @@ -from pip._vendor.packaging.version import parse as parse_version - -from pip._internal.models.link import Link -from pip._internal.utils.models import KeyBasedCompareMixin - - -class InstallationCandidate(KeyBasedCompareMixin): - """Represents a potential "candidate" for installation.""" - - __slots__ = ["name", "version", "link"] - - def __init__(self, name: str, version: str, link: Link) -> None: - self.name = name - self.version = parse_version(version) - self.link = link - - super().__init__( - key=(self.name, self.version, self.link), - defining_class=InstallationCandidate, - ) - - def __repr__(self) -> str: - return "<InstallationCandidate({!r}, {!r}, {!r})>".format( - self.name, - self.version, - self.link, - ) - - def __str__(self) -> str: - return "{!r} candidate (version {} at {})".format( - self.name, - self.version, - self.link, - ) diff --git a/venv/Lib/site-packages/pip/_internal/models/direct_url.py b/venv/Lib/site-packages/pip/_internal/models/direct_url.py deleted file mode 100644 index 92060d4..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/direct_url.py +++ /dev/null @@ -1,220 +0,0 @@ -""" PEP 610 """ -import json -import re -import urllib.parse -from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union - -__all__ = [ - "DirectUrl", - "DirectUrlValidationError", - "DirInfo", - "ArchiveInfo", - "VcsInfo", -] - -T = TypeVar("T") - -DIRECT_URL_METADATA_NAME = "direct_url.json" -ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") - - -class DirectUrlValidationError(Exception): - pass - - -def _get( - d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None -) -> Optional[T]: - """Get value from dictionary and verify expected type.""" - if key not in d: - return default - value = d[key] - if not isinstance(value, expected_type): - raise DirectUrlValidationError( - "{!r} has unexpected type for {} (expected {})".format( - value, key, expected_type - ) - ) - return value - - -def _get_required( - d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None -) -> T: - value = _get(d, expected_type, key,
default) - if value is None: - raise DirectUrlValidationError(f"{key} must have a value") - return value - - -def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType": - infos = [info for info in infos if info is not None] - if not infos: - raise DirectUrlValidationError( - "missing one of archive_info, dir_info, vcs_info" - ) - if len(infos) > 1: - raise DirectUrlValidationError( - "more than one of archive_info, dir_info, vcs_info" - ) - assert infos[0] is not None - return infos[0] - - -def _filter_none(**kwargs: Any) -> Dict[str, Any]: - """Make dict excluding None values.""" - return {k: v for k, v in kwargs.items() if v is not None} - - -class VcsInfo: - name = "vcs_info" - - def __init__( - self, - vcs: str, - commit_id: str, - requested_revision: Optional[str] = None, - resolved_revision: Optional[str] = None, - resolved_revision_type: Optional[str] = None, - ) -> None: - self.vcs = vcs - self.requested_revision = requested_revision - self.commit_id = commit_id - self.resolved_revision = resolved_revision - self.resolved_revision_type = resolved_revision_type - - @classmethod - def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: - if d is None: - return None - return cls( - vcs=_get_required(d, str, "vcs"), - commit_id=_get_required(d, str, "commit_id"), - requested_revision=_get(d, str, "requested_revision"), - resolved_revision=_get(d, str, "resolved_revision"), - resolved_revision_type=_get(d, str, "resolved_revision_type"), - ) - - def _to_dict(self) -> Dict[str, Any]: - return _filter_none( - vcs=self.vcs, - requested_revision=self.requested_revision, - commit_id=self.commit_id, - resolved_revision=self.resolved_revision, - resolved_revision_type=self.resolved_revision_type, - ) - - -class ArchiveInfo: - name = "archive_info" - - def __init__( - self, - hash: Optional[str] = None, - ) -> None: - self.hash = hash - - @classmethod - def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: - if d is None: - return None - return cls(hash=_get(d, str, "hash")) - - def _to_dict(self) -> Dict[str, Any]: - return _filter_none(hash=self.hash) - - -class DirInfo: - name = "dir_info" - - def __init__( - self, - editable: bool = False, - ) -> None: - self.editable = editable - - @classmethod - def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]: - if d is None: - return None - return cls(editable=_get_required(d, bool, "editable", default=False)) - - def _to_dict(self) -> Dict[str, Any]: - return _filter_none(editable=self.editable or None) - - -InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] - - -class DirectUrl: - def __init__( - self, - url: str, - info: InfoType, - subdirectory: Optional[str] = None, - ) -> None: - self.url = url - self.info = info - self.subdirectory = subdirectory - - def _remove_auth_from_netloc(self, netloc: str) -> str: - if "@" not in netloc: - return netloc - user_pass, netloc_no_user_pass = netloc.split("@", 1) - if ( - isinstance(self.info, VcsInfo) - and self.info.vcs == "git" - and user_pass == "git" - ): - return netloc - if ENV_VAR_RE.match(user_pass): - return netloc - return netloc_no_user_pass - - @property - def redacted_url(self) -> str: - """url with user:password part removed unless it is formed with - environment variables as specified in PEP 610, or it is ``git`` - in the case of a git URL. 
- """ - purl = urllib.parse.urlsplit(self.url) - netloc = self._remove_auth_from_netloc(purl.netloc) - surl = urllib.parse.urlunsplit( - (purl.scheme, netloc, purl.path, purl.query, purl.fragment) - ) - return surl - - def validate(self) -> None: - self.from_dict(self.to_dict()) - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl": - return DirectUrl( - url=_get_required(d, str, "url"), - subdirectory=_get(d, str, "subdirectory"), - info=_exactly_one_of( - [ - ArchiveInfo._from_dict(_get(d, dict, "archive_info")), - DirInfo._from_dict(_get(d, dict, "dir_info")), - VcsInfo._from_dict(_get(d, dict, "vcs_info")), - ] - ), - ) - - def to_dict(self) -> Dict[str, Any]: - res = _filter_none( - url=self.redacted_url, - subdirectory=self.subdirectory, - ) - res[self.info.name] = self.info._to_dict() - return res - - @classmethod - def from_json(cls, s: str) -> "DirectUrl": - return cls.from_dict(json.loads(s)) - - def to_json(self) -> str: - return json.dumps(self.to_dict(), sort_keys=True) - - def is_local_editable(self) -> bool: - return isinstance(self.info, DirInfo) and self.info.editable diff --git a/venv/Lib/site-packages/pip/_internal/models/format_control.py b/venv/Lib/site-packages/pip/_internal/models/format_control.py deleted file mode 100644 index db3995e..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/format_control.py +++ /dev/null @@ -1,80 +0,0 @@ -from typing import FrozenSet, Optional, Set - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.exceptions import CommandError - - -class FormatControl: - """Helper for managing formats from which a package can be installed.""" - - __slots__ = ["no_binary", "only_binary"] - - def __init__( - self, - no_binary: Optional[Set[str]] = None, - only_binary: Optional[Set[str]] = None, - ) -> None: - if no_binary is None: - no_binary = set() - if only_binary is None: - only_binary = set() - - self.no_binary = no_binary - self.only_binary = only_binary - - def __eq__(self, other: object) -> bool: - if not isinstance(other, self.__class__): - return NotImplemented - - if self.__slots__ != other.__slots__: - return False - - return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) - - def __repr__(self) -> str: - return "{}({}, {})".format( - self.__class__.__name__, self.no_binary, self.only_binary - ) - - @staticmethod - def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: - if value.startswith("-"): - raise CommandError( - "--no-binary / --only-binary option requires 1 argument." 
- ) - new = value.split(",") - while ":all:" in new: - other.clear() - target.clear() - target.add(":all:") - del new[: new.index(":all:") + 1] - # Without a none, we want to discard everything as :all: covers it - if ":none:" not in new: - return - for name in new: - if name == ":none:": - target.clear() - continue - name = canonicalize_name(name) - other.discard(name) - target.add(name) - - def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]: - result = {"binary", "source"} - if canonical_name in self.only_binary: - result.discard("source") - elif canonical_name in self.no_binary: - result.discard("binary") - elif ":all:" in self.only_binary: - result.discard("source") - elif ":all:" in self.no_binary: - result.discard("binary") - return frozenset(result) - - def disallow_binaries(self) -> None: - self.handle_mutual_excludes( - ":all:", - self.no_binary, - self.only_binary, - ) diff --git a/venv/Lib/site-packages/pip/_internal/models/index.py b/venv/Lib/site-packages/pip/_internal/models/index.py deleted file mode 100644 index b94c325..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/index.py +++ /dev/null @@ -1,28 +0,0 @@ -import urllib.parse - - -class PackageIndex: - """Represents a Package Index and provides easier access to endpoints""" - - __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"] - - def __init__(self, url: str, file_storage_domain: str) -> None: - super().__init__() - self.url = url - self.netloc = urllib.parse.urlsplit(url).netloc - self.simple_url = self._url_for_path("simple") - self.pypi_url = self._url_for_path("pypi") - - # This is part of a temporary hack used to block installs of PyPI - # packages which depend on external urls only necessary until PyPI can - # block such packages themselves - self.file_storage_domain = file_storage_domain - - def _url_for_path(self, path: str) -> str: - return urllib.parse.urljoin(self.url, path) - - -PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org") -TestPyPI = PackageIndex( - "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org" -) diff --git a/venv/Lib/site-packages/pip/_internal/models/link.py b/venv/Lib/site-packages/pip/_internal/models/link.py deleted file mode 100644 index 6069b27..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/link.py +++ /dev/null @@ -1,288 +0,0 @@ -import functools -import logging -import os -import posixpath -import re -import urllib.parse -from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union - -from pip._internal.utils.filetypes import WHEEL_EXTENSION -from pip._internal.utils.hashes import Hashes -from pip._internal.utils.misc import ( - redact_auth_from_url, - split_auth_from_netloc, - splitext, -) -from pip._internal.utils.models import KeyBasedCompareMixin -from pip._internal.utils.urls import path_to_url, url_to_path - -if TYPE_CHECKING: - from pip._internal.index.collector import HTMLPage - -logger = logging.getLogger(__name__) - - -_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5") - - -class Link(KeyBasedCompareMixin): - """Represents a parsed link from a Package Index's simple URL""" - - __slots__ = [ - "_parsed_url", - "_url", - "comes_from", - "requires_python", - "yanked_reason", - "cache_link_parsing", - ] - - def __init__( - self, - url: str, - comes_from: Optional[Union[str, "HTMLPage"]] = None, - requires_python: Optional[str] = None, - yanked_reason: Optional[str] = None, - cache_link_parsing: bool = True, - ) 
-> None: - """ - :param url: url of the resource pointed to (href of the link) - :param comes_from: instance of HTMLPage where the link was found, - or string. - :param requires_python: String containing the `Requires-Python` - metadata field, specified in PEP 345. This may be specified by - a data-requires-python attribute in the HTML link tag, as - described in PEP 503. - :param yanked_reason: the reason the file has been yanked, if the - file has been yanked, or None if the file hasn't been yanked. - This is the value of the "data-yanked" attribute, if present, in - a simple repository HTML link. If the file has been yanked but - no reason was provided, this should be the empty string. See - PEP 592 for more information and the specification. - :param cache_link_parsing: A flag that is used elsewhere to determine - whether resources retrieved from this link - should be cached. PyPI index urls should - generally have this set to False, for - example. - """ - - # url can be a UNC windows share - if url.startswith("\\\\"): - url = path_to_url(url) - - self._parsed_url = urllib.parse.urlsplit(url) - # Store the url as a private attribute to prevent accidentally - # trying to set a new value. - self._url = url - - self.comes_from = comes_from - self.requires_python = requires_python if requires_python else None - self.yanked_reason = yanked_reason - - super().__init__(key=url, defining_class=Link) - - self.cache_link_parsing = cache_link_parsing - - def __str__(self) -> str: - if self.requires_python: - rp = f" (requires-python:{self.requires_python})" - else: - rp = "" - if self.comes_from: - return "{} (from {}){}".format( - redact_auth_from_url(self._url), self.comes_from, rp - ) - else: - return redact_auth_from_url(str(self._url)) - - def __repr__(self) -> str: - return f"<Link {self.url}>" - - @property - def url(self) -> str: - return self._url - - @property - def filename(self) -> str: - path = self.path.rstrip("/") - name = posixpath.basename(path) - if not name: - # Make sure we don't leak auth information if the netloc - # includes a username and password. - netloc, user_pass = split_auth_from_netloc(self.netloc) - return netloc - - name = urllib.parse.unquote(name) - assert name, f"URL {self._url!r} produced no filename" - return name - - @property - def file_path(self) -> str: - return url_to_path(self.url) - - @property - def scheme(self) -> str: - return self._parsed_url.scheme - - @property - def netloc(self) -> str: - """ - This can contain auth information.
- """ - return self._parsed_url.netloc - - @property - def path(self) -> str: - return urllib.parse.unquote(self._parsed_url.path) - - def splitext(self) -> Tuple[str, str]: - return splitext(posixpath.basename(self.path.rstrip("/"))) - - @property - def ext(self) -> str: - return self.splitext()[1] - - @property - def url_without_fragment(self) -> str: - scheme, netloc, path, query, fragment = self._parsed_url - return urllib.parse.urlunsplit((scheme, netloc, path, query, "")) - - _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") - - @property - def egg_fragment(self) -> Optional[str]: - match = self._egg_fragment_re.search(self._url) - if not match: - return None - return match.group(1) - - _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") - - @property - def subdirectory_fragment(self) -> Optional[str]: - match = self._subdirectory_fragment_re.search(self._url) - if not match: - return None - return match.group(1) - - _hash_re = re.compile( - r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES)) - ) - - @property - def hash(self) -> Optional[str]: - match = self._hash_re.search(self._url) - if match: - return match.group(2) - return None - - @property - def hash_name(self) -> Optional[str]: - match = self._hash_re.search(self._url) - if match: - return match.group(1) - return None - - @property - def show_url(self) -> str: - return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0]) - - @property - def is_file(self) -> bool: - return self.scheme == "file" - - def is_existing_dir(self) -> bool: - return self.is_file and os.path.isdir(self.file_path) - - @property - def is_wheel(self) -> bool: - return self.ext == WHEEL_EXTENSION - - @property - def is_vcs(self) -> bool: - from pip._internal.vcs import vcs - - return self.scheme in vcs.all_schemes - - @property - def is_yanked(self) -> bool: - return self.yanked_reason is not None - - @property - def has_hash(self) -> bool: - return self.hash_name is not None - - def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: - """ - Return True if the link has a hash and it is allowed. - """ - if hashes is None or not self.has_hash: - return False - # Assert non-None so mypy knows self.hash_name and self.hash are str. - assert self.hash_name is not None - assert self.hash is not None - - return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) - - -class _CleanResult(NamedTuple): - """Convert link for equivalency check. - - This is used in the resolver to check whether two URL-specified requirements - likely point to the same distribution and can be considered equivalent. This - equivalency logic avoids comparing URLs literally, which can be too strict - (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users. - - Currently this does three things: - - 1. Drop the basic auth part. This is technically wrong since a server can - serve different content based on auth, but if it does that, it is even - impossible to guarantee two URLs without auth are equivalent, since - the user can input different auth information when prompted. So the - practical solution is to assume the auth doesn't affect the response. - 2. Parse the query to avoid the ordering issue. Note that ordering under the - same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are - still considered different. - 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and - hash values, since it should have no impact the downloaded content. 
Note - that this drops the "egg=" part historically used to denote the requested - project (and extras), which is wrong in the strictest sense, but too many - people are supplying it inconsistently to cause superfluous resolution - conflicts, so we choose to also ignore them. - """ - - parsed: urllib.parse.SplitResult - query: Dict[str, List[str]] - subdirectory: str - hashes: Dict[str, str] - - -def _clean_link(link: Link) -> _CleanResult: - parsed = link._parsed_url - netloc = parsed.netloc.rsplit("@", 1)[-1] - # According to RFC 8089, an empty host in file: means localhost. - if parsed.scheme == "file" and not netloc: - netloc = "localhost" - fragment = urllib.parse.parse_qs(parsed.fragment) - if "egg" in fragment: - logger.debug("Ignoring egg= fragment in %s", link) - try: - # If there are multiple subdirectory values, use the first one. - # This matches the behavior of Link.subdirectory_fragment. - subdirectory = fragment["subdirectory"][0] - except (IndexError, KeyError): - subdirectory = "" - # If there are multiple hash values under the same algorithm, use the - # first one. This matches the behavior of Link.hash_value. - hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment} - return _CleanResult( - parsed=parsed._replace(netloc=netloc, query="", fragment=""), - query=urllib.parse.parse_qs(parsed.query), - subdirectory=subdirectory, - hashes=hashes, - ) - - -@functools.lru_cache(maxsize=None) -def links_equivalent(link1: Link, link2: Link) -> bool: - return _clean_link(link1) == _clean_link(link2) diff --git a/venv/Lib/site-packages/pip/_internal/models/scheme.py b/venv/Lib/site-packages/pip/_internal/models/scheme.py deleted file mode 100644 index f51190a..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/scheme.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -For types associated with installation schemes. - -For a general overview of available schemes and their context, see -https://docs.python.org/3/install/index.html#alternate-installation. -""" - - -SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"] - - -class Scheme: - """A Scheme holds paths which are used as the base directories for - artifacts associated with a Python package. - """ - - __slots__ = SCHEME_KEYS - - def __init__( - self, - platlib: str, - purelib: str, - headers: str, - scripts: str, - data: str, - ) -> None: - self.platlib = platlib - self.purelib = purelib - self.headers = headers - self.scripts = scripts - self.data = data diff --git a/venv/Lib/site-packages/pip/_internal/models/search_scope.py b/venv/Lib/site-packages/pip/_internal/models/search_scope.py deleted file mode 100644 index e4e54c2..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/search_scope.py +++ /dev/null @@ -1,129 +0,0 @@ -import itertools -import logging -import os -import posixpath -import urllib.parse -from typing import List - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.models.index import PyPI -from pip._internal.utils.compat import has_tls -from pip._internal.utils.misc import normalize_path, redact_auth_from_url - -logger = logging.getLogger(__name__) - - -class SearchScope: - - """ - Encapsulates the locations that pip is configured to search. - """ - - __slots__ = ["find_links", "index_urls"] - - @classmethod - def create( - cls, - find_links: List[str], - index_urls: List[str], - ) -> "SearchScope": - """ - Create a SearchScope object after normalizing the `find_links`. - """ - # Build find_links. 
If an argument starts with ~, it may be - # a local file relative to a home directory. So try normalizing - # it and if it exists, use the normalized version. - # This is deliberately conservative - it might be fine just to - # blindly normalize anything starting with a ~... - built_find_links: List[str] = [] - for link in find_links: - if link.startswith("~"): - new_link = normalize_path(link) - if os.path.exists(new_link): - link = new_link - built_find_links.append(link) - - # If we don't have TLS enabled, then WARN if anyplace we're looking - # relies on TLS. - if not has_tls(): - for link in itertools.chain(index_urls, built_find_links): - parsed = urllib.parse.urlparse(link) - if parsed.scheme == "https": - logger.warning( - "pip is configured with locations that require " - "TLS/SSL, however the ssl module in Python is not " - "available." - ) - break - - return cls( - find_links=built_find_links, - index_urls=index_urls, - ) - - def __init__( - self, - find_links: List[str], - index_urls: List[str], - ) -> None: - self.find_links = find_links - self.index_urls = index_urls - - def get_formatted_locations(self) -> str: - lines = [] - redacted_index_urls = [] - if self.index_urls and self.index_urls != [PyPI.simple_url]: - for url in self.index_urls: - - redacted_index_url = redact_auth_from_url(url) - - # Parse the URL - purl = urllib.parse.urlsplit(redacted_index_url) - - # URL is generally invalid if scheme and netloc is missing - # there are issues with Python and URL parsing, so this test - # is a bit crude. See bpo-20271, bpo-23505. Python doesn't - # always parse invalid URLs correctly - it should raise - # exceptions for malformed URLs - if not purl.scheme and not purl.netloc: - logger.warning( - 'The index url "%s" seems invalid, please provide a scheme.', - redacted_index_url, - ) - - redacted_index_urls.append(redacted_index_url) - - lines.append( - "Looking in indexes: {}".format(", ".join(redacted_index_urls)) - ) - - if self.find_links: - lines.append( - "Looking in links: {}".format( - ", ".join(redact_auth_from_url(url) for url in self.find_links) - ) - ) - return "\n".join(lines) - - def get_index_urls_locations(self, project_name: str) -> List[str]: - """Returns the locations found via self.index_urls - - Checks the url_name on the main (first in the list) index and - use this url_name to produce all locations - """ - - def mkurl_pypi_url(url: str) -> str: - loc = posixpath.join( - url, urllib.parse.quote(canonicalize_name(project_name)) - ) - # For maximum compatibility with easy_install, ensure the path - # ends in a trailing slash. Although this isn't in the spec - # (and PyPI can handle it without the slash) some other index - # implementations might break if they relied on easy_install's - # behavior. - if not loc.endswith("/"): - loc = loc + "/" - return loc - - return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/venv/Lib/site-packages/pip/_internal/models/selection_prefs.py b/venv/Lib/site-packages/pip/_internal/models/selection_prefs.py deleted file mode 100644 index 977bc4c..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/selection_prefs.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Optional - -from pip._internal.models.format_control import FormatControl - - -class SelectionPreferences: - """ - Encapsulates the candidate selection preferences for downloading - and installing files. 
- """ - - __slots__ = [ - "allow_yanked", - "allow_all_prereleases", - "format_control", - "prefer_binary", - "ignore_requires_python", - ] - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - def __init__( - self, - allow_yanked: bool, - allow_all_prereleases: bool = False, - format_control: Optional[FormatControl] = None, - prefer_binary: bool = False, - ignore_requires_python: Optional[bool] = None, - ) -> None: - """Create a SelectionPreferences object. - - :param allow_yanked: Whether files marked as yanked (in the sense - of PEP 592) are permitted to be candidates for install. - :param format_control: A FormatControl object or None. Used to control - the selection of source packages / binary packages when consulting - the index and links. - :param prefer_binary: Whether to prefer an old, but valid, binary - dist over a new source dist. - :param ignore_requires_python: Whether to ignore incompatible - "Requires-Python" values in links. Defaults to False. - """ - if ignore_requires_python is None: - ignore_requires_python = False - - self.allow_yanked = allow_yanked - self.allow_all_prereleases = allow_all_prereleases - self.format_control = format_control - self.prefer_binary = prefer_binary - self.ignore_requires_python = ignore_requires_python diff --git a/venv/Lib/site-packages/pip/_internal/models/target_python.py b/venv/Lib/site-packages/pip/_internal/models/target_python.py deleted file mode 100644 index 744bd7e..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/target_python.py +++ /dev/null @@ -1,110 +0,0 @@ -import sys -from typing import List, Optional, Tuple - -from pip._vendor.packaging.tags import Tag - -from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot -from pip._internal.utils.misc import normalize_version_info - - -class TargetPython: - - """ - Encapsulates the properties of a Python interpreter one is targeting - for a package install, download, etc. - """ - - __slots__ = [ - "_given_py_version_info", - "abis", - "implementation", - "platforms", - "py_version", - "py_version_info", - "_valid_tags", - ] - - def __init__( - self, - platforms: Optional[List[str]] = None, - py_version_info: Optional[Tuple[int, ...]] = None, - abis: Optional[List[str]] = None, - implementation: Optional[str] = None, - ) -> None: - """ - :param platforms: A list of strings or None. If None, searches for - packages that are supported by the current system. Otherwise, will - find packages that can be built on the platforms passed in. These - packages will only be downloaded for distribution: they will - not be built locally. - :param py_version_info: An optional tuple of ints representing the - Python version information to use (e.g. `sys.version_info[:3]`). - This can have length 1, 2, or 3 when provided. - :param abis: A list of strings or None. This is passed to - compatibility_tags.py's get_supported() function as is. - :param implementation: A string or None. This is passed to - compatibility_tags.py's get_supported() function as is. - """ - # Store the given py_version_info for when we call get_supported(). 
- self._given_py_version_info = py_version_info - - if py_version_info is None: - py_version_info = sys.version_info[:3] - else: - py_version_info = normalize_version_info(py_version_info) - - py_version = ".".join(map(str, py_version_info[:2])) - - self.abis = abis - self.implementation = implementation - self.platforms = platforms - self.py_version = py_version - self.py_version_info = py_version_info - - # This is used to cache the return value of get_tags(). - self._valid_tags: Optional[List[Tag]] = None - - def format_given(self) -> str: - """ - Format the given, non-None attributes for display. - """ - display_version = None - if self._given_py_version_info is not None: - display_version = ".".join( - str(part) for part in self._given_py_version_info - ) - - key_values = [ - ("platforms", self.platforms), - ("version_info", display_version), - ("abis", self.abis), - ("implementation", self.implementation), - ] - return " ".join( - f"{key}={value!r}" for key, value in key_values if value is not None - ) - - def get_tags(self) -> List[Tag]: - """ - Return the supported PEP 425 tags to check wheel candidates against. - - The tags are returned in order of preference (most preferred first). - """ - if self._valid_tags is None: - # Pass versions=None if no py_version_info was given since - # versions=None uses special default logic. - py_version_info = self._given_py_version_info - if py_version_info is None: - version = None - else: - version = version_info_to_nodot(py_version_info) - - tags = get_supported( - version=version, - platforms=self.platforms, - abis=self.abis, - impl=self.implementation, - ) - self._valid_tags = tags - - return self._valid_tags diff --git a/venv/Lib/site-packages/pip/_internal/models/wheel.py b/venv/Lib/site-packages/pip/_internal/models/wheel.py deleted file mode 100644 index e091612..0000000 --- a/venv/Lib/site-packages/pip/_internal/models/wheel.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Represents a wheel file and provides access to the various parts of the -name that have meaning. -""" -import re -from typing import Dict, Iterable, List - -from pip._vendor.packaging.tags import Tag - -from pip._internal.exceptions import InvalidWheelFilename - - -class Wheel: - """A wheel file""" - - wheel_file_re = re.compile( - r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) - ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
- \.whl|\.dist-info)$""", - re.VERBOSE, - ) - - def __init__(self, filename: str) -> None: - """ - :raises InvalidWheelFilename: when the filename is invalid for a wheel - """ - wheel_info = self.wheel_file_re.match(filename) - if not wheel_info: - raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.") - self.filename = filename - self.name = wheel_info.group("name").replace("_", "-") - # we'll assume "_" means "-" due to wheel naming scheme - # (https://github.com/pypa/pip/issues/1150) - self.version = wheel_info.group("ver").replace("_", "-") - self.build_tag = wheel_info.group("build") - self.pyversions = wheel_info.group("pyver").split(".") - self.abis = wheel_info.group("abi").split(".") - self.plats = wheel_info.group("plat").split(".") - - # All the tag combinations from this file - self.file_tags = { - Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats - } - - def get_formatted_file_tags(self) -> List[str]: - """Return the wheel's tags as a sorted list of strings.""" - return sorted(str(tag) for tag in self.file_tags) - - def support_index_min(self, tags: List[Tag]) -> int: - """Return the lowest index that one of the wheel's file_tag combinations - achieves in the given list of supported tags. - - For example, if there are 8 supported tags and one of the file tags - is first in the list, then return 0. - - :param tags: the PEP 425 tags to check the wheel against, in order - with most preferred first. - - :raises ValueError: If none of the wheel's file tags match one of - the supported tags. - """ - return min(tags.index(tag) for tag in self.file_tags if tag in tags) - - def find_most_preferred_tag( - self, tags: List[Tag], tag_to_priority: Dict[Tag, int] - ) -> int: - """Return the priority of the most preferred tag that one of the wheel's file - tag combinations achieves in the given list of supported tags using the given - tag_to_priority mapping, where lower priorities are more-preferred. - - This is used in place of support_index_min in some cases in order to avoid - an expensive linear scan of a large list of tags. - - :param tags: the PEP 425 tags to check the wheel against. - :param tag_to_priority: a mapping from tag to priority of that tag, where - lower is more preferred. - - :raises ValueError: If none of the wheel's file tags match one of - the supported tags. - """ - return min( - tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority - ) - - def supported(self, tags: Iterable[Tag]) -> bool: - """Return whether the wheel is compatible with one of the given tags. - - :param tags: the PEP 425 tags to check the wheel against. - """ - return not self.file_tags.isdisjoint(tags) diff --git a/venv/Lib/site-packages/pip/_internal/network/__init__.py b/venv/Lib/site-packages/pip/_internal/network/__init__.py deleted file mode 100644 index b51bde9..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -"""Contains purely network-related utilities. 
-""" diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index b04da04..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc deleted file mode 100644 index 6841778..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc deleted file mode 100644 index e2e627d..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc deleted file mode 100644 index e4435b7..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc deleted file mode 100644 index 5006b16..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc deleted file mode 100644 index 25f9eed..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc deleted file mode 100644 index 06277c4..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc deleted file mode 100644 index 712712d..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/network/auth.py b/venv/Lib/site-packages/pip/_internal/network/auth.py deleted file mode 100644 index ca42798..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/auth.py +++ /dev/null @@ -1,323 +0,0 @@ -"""Network Authentication Helpers - -Contains interface (MultiDomainBasicAuth) and associated glue code for -providing credentials in the context of network requests. 
-""" - -import urllib.parse -from typing import Any, Dict, List, Optional, Tuple - -from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth -from pip._vendor.requests.models import Request, Response -from pip._vendor.requests.utils import get_netrc_auth - -from pip._internal.utils.logging import getLogger -from pip._internal.utils.misc import ( - ask, - ask_input, - ask_password, - remove_auth_from_url, - split_auth_netloc_from_url, -) -from pip._internal.vcs.versioncontrol import AuthInfo - -logger = getLogger(__name__) - -Credentials = Tuple[str, str, str] - -try: - import keyring -except ImportError: - keyring = None # type: ignore[assignment] -except Exception as exc: - logger.warning( - "Keyring is skipped due to an exception: %s", - str(exc), - ) - keyring = None # type: ignore[assignment] - - -def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: - """Return the tuple auth for a given url from keyring.""" - global keyring - if not url or not keyring: - return None - - try: - try: - get_credential = keyring.get_credential - except AttributeError: - pass - else: - logger.debug("Getting credentials from keyring for %s", url) - cred = get_credential(url, username) - if cred is not None: - return cred.username, cred.password - return None - - if username: - logger.debug("Getting password from keyring for %s", url) - password = keyring.get_password(url, username) - if password: - return username, password - - except Exception as exc: - logger.warning( - "Keyring is skipped due to an exception: %s", - str(exc), - ) - keyring = None # type: ignore[assignment] - return None - - -class MultiDomainBasicAuth(AuthBase): - def __init__( - self, prompting: bool = True, index_urls: Optional[List[str]] = None - ) -> None: - self.prompting = prompting - self.index_urls = index_urls - self.passwords: Dict[str, AuthInfo] = {} - # When the user is prompted to enter credentials and keyring is - # available, we will offer to save them. If the user accepts, - # this value is set to the credentials they entered. After the - # request authenticates, the caller should call - # ``save_credentials`` to save these. - self._credentials_to_save: Optional[Credentials] = None - - def _get_index_url(self, url: str) -> Optional[str]: - """Return the original index URL matching the requested URL. - - Cached or dynamically generated credentials may work against - the original index URL rather than just the netloc. - - The provided url should have had its username and password - removed already. If the original index url had credentials then - they will be included in the return value. - - Returns None if no matching index was found, or if --no-index - was specified by the user. - """ - if not url or not self.index_urls: - return None - - for u in self.index_urls: - prefix = remove_auth_from_url(u).rstrip("/") + "/" - if url.startswith(prefix): - return u - return None - - def _get_new_credentials( - self, - original_url: str, - allow_netrc: bool = True, - allow_keyring: bool = False, - ) -> AuthInfo: - """Find and return credentials for the specified URL.""" - # Split the credentials and netloc from the url. 
- url, netloc, url_user_password = split_auth_netloc_from_url( - original_url, - ) - - # Start with the credentials embedded in the url - username, password = url_user_password - if username is not None and password is not None: - logger.debug("Found credentials in url for %s", netloc) - return url_user_password - - # Find a matching index url for this request - index_url = self._get_index_url(url) - if index_url: - # Split the credentials from the url. - index_info = split_auth_netloc_from_url(index_url) - if index_info: - index_url, _, index_url_user_password = index_info - logger.debug("Found index url %s", index_url) - - # If an index URL was found, try its embedded credentials - if index_url and index_url_user_password[0] is not None: - username, password = index_url_user_password - if username is not None and password is not None: - logger.debug("Found credentials in index url for %s", netloc) - return index_url_user_password - - # Get creds from netrc if we still don't have them - if allow_netrc: - netrc_auth = get_netrc_auth(original_url) - if netrc_auth: - logger.debug("Found credentials in netrc for %s", netloc) - return netrc_auth - - # If we don't have a password and keyring is available, use it. - if allow_keyring: - # The index url is more specific than the netloc, so try it first - # fmt: off - kr_auth = ( - get_keyring_auth(index_url, username) or - get_keyring_auth(netloc, username) - ) - # fmt: on - if kr_auth: - logger.debug("Found credentials in keyring for %s", netloc) - return kr_auth - - return username, password - - def _get_url_and_credentials( - self, original_url: str - ) -> Tuple[str, Optional[str], Optional[str]]: - """Return the credentials to use for the provided URL. - - If allowed, netrc and keyring may be used to obtain the - correct credentials. - - Returns (url_without_credentials, username, password). Note - that even if the original URL contains credentials, this - function may return a different username and password. - """ - url, netloc, _ = split_auth_netloc_from_url(original_url) - - # Try to get credentials from original url - username, password = self._get_new_credentials(original_url) - - # If credentials not found, use any stored credentials for this netloc. - # Do this if either the username or the password is missing. - # This accounts for the situation in which the user has specified - # the username in the index url, but the password comes from keyring. - if (username is None or password is None) and netloc in self.passwords: - un, pw = self.passwords[netloc] - # It is possible that the cached credentials are for a different username, - # in which case the cache should be ignored. - if username is None or username == un: - username, password = un, pw - - if username is not None or password is not None: - # Convert the username and password if they're None, so that - # this netloc will show up as "cached" in the conditional above. - # Further, HTTPBasicAuth doesn't accept None, so it makes sense to - # cache the value that is going to be used. - username = username or "" - password = password or "" - - # Store any acquired credentials. 
- self.passwords[netloc] = (username, password) - - assert ( - # Credentials were found - (username is not None and password is not None) - # Credentials were not found - or (username is None and password is None) - ), f"Could not load credentials from url: {original_url}" - - return url, username, password - - def __call__(self, req: Request) -> Request: - # Get credentials for this request - url, username, password = self._get_url_and_credentials(req.url) - - # Set the url of the request to the url without any credentials - req.url = url - - if username is not None and password is not None: - # Send the basic auth with this request - req = HTTPBasicAuth(username, password)(req) - - # Attach a hook to handle 401 responses - req.register_hook("response", self.handle_401) - - return req - - # Factored out to allow for easy patching in tests - def _prompt_for_password( - self, netloc: str - ) -> Tuple[Optional[str], Optional[str], bool]: - username = ask_input(f"User for {netloc}: ") - if not username: - return None, None, False - auth = get_keyring_auth(netloc, username) - if auth and auth[0] is not None and auth[1] is not None: - return auth[0], auth[1], False - password = ask_password("Password: ") - return username, password, True - - # Factored out to allow for easy patching in tests - def _should_save_password_to_keyring(self) -> bool: - if not keyring: - return False - return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" - - def handle_401(self, resp: Response, **kwargs: Any) -> Response: - # We only care about 401 responses, anything else we want to just - # pass through the actual response - if resp.status_code != 401: - return resp - - # We are not able to prompt the user so simply return the response - if not self.prompting: - return resp - - parsed = urllib.parse.urlparse(resp.url) - - # Query the keyring for credentials: - username, password = self._get_new_credentials( - resp.url, - allow_netrc=False, - allow_keyring=True, - ) - - # Prompt the user for a new username and password - save = False - if not username and not password: - username, password, save = self._prompt_for_password(parsed.netloc) - - # Store the new username and password to use for future requests - self._credentials_to_save = None - if username is not None and password is not None: - self.passwords[parsed.netloc] = (username, password) - - # Prompt to save the password to keyring - if save and self._should_save_password_to_keyring(): - self._credentials_to_save = (parsed.netloc, username, password) - - # Consume content and release the original connection to allow our new - # request to reuse the same one. - resp.content - resp.raw.release_conn() - - # Add our new username and password to the request - req = HTTPBasicAuth(username or "", password or "")(resp.request) - req.register_hook("response", self.warn_on_401) - - # On successful request, save the credentials that were used to - # keyring. (Note that if the user responded "no" above, this member - # is not set and nothing will be saved.) 
- if self._credentials_to_save: - req.register_hook("response", self.save_credentials) - - # Send our new request - new_resp = resp.connection.send(req, **kwargs) - new_resp.history.append(resp) - - return new_resp - - def warn_on_401(self, resp: Response, **kwargs: Any) -> None: - """Response callback to warn about incorrect credentials.""" - if resp.status_code == 401: - logger.warning( - "401 Error, Credentials not correct for %s", - resp.request.url, - ) - - def save_credentials(self, resp: Response, **kwargs: Any) -> None: - """Response callback to save credentials on success.""" - assert keyring is not None, "should never reach here without keyring" - if not keyring: - return - - creds = self._credentials_to_save - self._credentials_to_save = None - if creds and resp.status_code < 400: - try: - logger.info("Saving credentials to keyring") - keyring.set_password(*creds) - except Exception: - logger.exception("Failed to save credentials") diff --git a/venv/Lib/site-packages/pip/_internal/network/cache.py b/venv/Lib/site-packages/pip/_internal/network/cache.py deleted file mode 100644 index 9dba7ed..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/cache.py +++ /dev/null @@ -1,69 +0,0 @@ -"""HTTP cache implementation. -""" - -import os -from contextlib import contextmanager -from typing import Iterator, Optional - -from pip._vendor.cachecontrol.cache import BaseCache -from pip._vendor.cachecontrol.caches import FileCache -from pip._vendor.requests.models import Response - -from pip._internal.utils.filesystem import adjacent_tmp_file, replace -from pip._internal.utils.misc import ensure_dir - - -def is_from_cache(response: Response) -> bool: - return getattr(response, "from_cache", False) - - -@contextmanager -def suppressed_cache_errors() -> Iterator[None]: - """If we can't access the cache then we can just skip caching and process - requests as if caching wasn't enabled. - """ - try: - yield - except OSError: - pass - - -class SafeFileCache(BaseCache): - """ - A file based cache which is safe to use even when the target directory may - not be accessible or writable. - """ - - def __init__(self, directory: str) -> None: - assert directory is not None, "Cache directory must not be None." - super().__init__() - self.directory = directory - - def _get_cache_path(self, name: str) -> str: - # From cachecontrol.caches.file_cache.FileCache._fn, brought into our - # class for backwards-compatibility and to avoid using a non-public - # method. - hashed = FileCache.encode(name) - parts = list(hashed[:5]) + [hashed] - return os.path.join(self.directory, *parts) - - def get(self, key: str) -> Optional[bytes]: - path = self._get_cache_path(key) - with suppressed_cache_errors(): - with open(path, "rb") as f: - return f.read() - - def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: - path = self._get_cache_path(key) - with suppressed_cache_errors(): - ensure_dir(os.path.dirname(path)) - - with adjacent_tmp_file(path) as f: - f.write(value) - - replace(f.name, path) - - def delete(self, key: str) -> None: - path = self._get_cache_path(key) - with suppressed_cache_errors(): - os.remove(path) diff --git a/venv/Lib/site-packages/pip/_internal/network/download.py b/venv/Lib/site-packages/pip/_internal/network/download.py deleted file mode 100644 index 35bc970..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/download.py +++ /dev/null @@ -1,185 +0,0 @@ -"""Download files with progress indicators. 
-""" -import cgi -import logging -import mimetypes -import os -from typing import Iterable, Optional, Tuple - -from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response - -from pip._internal.cli.progress_bars import get_download_progress_renderer -from pip._internal.exceptions import NetworkConnectionError -from pip._internal.models.index import PyPI -from pip._internal.models.link import Link -from pip._internal.network.cache import is_from_cache -from pip._internal.network.session import PipSession -from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks -from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext - -logger = logging.getLogger(__name__) - - -def _get_http_response_size(resp: Response) -> Optional[int]: - try: - return int(resp.headers["content-length"]) - except (ValueError, KeyError, TypeError): - return None - - -def _prepare_download( - resp: Response, - link: Link, - progress_bar: str, -) -> Iterable[bytes]: - total_length = _get_http_response_size(resp) - - if link.netloc == PyPI.file_storage_domain: - url = link.show_url - else: - url = link.url_without_fragment - - logged_url = redact_auth_from_url(url) - - if total_length: - logged_url = "{} ({})".format(logged_url, format_size(total_length)) - - if is_from_cache(resp): - logger.info("Using cached %s", logged_url) - else: - logger.info("Downloading %s", logged_url) - - if logger.getEffectiveLevel() > logging.INFO: - show_progress = False - elif is_from_cache(resp): - show_progress = False - elif not total_length: - show_progress = True - elif total_length > (40 * 1000): - show_progress = True - else: - show_progress = False - - chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) - - if not show_progress: - return chunks - - renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length) - return renderer(chunks) - - -def sanitize_content_filename(filename: str) -> str: - """ - Sanitize the "filename" value from a Content-Disposition header. - """ - return os.path.basename(filename) - - -def parse_content_disposition(content_disposition: str, default_filename: str) -> str: - """ - Parse the "filename" value from a Content-Disposition header, and - return the default filename if the result is empty. - """ - _type, params = cgi.parse_header(content_disposition) - filename = params.get("filename") - if filename: - # We need to sanitize the filename to prevent directory traversal - # in case the filename contains ".." path parts. - filename = sanitize_content_filename(filename) - return filename or default_filename - - -def _get_http_response_filename(resp: Response, link: Link) -> str: - """Get an ideal filename from the given HTTP response, falling back to - the link filename if not provided. 
- """ - filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get("content-disposition") - if content_disposition: - filename = parse_content_disposition(content_disposition, filename) - ext: Optional[str] = splitext(filename)[1] - if not ext: - ext = mimetypes.guess_extension(resp.headers.get("content-type", "")) - if ext: - filename += ext - if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: - filename += ext - return filename - - -def _http_get_download(session: PipSession, link: Link) -> Response: - target_url = link.url.split("#", 1)[0] - resp = session.get(target_url, headers=HEADERS, stream=True) - raise_for_status(resp) - return resp - - -class Downloader: - def __init__( - self, - session: PipSession, - progress_bar: str, - ) -> None: - self._session = session - self._progress_bar = progress_bar - - def __call__(self, link: Link, location: str) -> Tuple[str, str]: - """Download the file given by link into location.""" - try: - resp = _http_get_download(self._session, link) - except NetworkConnectionError as e: - assert e.response is not None - logger.critical( - "HTTP error %s while getting %s", e.response.status_code, link - ) - raise - - filename = _get_http_response_filename(resp, link) - filepath = os.path.join(location, filename) - - chunks = _prepare_download(resp, link, self._progress_bar) - with open(filepath, "wb") as content_file: - for chunk in chunks: - content_file.write(chunk) - content_type = resp.headers.get("Content-Type", "") - return filepath, content_type - - -class BatchDownloader: - def __init__( - self, - session: PipSession, - progress_bar: str, - ) -> None: - self._session = session - self._progress_bar = progress_bar - - def __call__( - self, links: Iterable[Link], location: str - ) -> Iterable[Tuple[Link, Tuple[str, str]]]: - """Download the files given by links into location.""" - for link in links: - try: - resp = _http_get_download(self._session, link) - except NetworkConnectionError as e: - assert e.response is not None - logger.critical( - "HTTP error %s while getting %s", - e.response.status_code, - link, - ) - raise - - filename = _get_http_response_filename(resp, link) - filepath = os.path.join(location, filename) - - chunks = _prepare_download(resp, link, self._progress_bar) - with open(filepath, "wb") as content_file: - for chunk in chunks: - content_file.write(chunk) - content_type = resp.headers.get("Content-Type", "") - yield link, (filepath, content_type) diff --git a/venv/Lib/site-packages/pip/_internal/network/lazy_wheel.py b/venv/Lib/site-packages/pip/_internal/network/lazy_wheel.py deleted file mode 100644 index c9e44d5..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/lazy_wheel.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Lazy ZIP over HTTP""" - -__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"] - -from bisect import bisect_left, bisect_right -from contextlib import contextmanager -from tempfile import NamedTemporaryFile -from typing import Any, Dict, Iterator, List, Optional, Tuple -from zipfile import BadZipfile, ZipFile - -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response - -from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution -from pip._internal.network.session import PipSession -from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks - - -class 
HTTPRangeRequestUnsupported(Exception): - pass - - -def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution: - """Return a distribution object from the given wheel URL. - - This uses HTTP range requests to only fetch the potion of the wheel - containing metadata, just enough for the object to be constructed. - If such requests are not supported, HTTPRangeRequestUnsupported - is raised. - """ - with LazyZipOverHTTP(url, session) as zf: - # For read-only ZIP files, ZipFile only needs methods read, - # seek, seekable and tell, not the whole IO protocol. - wheel = MemoryWheel(zf.name, zf) # type: ignore - # After context manager exit, wheel.name - # is an invalid file by intention. - return get_wheel_distribution(wheel, canonicalize_name(name)) - - -class LazyZipOverHTTP: - """File-like object mapped to a ZIP file over HTTP. - - This uses HTTP range requests to lazily fetch the file's content, - which is supposed to be fed to ZipFile. If such requests are not - supported by the server, raise HTTPRangeRequestUnsupported - during initialization. - """ - - def __init__( - self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE - ) -> None: - head = session.head(url, headers=HEADERS) - raise_for_status(head) - assert head.status_code == 200 - self._session, self._url, self._chunk_size = session, url, chunk_size - self._length = int(head.headers["Content-Length"]) - self._file = NamedTemporaryFile() - self.truncate(self._length) - self._left: List[int] = [] - self._right: List[int] = [] - if "bytes" not in head.headers.get("Accept-Ranges", "none"): - raise HTTPRangeRequestUnsupported("range request is not supported") - self._check_zip() - - @property - def mode(self) -> str: - """Opening mode, which is always rb.""" - return "rb" - - @property - def name(self) -> str: - """Path to the underlying file.""" - return self._file.name - - def seekable(self) -> bool: - """Return whether random access is supported, which is True.""" - return True - - def close(self) -> None: - """Close the file.""" - self._file.close() - - @property - def closed(self) -> bool: - """Whether the file is closed.""" - return self._file.closed - - def read(self, size: int = -1) -> bytes: - """Read up to size bytes from the object and return them. - - As a convenience, if size is unspecified or -1, - all bytes until EOF are returned. Fewer than - size bytes may be returned if EOF is reached. - """ - download_size = max(size, self._chunk_size) - start, length = self.tell(), self._length - stop = length if size < 0 else min(start + download_size, length) - start = max(0, stop - download_size) - self._download(start, stop - 1) - return self._file.read(size) - - def readable(self) -> bool: - """Return whether the file is readable, which is True.""" - return True - - def seek(self, offset: int, whence: int = 0) -> int: - """Change stream position and return the new absolute position. - - Seek to offset relative position indicated by whence: - * 0: Start of stream (the default). pos should be >= 0; - * 1: Current position - pos may be negative; - * 2: End of stream - pos usually negative. - """ - return self._file.seek(offset, whence) - - def tell(self) -> int: - """Return the current position.""" - return self._file.tell() - - def truncate(self, size: Optional[int] = None) -> int: - """Resize the stream to the given size in bytes. - - If size is unspecified resize to the current position. - The current stream position isn't changed. - - Return the new file size. 
- """ - return self._file.truncate(size) - - def writable(self) -> bool: - """Return False.""" - return False - - def __enter__(self) -> "LazyZipOverHTTP": - self._file.__enter__() - return self - - def __exit__(self, *exc: Any) -> Optional[bool]: - return self._file.__exit__(*exc) - - @contextmanager - def _stay(self) -> Iterator[None]: - """Return a context manager keeping the position. - - At the end of the block, seek back to original position. - """ - pos = self.tell() - try: - yield - finally: - self.seek(pos) - - def _check_zip(self) -> None: - """Check and download until the file is a valid ZIP.""" - end = self._length - 1 - for start in reversed(range(0, end, self._chunk_size)): - self._download(start, end) - with self._stay(): - try: - # For read-only ZIP files, ZipFile only needs - # methods read, seek, seekable and tell. - ZipFile(self) # type: ignore - except BadZipfile: - pass - else: - break - - def _stream_response( - self, start: int, end: int, base_headers: Dict[str, str] = HEADERS - ) -> Response: - """Return HTTP response to a range request from start to end.""" - headers = base_headers.copy() - headers["Range"] = f"bytes={start}-{end}" - # TODO: Get range requests to be correctly cached - headers["Cache-Control"] = "no-cache" - return self._session.get(self._url, headers=headers, stream=True) - - def _merge( - self, start: int, end: int, left: int, right: int - ) -> Iterator[Tuple[int, int]]: - """Return an iterator of intervals to be fetched. - - Args: - start (int): Start of needed interval - end (int): End of needed interval - left (int): Index of first overlapping downloaded data - right (int): Index after last overlapping downloaded data - """ - lslice, rslice = self._left[left:right], self._right[left:right] - i = start = min([start] + lslice[:1]) - end = max([end] + rslice[-1:]) - for j, k in zip(lslice, rslice): - if j > i: - yield i, j - 1 - i = k + 1 - if i <= end: - yield i, end - self._left[left:right], self._right[left:right] = [start], [end] - - def _download(self, start: int, end: int) -> None: - """Download bytes from start to end inclusively.""" - with self._stay(): - left = bisect_left(self._right, start) - right = bisect_right(self._left, end) - for start, end in self._merge(start, end, left, right): - response = self._stream_response(start, end) - response.raise_for_status() - self.seek(start) - for chunk in response_chunks(response, self._chunk_size): - self._file.write(chunk) diff --git a/venv/Lib/site-packages/pip/_internal/network/session.py b/venv/Lib/site-packages/pip/_internal/network/session.py deleted file mode 100644 index cbe743b..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/session.py +++ /dev/null @@ -1,454 +0,0 @@ -"""PipSession and supporting code, containing all pip-specific -network request configuration and behavior. 
-""" - -import email.utils -import io -import ipaddress -import json -import logging -import mimetypes -import os -import platform -import shutil -import subprocess -import sys -import urllib.parse -import warnings -from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union - -from pip._vendor import requests, urllib3 -from pip._vendor.cachecontrol import CacheControlAdapter -from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter -from pip._vendor.requests.models import PreparedRequest, Response -from pip._vendor.requests.structures import CaseInsensitiveDict -from pip._vendor.urllib3.connectionpool import ConnectionPool -from pip._vendor.urllib3.exceptions import InsecureRequestWarning - -from pip import __version__ -from pip._internal.metadata import get_default_environment -from pip._internal.models.link import Link -from pip._internal.network.auth import MultiDomainBasicAuth -from pip._internal.network.cache import SafeFileCache - -# Import ssl from compat so the initial import occurs in only one place. -from pip._internal.utils.compat import has_tls -from pip._internal.utils.glibc import libc_ver -from pip._internal.utils.misc import build_url_from_netloc, parse_netloc -from pip._internal.utils.urls import url_to_path - -logger = logging.getLogger(__name__) - -SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] - - -# Ignore warning raised when using --trusted-host. -warnings.filterwarnings("ignore", category=InsecureRequestWarning) - - -SECURE_ORIGINS: List[SecureOrigin] = [ - # protocol, hostname, port - # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) - ("https", "*", "*"), - ("*", "localhost", "*"), - ("*", "127.0.0.0/8", "*"), - ("*", "::1/128", "*"), - ("file", "*", None), - # ssh is always secure. - ("ssh", "*", "*"), -] - - -# These are environment variables present when running under various -# CI systems. For each variable, some CI systems that use the variable -# are indicated. The collection was chosen so that for each of a number -# of popular systems, at least one of the environment variables is used. -# This list is used to provide some indication of and lower bound for -# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. -# For more background, see: https://github.com/pypa/pip/issues/5499 -CI_ENVIRONMENT_VARIABLES = ( - # Azure Pipelines - "BUILD_BUILDID", - # Jenkins - "BUILD_ID", - # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI - "CI", - # Explicit environment variable. - "PIP_IS_CI", -) - - -def looks_like_ci() -> bool: - """ - Return whether it looks like pip is running under CI. - """ - # We don't use the method of checking for a tty (e.g. using isatty()) - # because some CI systems mimic a tty (e.g. Travis CI). Thus that - # method doesn't provide definitive information in either direction. - return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) - - -def user_agent() -> str: - """ - Return a string representing the user agent. 
- """ - data: Dict[str, Any] = { - "installer": {"name": "pip", "version": __version__}, - "python": platform.python_version(), - "implementation": { - "name": platform.python_implementation(), - }, - } - - if data["implementation"]["name"] == "CPython": - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == "PyPy": - pypy_version_info = sys.pypy_version_info # type: ignore - if pypy_version_info.releaselevel == "final": - pypy_version_info = pypy_version_info[:3] - data["implementation"]["version"] = ".".join( - [str(x) for x in pypy_version_info] - ) - elif data["implementation"]["name"] == "Jython": - # Complete Guess - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == "IronPython": - # Complete Guess - data["implementation"]["version"] = platform.python_version() - - if sys.platform.startswith("linux"): - from pip._vendor import distro - - linux_distribution = distro.name(), distro.version(), distro.codename() - distro_infos: Dict[str, Any] = dict( - filter( - lambda x: x[1], - zip(["name", "version", "id"], linux_distribution), - ) - ) - libc = dict( - filter( - lambda x: x[1], - zip(["lib", "version"], libc_ver()), - ) - ) - if libc: - distro_infos["libc"] = libc - if distro_infos: - data["distro"] = distro_infos - - if sys.platform.startswith("darwin") and platform.mac_ver()[0]: - data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} - - if platform.system(): - data.setdefault("system", {})["name"] = platform.system() - - if platform.release(): - data.setdefault("system", {})["release"] = platform.release() - - if platform.machine(): - data["cpu"] = platform.machine() - - if has_tls(): - import _ssl as ssl - - data["openssl_version"] = ssl.OPENSSL_VERSION - - setuptools_dist = get_default_environment().get_distribution("setuptools") - if setuptools_dist is not None: - data["setuptools_version"] = str(setuptools_dist.version) - - if shutil.which("rustc") is not None: - # If for any reason `rustc --version` fails, silently ignore it - try: - rustc_output = subprocess.check_output( - ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 - ) - except Exception: - pass - else: - if rustc_output.startswith(b"rustc "): - # The format of `rustc --version` is: - # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'` - # We extract just the middle (1.52.1) part - data["rustc_version"] = rustc_output.split(b" ")[1].decode() - - # Use None rather than False so as not to give the impression that - # pip knows it is not being run under CI. Rather, it is a null or - # inconclusive result. Also, we include some value rather than no - # value to make it easier to know that the check has been run. 
- data["ci"] = True if looks_like_ci() else None - - user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") - if user_data is not None: - data["user_data"] = user_data - - return "{data[installer][name]}/{data[installer][version]} {json}".format( - data=data, - json=json.dumps(data, separators=(",", ":"), sort_keys=True), - ) - - -class LocalFSAdapter(BaseAdapter): - def send( - self, - request: PreparedRequest, - stream: bool = False, - timeout: Optional[Union[float, Tuple[float, float]]] = None, - verify: Union[bool, str] = True, - cert: Optional[Union[str, Tuple[str, str]]] = None, - proxies: Optional[Mapping[str, str]] = None, - ) -> Response: - pathname = url_to_path(request.url) - - resp = Response() - resp.status_code = 200 - resp.url = request.url - - try: - stats = os.stat(pathname) - except OSError as exc: - # format the exception raised as a io.BytesIO object, - # to return a better error message: - resp.status_code = 404 - resp.reason = type(exc).__name__ - resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8")) - else: - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - content_type = mimetypes.guess_type(pathname)[0] or "text/plain" - resp.headers = CaseInsensitiveDict( - { - "Content-Type": content_type, - "Content-Length": stats.st_size, - "Last-Modified": modified, - } - ) - - resp.raw = open(pathname, "rb") - resp.close = resp.raw.close - - return resp - - def close(self) -> None: - pass - - -class InsecureHTTPAdapter(HTTPAdapter): - def cert_verify( - self, - conn: ConnectionPool, - url: str, - verify: Union[bool, str], - cert: Optional[Union[str, Tuple[str, str]]], - ) -> None: - super().cert_verify(conn=conn, url=url, verify=False, cert=cert) - - -class InsecureCacheControlAdapter(CacheControlAdapter): - def cert_verify( - self, - conn: ConnectionPool, - url: str, - verify: Union[bool, str], - cert: Optional[Union[str, Tuple[str, str]]], - ) -> None: - super().cert_verify(conn=conn, url=url, verify=False, cert=cert) - - -class PipSession(requests.Session): - - timeout: Optional[int] = None - - def __init__( - self, - *args: Any, - retries: int = 0, - cache: Optional[str] = None, - trusted_hosts: Sequence[str] = (), - index_urls: Optional[List[str]] = None, - **kwargs: Any, - ) -> None: - """ - :param trusted_hosts: Domains not to emit warnings for when not using - HTTPS. - """ - super().__init__(*args, **kwargs) - - # Namespace the attribute with "pip_" just in case to prevent - # possible conflicts with the base class. - self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = [] - - # Attach our User Agent to the request - self.headers["User-Agent"] = user_agent() - - # Attach our Authentication handler to the session - self.auth = MultiDomainBasicAuth(index_urls=index_urls) - - # Create our urllib3.Retry instance which will allow us to customize - # how we handle retries. - retries = urllib3.Retry( - # Set the total number of retries that a particular request can - # have. - total=retries, - # A 503 error from PyPI typically means that the Fastly -> Origin - # connection got interrupted in some way. A 503 error in general - # is typically considered a transient error so we'll go ahead and - # retry it. - # A 500 may indicate transient error in Amazon S3 - # A 520 or 527 - may indicate transient error in CloudFlare - status_forcelist=[500, 503, 520, 527], - # Add a small amount of back off between failed requests in - # order to prevent hammering the service. 
- backoff_factor=0.25, - ) # type: ignore - - # Our Insecure HTTPAdapter disables HTTPS validation. It does not - # support caching so we'll use it for all http:// URLs. - # If caching is disabled, we will also use it for - # https:// hosts that we've marked as ignoring - # TLS errors for (trusted-hosts). - insecure_adapter = InsecureHTTPAdapter(max_retries=retries) - - # We want to _only_ cache responses on securely fetched origins or when - # the host is specified as trusted. We do this because - # we can't validate the response of an insecurely/untrusted fetched - # origin, and we don't want someone to be able to poison the cache and - # require manual eviction from the cache to fix it. - if cache: - secure_adapter = CacheControlAdapter( - cache=SafeFileCache(cache), - max_retries=retries, - ) - self._trusted_host_adapter = InsecureCacheControlAdapter( - cache=SafeFileCache(cache), - max_retries=retries, - ) - else: - secure_adapter = HTTPAdapter(max_retries=retries) - self._trusted_host_adapter = insecure_adapter - - self.mount("https://", secure_adapter) - self.mount("http://", insecure_adapter) - - # Enable file:// urls - self.mount("file://", LocalFSAdapter()) - - for host in trusted_hosts: - self.add_trusted_host(host, suppress_logging=True) - - def update_index_urls(self, new_index_urls: List[str]) -> None: - """ - :param new_index_urls: New index urls to update the authentication - handler with. - """ - self.auth.index_urls = new_index_urls - - def add_trusted_host( - self, host: str, source: Optional[str] = None, suppress_logging: bool = False - ) -> None: - """ - :param host: It is okay to provide a host that has previously been - added. - :param source: An optional source string, for logging where the host - string came from. - """ - if not suppress_logging: - msg = f"adding trusted host: {host!r}" - if source is not None: - msg += f" (from {source})" - logger.info(msg) - - host_port = parse_netloc(host) - if host_port not in self.pip_trusted_origins: - self.pip_trusted_origins.append(host_port) - - self.mount( - build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter - ) - self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) - if not host_port[1]: - self.mount( - build_url_from_netloc(host, scheme="http") + ":", - self._trusted_host_adapter, - ) - # Mount wildcard ports for the same host. - self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter) - - def iter_secure_origins(self) -> Iterator[SecureOrigin]: - yield from SECURE_ORIGINS - for host, port in self.pip_trusted_origins: - yield ("*", host, "*" if port is None else port) - - def is_secure_origin(self, location: Link) -> bool: - # Determine if this url used a secure transport mechanism - parsed = urllib.parse.urlparse(str(location)) - origin_protocol, origin_host, origin_port = ( - parsed.scheme, - parsed.hostname, - parsed.port, - ) - - # The protocol to use to see if the protocol matches. - # Don't count the repository type as part of the protocol: in - # cases such as "git+ssh", only use "ssh". (I.e., Only verify against - # the last scheme.) - origin_protocol = origin_protocol.rsplit("+", 1)[-1] - - # Determine if our origin is a secure origin by looking through our - # hardcoded list of secure origins, as well as any additional ones - # configured on this PackageFinder instance. 
- for secure_origin in self.iter_secure_origins(): - secure_protocol, secure_host, secure_port = secure_origin - if origin_protocol != secure_protocol and secure_protocol != "*": - continue - - try: - addr = ipaddress.ip_address(origin_host) - network = ipaddress.ip_network(secure_host) - except ValueError: - # We don't have both a valid address or a valid network, so - # we'll check this origin against hostnames. - if ( - origin_host - and origin_host.lower() != secure_host.lower() - and secure_host != "*" - ): - continue - else: - # We have a valid address and network, so see if the address - # is contained within the network. - if addr not in network: - continue - - # Check to see if the port matches. - if ( - origin_port != secure_port - and secure_port != "*" - and secure_port is not None - ): - continue - - # If we've gotten here, then this origin matches the current - # secure origin and we should return True - return True - - # If we've gotten to this point, then the origin isn't secure and we - # will not accept it as a valid location to search. We will however - # log a warning that we are ignoring it. - logger.warning( - "The repository located at %s is not a trusted or secure host and " - "is being ignored. If this repository is available via HTTPS we " - "recommend you use HTTPS instead, otherwise you may silence " - "this warning and allow it anyway with '--trusted-host %s'.", - origin_host, - origin_host, - ) - - return False - - def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response: - # Allow setting a default timeout on a session - kwargs.setdefault("timeout", self.timeout) - - # Dispatch the actual request - return super().request(method, url, *args, **kwargs) diff --git a/venv/Lib/site-packages/pip/_internal/network/utils.py b/venv/Lib/site-packages/pip/_internal/network/utils.py deleted file mode 100644 index 094cf1b..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/utils.py +++ /dev/null @@ -1,96 +0,0 @@ -from typing import Dict, Iterator - -from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response - -from pip._internal.exceptions import NetworkConnectionError - -# The following comments and HTTP headers were originally added by -# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. -# -# We use Accept-Encoding: identity here because requests defaults to -# accepting compressed responses. This breaks in a variety of ways -# depending on how the server is configured. -# - Some servers will notice that the file isn't a compressible file -# and will leave the file alone and with an empty Content-Encoding -# - Some servers will notice that the file is already compressed and -# will leave the file alone, adding a Content-Encoding: gzip header -# - Some servers won't notice anything at all and will take a file -# that's already been compressed and compress it again, and set -# the Content-Encoding: gzip header -# By setting this to request only the identity encoding we're hoping -# to eliminate the third case. Hopefully there does not exist a server -# which when given a file will notice it is already compressed and that -# you're not asking for a compressed file and will then decompress it -# before sending because if that's the case I don't think it'll ever be -# possible to make this work. 
-HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"} - - -def raise_for_status(resp: Response) -> None: - http_error_msg = "" - if isinstance(resp.reason, bytes): - # We attempt to decode utf-8 first because some servers - # choose to localize their reason strings. If the string - # isn't utf-8, we fall back to iso-8859-1 for all other - # encodings. - try: - reason = resp.reason.decode("utf-8") - except UnicodeDecodeError: - reason = resp.reason.decode("iso-8859-1") - else: - reason = resp.reason - - if 400 <= resp.status_code < 500: - http_error_msg = ( - f"{resp.status_code} Client Error: {reason} for url: {resp.url}" - ) - - elif 500 <= resp.status_code < 600: - http_error_msg = ( - f"{resp.status_code} Server Error: {reason} for url: {resp.url}" - ) - - if http_error_msg: - raise NetworkConnectionError(http_error_msg, response=resp) - - -def response_chunks( - response: Response, chunk_size: int = CONTENT_CHUNK_SIZE -) -> Iterator[bytes]: - """Given a requests Response, provide the data chunks.""" - try: - # Special case for urllib3. - for chunk in response.raw.stream( - chunk_size, - # We use decode_content=False here because we don't - # want urllib3 to mess with the raw bytes we get - # from the server. If we decompress inside of - # urllib3 then we cannot verify the checksum - # because the checksum will be of the compressed - # file. This breakage will only occur if the - # server adds a Content-Encoding header, which - # depends on how the server was configured: - # - Some servers will notice that the file isn't a - # compressible file and will leave the file alone - # and with an empty Content-Encoding - # - Some servers will notice that the file is - # already compressed and will leave the file - # alone and will add a Content-Encoding: gzip - # header - # - Some servers won't notice anything at all and - # will take a file that's already been compressed - # and compress it again and set the - # Content-Encoding: gzip header - # - # By setting this not to decode automatically we - # hope to eliminate problems with the second case. - decode_content=False, - ): - yield chunk - except AttributeError: - # Standard file-like object. - while True: - chunk = response.raw.read(chunk_size) - if not chunk: - break - yield chunk diff --git a/venv/Lib/site-packages/pip/_internal/network/xmlrpc.py b/venv/Lib/site-packages/pip/_internal/network/xmlrpc.py deleted file mode 100644 index 4a7d55d..0000000 --- a/venv/Lib/site-packages/pip/_internal/network/xmlrpc.py +++ /dev/null @@ -1,60 +0,0 @@ -"""xmlrpclib.Transport implementation -""" - -import logging -import urllib.parse -import xmlrpc.client -from typing import TYPE_CHECKING, Tuple - -from pip._internal.exceptions import NetworkConnectionError -from pip._internal.network.session import PipSession -from pip._internal.network.utils import raise_for_status - -if TYPE_CHECKING: - from xmlrpc.client import _HostType, _Marshallable - -logger = logging.getLogger(__name__) - - -class PipXmlrpcTransport(xmlrpc.client.Transport): - """Provide a `xmlrpclib.Transport` implementation via a `PipSession` - object. 
- """ - - def __init__( - self, index_url: str, session: PipSession, use_datetime: bool = False - ) -> None: - super().__init__(use_datetime) - index_parts = urllib.parse.urlparse(index_url) - self._scheme = index_parts.scheme - self._session = session - - def request( - self, - host: "_HostType", - handler: str, - request_body: bytes, - verbose: bool = False, - ) -> Tuple["_Marshallable", ...]: - assert isinstance(host, str) - parts = (self._scheme, host, handler, None, None, None) - url = urllib.parse.urlunparse(parts) - try: - headers = {"Content-Type": "text/xml"} - response = self._session.post( - url, - data=request_body, - headers=headers, - stream=True, - ) - raise_for_status(response) - self.verbose = verbose - return self.parse_response(response.raw) - except NetworkConnectionError as exc: - assert exc.response - logger.critical( - "HTTP error %s while getting %s", - exc.response.status_code, - url, - ) - raise diff --git a/venv/Lib/site-packages/pip/_internal/operations/__init__.py b/venv/Lib/site-packages/pip/_internal/operations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 022909c..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc deleted file mode 100644 index 28502f5..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc deleted file mode 100644 index 55de16c..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc deleted file mode 100644 index f691f03..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__init__.py b/venv/Lib/site-packages/pip/_internal/operations/build/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index b4a197e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc deleted file mode 100644 index dc74bcc..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc 
b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc deleted file mode 100644 index f037947..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc deleted file mode 100644 index ef4df2a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index d8788db..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc deleted file mode 100644 index d9867eb..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc deleted file mode 100644 index 926fcfa..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/metadata.py b/venv/Lib/site-packages/pip/_internal/operations/build/metadata.py deleted file mode 100644 index e2b7b44..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/build/metadata.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Metadata generation logic for source distributions. -""" - -import os - -from pip._vendor.pep517.wrappers import Pep517HookCaller - -from pip._internal.build_env import BuildEnvironment -from pip._internal.exceptions import ( - InstallationSubprocessError, - MetadataGenerationFailed, -) -from pip._internal.utils.subprocess import runner_with_spinner_message -from pip._internal.utils.temp_dir import TempDirectory - - -def generate_metadata( - build_env: BuildEnvironment, backend: Pep517HookCaller, details: str -) -> str: - """Generate metadata using mechanisms described in PEP 517. - - Returns the generated metadata directory. - """ - metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) - - metadata_dir = metadata_tmpdir.path - - with build_env: - # Note that Pep517HookCaller implements a fallback for - # prepare_metadata_for_build_wheel, so we don't have to - # consider the possibility that this hook doesn't exist. 
- runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") - with backend.subprocess_runner(runner): - try: - distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) - except InstallationSubprocessError as error: - raise MetadataGenerationFailed(package_details=details) from error - - return os.path.join(metadata_dir, distinfo_dir) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/metadata_editable.py b/venv/Lib/site-packages/pip/_internal/operations/build/metadata_editable.py deleted file mode 100644 index 4c3f48b..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/build/metadata_editable.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Metadata generation logic for source distributions. -""" - -import os - -from pip._vendor.pep517.wrappers import Pep517HookCaller - -from pip._internal.build_env import BuildEnvironment -from pip._internal.exceptions import ( - InstallationSubprocessError, - MetadataGenerationFailed, -) -from pip._internal.utils.subprocess import runner_with_spinner_message -from pip._internal.utils.temp_dir import TempDirectory - - -def generate_editable_metadata( - build_env: BuildEnvironment, backend: Pep517HookCaller, details: str -) -> str: - """Generate metadata using mechanisms described in PEP 660. - - Returns the generated metadata directory. - """ - metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) - - metadata_dir = metadata_tmpdir.path - - with build_env: - # Note that Pep517HookCaller implements a fallback for - # prepare_metadata_for_build_wheel/editable, so we don't have to - # consider the possibility that this hook doesn't exist. - runner = runner_with_spinner_message( - "Preparing editable metadata (pyproject.toml)" - ) - with backend.subprocess_runner(runner): - try: - distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir) - except InstallationSubprocessError as error: - raise MetadataGenerationFailed(package_details=details) from error - - return os.path.join(metadata_dir, distinfo_dir) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py b/venv/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py deleted file mode 100644 index e60988d..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Metadata generation logic for legacy source distributions. 
-""" - -import logging -import os - -from pip._internal.build_env import BuildEnvironment -from pip._internal.cli.spinners import open_spinner -from pip._internal.exceptions import ( - InstallationError, - InstallationSubprocessError, - MetadataGenerationFailed, -) -from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args -from pip._internal.utils.subprocess import call_subprocess -from pip._internal.utils.temp_dir import TempDirectory - -logger = logging.getLogger(__name__) - - -def _find_egg_info(directory: str) -> str: - """Find an .egg-info subdirectory in `directory`.""" - filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")] - - if not filenames: - raise InstallationError(f"No .egg-info directory found in {directory}") - - if len(filenames) > 1: - raise InstallationError( - "More than one .egg-info directory found in {}".format(directory) - ) - - return os.path.join(directory, filenames[0]) - - -def generate_metadata( - build_env: BuildEnvironment, - setup_py_path: str, - source_dir: str, - isolated: bool, - details: str, -) -> str: - """Generate metadata using setup.py-based defacto mechanisms. - - Returns the generated metadata directory. - """ - logger.debug( - "Running setup.py (path:%s) egg_info for package %s", - setup_py_path, - details, - ) - - egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path - - args = make_setuptools_egg_info_args( - setup_py_path, - egg_info_dir=egg_info_dir, - no_user_config=isolated, - ) - - with build_env: - with open_spinner("Preparing metadata (setup.py)") as spinner: - try: - call_subprocess( - args, - cwd=source_dir, - command_desc="python setup.py egg_info", - spinner=spinner, - ) - except InstallationSubprocessError as error: - raise MetadataGenerationFailed(package_details=details) from error - - # Return the .egg-info directory. - return _find_egg_info(egg_info_dir) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/wheel.py b/venv/Lib/site-packages/pip/_internal/operations/build/wheel.py deleted file mode 100644 index b0d2fc9..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/build/wheel.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging -import os -from typing import Optional - -from pip._vendor.pep517.wrappers import Pep517HookCaller - -from pip._internal.utils.subprocess import runner_with_spinner_message - -logger = logging.getLogger(__name__) - - -def build_wheel_pep517( - name: str, - backend: Pep517HookCaller, - metadata_directory: str, - tempd: str, -) -> Optional[str]: - """Build one InstallRequirement using the PEP 517 build process. - - Returns path to wheel if successfully built. Otherwise, returns None. 
- """ - assert metadata_directory is not None - try: - logger.debug("Destination directory: %s", tempd) - - runner = runner_with_spinner_message( - f"Building wheel for {name} (pyproject.toml)" - ) - with backend.subprocess_runner(runner): - wheel_name = backend.build_wheel( - tempd, - metadata_directory=metadata_directory, - ) - except Exception: - logger.error("Failed building wheel for %s", name) - return None - return os.path.join(tempd, wheel_name) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/wheel_editable.py b/venv/Lib/site-packages/pip/_internal/operations/build/wheel_editable.py deleted file mode 100644 index cf7b01a..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/build/wheel_editable.py +++ /dev/null @@ -1,46 +0,0 @@ -import logging -import os -from typing import Optional - -from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller - -from pip._internal.utils.subprocess import runner_with_spinner_message - -logger = logging.getLogger(__name__) - - -def build_wheel_editable( - name: str, - backend: Pep517HookCaller, - metadata_directory: str, - tempd: str, -) -> Optional[str]: - """Build one InstallRequirement using the PEP 660 build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - assert metadata_directory is not None - try: - logger.debug("Destination directory: %s", tempd) - - runner = runner_with_spinner_message( - f"Building editable for {name} (pyproject.toml)" - ) - with backend.subprocess_runner(runner): - try: - wheel_name = backend.build_editable( - tempd, - metadata_directory=metadata_directory, - ) - except HookMissing as e: - logger.error( - "Cannot build editable %s because the build " - "backend does not have the %s hook", - name, - e, - ) - return None - except Exception: - logger.error("Failed building editable for %s", name) - return None - return os.path.join(tempd, wheel_name) diff --git a/venv/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py b/venv/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py deleted file mode 100644 index c5f0492..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py +++ /dev/null @@ -1,102 +0,0 @@ -import logging -import os.path -from typing import List, Optional - -from pip._internal.cli.spinners import open_spinner -from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args -from pip._internal.utils.subprocess import call_subprocess, format_command_args - -logger = logging.getLogger(__name__) - - -def format_command_result( - command_args: List[str], - command_output: str, -) -> str: - """Format command information for logging.""" - command_desc = format_command_args(command_args) - text = f"Command arguments: {command_desc}\n" - - if not command_output: - text += "Command output: None" - elif logger.getEffectiveLevel() > logging.DEBUG: - text += "Command output: [use --verbose to show]" - else: - if not command_output.endswith("\n"): - command_output += "\n" - text += f"Command output:\n{command_output}" - - return text - - -def get_legacy_build_wheel_path( - names: List[str], - temp_dir: str, - name: str, - command_args: List[str], - command_output: str, -) -> Optional[str]: - """Return the path to the wheel in the temporary build directory.""" - # Sort for determinism. 
- names = sorted(names) - if not names: - msg = ("Legacy build of wheel for {!r} created no files.\n").format(name) - msg += format_command_result(command_args, command_output) - logger.warning(msg) - return None - - if len(names) > 1: - msg = ( - "Legacy build of wheel for {!r} created more than one file.\n" - "Filenames (choosing first): {}\n" - ).format(name, names) - msg += format_command_result(command_args, command_output) - logger.warning(msg) - - return os.path.join(temp_dir, names[0]) - - -def build_wheel_legacy( - name: str, - setup_py_path: str, - source_dir: str, - global_options: List[str], - build_options: List[str], - tempd: str, -) -> Optional[str]: - """Build one unpacked package using the "legacy" build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - wheel_args = make_setuptools_bdist_wheel_args( - setup_py_path, - global_options=global_options, - build_options=build_options, - destination_dir=tempd, - ) - - spin_message = f"Building wheel for {name} (setup.py)" - with open_spinner(spin_message) as spinner: - logger.debug("Destination directory: %s", tempd) - - try: - output = call_subprocess( - wheel_args, - command_desc="python setup.py bdist_wheel", - cwd=source_dir, - spinner=spinner, - ) - except Exception: - spinner.finish("error") - logger.error("Failed building wheel for %s", name) - return None - - names = os.listdir(tempd) - wheel_path = get_legacy_build_wheel_path( - names=names, - temp_dir=tempd, - name=name, - command_args=wheel_args, - command_output=output, - ) - return wheel_path diff --git a/venv/Lib/site-packages/pip/_internal/operations/check.py b/venv/Lib/site-packages/pip/_internal/operations/check.py deleted file mode 100644 index fb3ac8b..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/check.py +++ /dev/null @@ -1,149 +0,0 @@ -"""Validation of dependencies of packages -""" - -import logging -from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple - -from pip._vendor.packaging.requirements import Requirement -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name - -from pip._internal.distributions import make_distribution_for_install_requirement -from pip._internal.metadata import get_default_environment -from pip._internal.metadata.base import DistributionVersion -from pip._internal.req.req_install import InstallRequirement - -logger = logging.getLogger(__name__) - - -class PackageDetails(NamedTuple): - version: DistributionVersion - dependencies: List[Requirement] - - -# Shorthands -PackageSet = Dict[NormalizedName, PackageDetails] -Missing = Tuple[NormalizedName, Requirement] -Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement] - -MissingDict = Dict[NormalizedName, List[Missing]] -ConflictingDict = Dict[NormalizedName, List[Conflicting]] -CheckResult = Tuple[MissingDict, ConflictingDict] -ConflictDetails = Tuple[PackageSet, CheckResult] - - -def create_package_set_from_installed() -> Tuple[PackageSet, bool]: - """Converts a list of distributions into a PackageSet.""" - package_set = {} - problems = False - env = get_default_environment() - for dist in env.iter_installed_distributions(local_only=False, skip=()): - name = dist.canonical_name - try: - dependencies = list(dist.iter_dependencies()) - package_set[name] = PackageDetails(dist.version, dependencies) - except (OSError, ValueError) as e: - # Don't crash on unreadable or broken metadata. 
- logger.warning("Error parsing requirements for %s: %s", name, e) - problems = True - return package_set, problems - - -def check_package_set( - package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None -) -> CheckResult: - """Check if a package set is consistent - - If should_ignore is passed, it should be a callable that takes a - package name and returns a boolean. - """ - - missing = {} - conflicting = {} - - for package_name, package_detail in package_set.items(): - # Info about dependencies of package_name - missing_deps: Set[Missing] = set() - conflicting_deps: Set[Conflicting] = set() - - if should_ignore and should_ignore(package_name): - continue - - for req in package_detail.dependencies: - name = canonicalize_name(req.name) - - # Check if it's missing - if name not in package_set: - missed = True - if req.marker is not None: - missed = req.marker.evaluate() - if missed: - missing_deps.add((name, req)) - continue - - # Check if there's a conflict - version = package_set[name].version - if not req.specifier.contains(version, prereleases=True): - conflicting_deps.add((name, version, req)) - - if missing_deps: - missing[package_name] = sorted(missing_deps, key=str) - if conflicting_deps: - conflicting[package_name] = sorted(conflicting_deps, key=str) - - return missing, conflicting - - -def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails: - """For checking if the dependency graph would be consistent after \ - installing given requirements - """ - # Start from the current state - package_set, _ = create_package_set_from_installed() - # Install packages - would_be_installed = _simulate_installation_of(to_install, package_set) - - # Only warn about directly-dependent packages; create a whitelist of them - whitelist = _create_whitelist(would_be_installed, package_set) - - return ( - package_set, - check_package_set( - package_set, should_ignore=lambda name: name not in whitelist - ), - ) - - -def _simulate_installation_of( - to_install: List[InstallRequirement], package_set: PackageSet -) -> Set[NormalizedName]: - """Computes the version of packages after installing to_install.""" - # Keep track of packages that were installed - installed = set() - - # Modify it as installing requirement_set would (assuming no errors) - for inst_req in to_install: - abstract_dist = make_distribution_for_install_requirement(inst_req) - dist = abstract_dist.get_metadata_distribution() - name = dist.canonical_name - package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies())) - - installed.add(name) - - return installed - - -def _create_whitelist( - would_be_installed: Set[NormalizedName], package_set: PackageSet -) -> Set[NormalizedName]: - packages_affected = set(would_be_installed) - - for package_name in package_set: - if package_name in packages_affected: - continue - - for req in package_set[package_name].dependencies: - if canonicalize_name(req.name) in packages_affected: - packages_affected.add(package_name) - break - - return packages_affected diff --git a/venv/Lib/site-packages/pip/_internal/operations/freeze.py b/venv/Lib/site-packages/pip/_internal/operations/freeze.py deleted file mode 100644 index 4565540..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/freeze.py +++ /dev/null @@ -1,254 +0,0 @@ -import collections -import logging -import os -from typing import Container, Dict, Iterable, Iterator, List, NamedTuple, Optional, Set - -from pip._vendor.packaging.utils import canonicalize_name -from 
pip._vendor.packaging.version import Version - -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.metadata import BaseDistribution, get_environment -from pip._internal.req.constructors import ( - install_req_from_editable, - install_req_from_line, -) -from pip._internal.req.req_file import COMMENT_RE -from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference - -logger = logging.getLogger(__name__) - - -class _EditableInfo(NamedTuple): - requirement: str - comments: List[str] - - -def freeze( - requirement: Optional[List[str]] = None, - local_only: bool = False, - user_only: bool = False, - paths: Optional[List[str]] = None, - isolated: bool = False, - exclude_editable: bool = False, - skip: Container[str] = (), -) -> Iterator[str]: - installations: Dict[str, FrozenRequirement] = {} - - dists = get_environment(paths).iter_installed_distributions( - local_only=local_only, - skip=(), - user_only=user_only, - ) - for dist in dists: - req = FrozenRequirement.from_dist(dist) - if exclude_editable and req.editable: - continue - installations[req.canonical_name] = req - - if requirement: - # the options that don't get turned into an InstallRequirement - # should only be emitted once, even if the same option is in multiple - # requirements files, so we need to keep track of what has been emitted - # so that we don't emit it again if it's seen again - emitted_options: Set[str] = set() - # keep track of which files a requirement is in so that we can - # give an accurate warning if a requirement appears multiple times. - req_files: Dict[str, List[str]] = collections.defaultdict(list) - for req_file_path in requirement: - with open(req_file_path) as req_file: - for line in req_file: - if ( - not line.strip() - or line.strip().startswith("#") - or line.startswith( - ( - "-r", - "--requirement", - "-f", - "--find-links", - "-i", - "--index-url", - "--pre", - "--trusted-host", - "--process-dependency-links", - "--extra-index-url", - "--use-feature", - ) - ) - ): - line = line.rstrip() - if line not in emitted_options: - emitted_options.add(line) - yield line - continue - - if line.startswith("-e") or line.startswith("--editable"): - if line.startswith("-e"): - line = line[2:].strip() - else: - line = line[len("--editable") :].strip().lstrip("=") - line_req = install_req_from_editable( - line, - isolated=isolated, - ) - else: - line_req = install_req_from_line( - COMMENT_RE.sub("", line).strip(), - isolated=isolated, - ) - - if not line_req.name: - logger.info( - "Skipping line in requirement file [%s] because " - "it's not clear what it would install: %s", - req_file_path, - line.strip(), - ) - logger.info( - " (add #egg=PackageName to the URL to avoid" - " this warning)" - ) - else: - line_req_canonical_name = canonicalize_name(line_req.name) - if line_req_canonical_name not in installations: - # either it's not installed, or it is installed - # but has been processed already - if not req_files[line_req.name]: - logger.warning( - "Requirement file [%s] contains %s, but " - "package %r is not installed", - req_file_path, - COMMENT_RE.sub("", line).strip(), - line_req.name, - ) - else: - req_files[line_req.name].append(req_file_path) - else: - yield str(installations[line_req_canonical_name]).rstrip() - del installations[line_req_canonical_name] - req_files[line_req.name].append(req_file_path) - - # Warn about requirements that were included multiple times (in a - # single requirements file or in different requirements files). 
- for name, files in req_files.items(): - if len(files) > 1: - logger.warning( - "Requirement %s included multiple times [%s]", - name, - ", ".join(sorted(set(files))), - ) - - yield ("## The following requirements were added by pip freeze:") - for installation in sorted(installations.values(), key=lambda x: x.name.lower()): - if installation.canonical_name not in skip: - yield str(installation).rstrip() - - -def _format_as_name_version(dist: BaseDistribution) -> str: - if isinstance(dist.version, Version): - return f"{dist.raw_name}=={dist.version}" - return f"{dist.raw_name}==={dist.version}" - - -def _get_editable_info(dist: BaseDistribution) -> _EditableInfo: - """ - Compute and return values (req, comments) for use in - FrozenRequirement.from_dist(). - """ - editable_project_location = dist.editable_project_location - assert editable_project_location - location = os.path.normcase(os.path.abspath(editable_project_location)) - - from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs - - vcs_backend = vcs.get_backend_for_dir(location) - - if vcs_backend is None: - display = _format_as_name_version(dist) - logger.debug( - 'No VCS found for editable requirement "%s" in: %r', - display, - location, - ) - return _EditableInfo( - requirement=location, - comments=[f"# Editable install with no version control ({display})"], - ) - - vcs_name = type(vcs_backend).__name__ - - try: - req = vcs_backend.get_src_requirement(location, dist.raw_name) - except RemoteNotFoundError: - display = _format_as_name_version(dist) - return _EditableInfo( - requirement=location, - comments=[f"# Editable {vcs_name} install with no remote ({display})"], - ) - except RemoteNotValidError as ex: - display = _format_as_name_version(dist) - return _EditableInfo( - requirement=location, - comments=[ - f"# Editable {vcs_name} install ({display}) with either a deleted " - f"local remote or invalid URI:", - f"# '{ex.url}'", - ], - ) - except BadCommand: - logger.warning( - "cannot determine version of editable source in %s " - "(%s command not found in path)", - location, - vcs_backend.name, - ) - return _EditableInfo(requirement=location, comments=[]) - except InstallationError as exc: - logger.warning("Error when trying to get requirement for VCS system %s", exc) - else: - return _EditableInfo(requirement=req, comments=[]) - - logger.warning("Could not determine repository location of %s", location) - - return _EditableInfo( - requirement=location, - comments=["## !! 
Could not determine repository location"], - ) - - -class FrozenRequirement: - def __init__( - self, - name: str, - req: str, - editable: bool, - comments: Iterable[str] = (), - ) -> None: - self.name = name - self.canonical_name = canonicalize_name(name) - self.req = req - self.editable = editable - self.comments = comments - - @classmethod - def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement": - editable = dist.editable - if editable: - req, comments = _get_editable_info(dist) - else: - comments = [] - direct_url = dist.direct_url - if direct_url: - # if PEP 610 metadata is present, use it - req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name) - else: - # name==version requirement - req = _format_as_name_version(dist) - - return cls(dist.raw_name, req, editable, comments=comments) - - def __str__(self) -> str: - req = self.req - if self.editable: - req = f"-e {req}" - return "\n".join(list(self.comments) + [str(req)]) + "\n" diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__init__.py b/venv/Lib/site-packages/pip/_internal/operations/install/__init__.py deleted file mode 100644 index 24d6a5d..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/install/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -"""For modules related to installing packages. -""" diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 0513978..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc deleted file mode 100644 index a352d8f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc deleted file mode 100644 index 1376eb3..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index 43ad2cd..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py b/venv/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py deleted file mode 100644 index bb548cd..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Legacy editable installation process, i.e. `setup.py develop`. 
-""" -import logging -from typing import List, Optional, Sequence - -from pip._internal.build_env import BuildEnvironment -from pip._internal.utils.logging import indent_log -from pip._internal.utils.setuptools_build import make_setuptools_develop_args -from pip._internal.utils.subprocess import call_subprocess - -logger = logging.getLogger(__name__) - - -def install_editable( - install_options: List[str], - global_options: Sequence[str], - prefix: Optional[str], - home: Optional[str], - use_user_site: bool, - name: str, - setup_py_path: str, - isolated: bool, - build_env: BuildEnvironment, - unpacked_source_directory: str, -) -> None: - """Install a package in editable mode. Most arguments are pass-through - to setuptools. - """ - logger.info("Running setup.py develop for %s", name) - - args = make_setuptools_develop_args( - setup_py_path, - global_options=global_options, - install_options=install_options, - no_user_config=isolated, - prefix=prefix, - home=home, - use_user_site=use_user_site, - ) - - with indent_log(): - with build_env: - call_subprocess( - args, - command_desc="python setup.py develop", - cwd=unpacked_source_directory, - ) diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/legacy.py b/venv/Lib/site-packages/pip/_internal/operations/install/legacy.py deleted file mode 100644 index 5b7ef90..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/install/legacy.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Legacy installation process, i.e. `setup.py install`. -""" - -import logging -import os -from distutils.util import change_root -from typing import List, Optional, Sequence - -from pip._internal.build_env import BuildEnvironment -from pip._internal.exceptions import InstallationError, LegacyInstallFailure -from pip._internal.models.scheme import Scheme -from pip._internal.utils.misc import ensure_dir -from pip._internal.utils.setuptools_build import make_setuptools_install_args -from pip._internal.utils.subprocess import runner_with_spinner_message -from pip._internal.utils.temp_dir import TempDirectory - -logger = logging.getLogger(__name__) - - -def write_installed_files_from_setuptools_record( - record_lines: List[str], - root: Optional[str], - req_description: str, -) -> None: - def prepend_root(path: str) -> str: - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - for line in record_lines: - directory = os.path.dirname(line) - if directory.endswith(".egg-info"): - egg_info_dir = prepend_root(directory) - break - else: - message = ( - "{} did not indicate that it installed an " - ".egg-info directory. Only setup.py projects " - "generating .egg-info directories are supported." 
- ).format(req_description) - raise InstallationError(message) - - new_lines = [] - for line in record_lines: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir)) - new_lines.sort() - ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, "installed-files.txt") - with open(inst_files_path, "w") as f: - f.write("\n".join(new_lines) + "\n") - - -def install( - install_options: List[str], - global_options: Sequence[str], - root: Optional[str], - home: Optional[str], - prefix: Optional[str], - use_user_site: bool, - pycompile: bool, - scheme: Scheme, - setup_py_path: str, - isolated: bool, - req_name: str, - build_env: BuildEnvironment, - unpacked_source_directory: str, - req_description: str, -) -> bool: - - header_dir = scheme.headers - - with TempDirectory(kind="record") as temp_dir: - try: - record_filename = os.path.join(temp_dir.path, "install-record.txt") - install_args = make_setuptools_install_args( - setup_py_path, - global_options=global_options, - install_options=install_options, - record_filename=record_filename, - root=root, - prefix=prefix, - header_dir=header_dir, - home=home, - use_user_site=use_user_site, - no_user_config=isolated, - pycompile=pycompile, - ) - - runner = runner_with_spinner_message( - f"Running setup.py install for {req_name}" - ) - with build_env: - runner( - cmd=install_args, - cwd=unpacked_source_directory, - ) - - if not os.path.exists(record_filename): - logger.debug("Record file %s not found", record_filename) - # Signal to the caller that we didn't install the new package - return False - - except Exception as e: - # Signal to the caller that we didn't install the new package - raise LegacyInstallFailure(package_details=req_name) from e - - # At this point, we have successfully installed the requirement. - - # We intentionally do not use any encoding to read the file because - # setuptools writes the file using distutils.file_util.write_file, - # which does not specify an encoding. - with open(record_filename) as f: - record_lines = f.read().splitlines() - - write_installed_files_from_setuptools_record(record_lines, root, req_description) - return True diff --git a/venv/Lib/site-packages/pip/_internal/operations/install/wheel.py b/venv/Lib/site-packages/pip/_internal/operations/install/wheel.py deleted file mode 100644 index e191b13..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/install/wheel.py +++ /dev/null @@ -1,738 +0,0 @@ -"""Support for installing and building the "wheel" binary package format. 
-""" - -import collections -import compileall -import contextlib -import csv -import importlib -import logging -import os.path -import re -import shutil -import sys -import warnings -from base64 import urlsafe_b64encode -from email.message import Message -from itertools import chain, filterfalse, starmap -from typing import ( - IO, - TYPE_CHECKING, - Any, - BinaryIO, - Callable, - Dict, - Iterable, - Iterator, - List, - NewType, - Optional, - Sequence, - Set, - Tuple, - Union, - cast, -) -from zipfile import ZipFile, ZipInfo - -from pip._vendor.distlib.scripts import ScriptMaker -from pip._vendor.distlib.util import get_export_entry -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.exceptions import InstallationError -from pip._internal.locations import get_major_minor_version -from pip._internal.metadata import ( - BaseDistribution, - FilesystemWheel, - get_wheel_distribution, -) -from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl -from pip._internal.models.scheme import SCHEME_KEYS, Scheme -from pip._internal.utils.filesystem import adjacent_tmp_file, replace -from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition -from pip._internal.utils.unpacking import ( - current_umask, - is_within_directory, - set_extracted_file_to_default_mode_plus_executable, - zip_item_is_executable, -) -from pip._internal.utils.wheel import parse_wheel - -if TYPE_CHECKING: - from typing import Protocol - - class File(Protocol): - src_record_path: "RecordPath" - dest_path: str - changed: bool - - def save(self) -> None: - pass - - -logger = logging.getLogger(__name__) - -RecordPath = NewType("RecordPath", str) -InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] - - -def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]: - """Return (encoded_digest, length) for path using hashlib.sha256()""" - h, length = hash_file(path, blocksize) - digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") - return (digest, str(length)) - - -def csv_io_kwargs(mode: str) -> Dict[str, Any]: - """Return keyword arguments to properly open a CSV file - in the given mode. - """ - return {"mode": mode, "newline": "", "encoding": "utf-8"} - - -def fix_script(path: str) -> bool: - """Replace #!python with #!/path/to/python - Return True if file was changed. - """ - # XXX RECORD hashes will need to be updated - assert os.path.isfile(path) - - with open(path, "rb") as script: - firstline = script.readline() - if not firstline.startswith(b"#!python"): - return False - exename = sys.executable.encode(sys.getfilesystemencoding()) - firstline = b"#!" + exename + os.linesep.encode("ascii") - rest = script.read() - with open(path, "wb") as script: - script.write(firstline) - script.write(rest) - return True - - -def wheel_root_is_purelib(metadata: Message) -> bool: - return metadata.get("Root-Is-Purelib", "").lower() == "true" - - -def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]: - console_scripts = {} - gui_scripts = {} - for entry_point in dist.iter_entry_points(): - if entry_point.group == "console_scripts": - console_scripts[entry_point.name] = entry_point.value - elif entry_point.group == "gui_scripts": - gui_scripts[entry_point.name] = entry_point.value - return console_scripts, gui_scripts - - -def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]: - """Determine if any scripts are not on PATH and format a warning. 
- Returns a warning message if one or more scripts are not on PATH, - otherwise None. - """ - if not scripts: - return None - - # Group scripts by the path they were installed in - grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set) - for destfile in scripts: - parent_dir = os.path.dirname(destfile) - script_name = os.path.basename(destfile) - grouped_by_dir[parent_dir].add(script_name) - - # We don't want to warn for directories that are on PATH. - not_warn_dirs = [ - os.path.normcase(i).rstrip(os.sep) - for i in os.environ.get("PATH", "").split(os.pathsep) - ] - # If an executable sits with sys.executable, we don't warn for it. - # This covers the case of venv invocations without activating the venv. - not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) - warn_for: Dict[str, Set[str]] = { - parent_dir: scripts - for parent_dir, scripts in grouped_by_dir.items() - if os.path.normcase(parent_dir) not in not_warn_dirs - } - if not warn_for: - return None - - # Format a message - msg_lines = [] - for parent_dir, dir_scripts in warn_for.items(): - sorted_scripts: List[str] = sorted(dir_scripts) - if len(sorted_scripts) == 1: - start_text = "script {} is".format(sorted_scripts[0]) - else: - start_text = "scripts {} are".format( - ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] - ) - - msg_lines.append( - "The {} installed in '{}' which is not on PATH.".format( - start_text, parent_dir - ) - ) - - last_line_fmt = ( - "Consider adding {} to PATH or, if you prefer " - "to suppress this warning, use --no-warn-script-location." - ) - if len(msg_lines) == 1: - msg_lines.append(last_line_fmt.format("this directory")) - else: - msg_lines.append(last_line_fmt.format("these directories")) - - # Add a note if any directory starts with ~ - warn_for_tilde = any( - i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i - ) - if warn_for_tilde: - tilde_warning_msg = ( - "NOTE: The current PATH contains path(s) starting with `~`, " - "which may not be expanded by all applications." - ) - msg_lines.append(tilde_warning_msg) - - # Returns the formatted multiline message - return "\n".join(msg_lines) - - -def _normalized_outrows( - outrows: Iterable[InstalledCSVRow], -) -> List[Tuple[str, str, str]]: - """Normalize the given rows of a RECORD file. - - Items in each row are converted into str. Rows are then sorted to make - the value more predictable for tests. - - Each row is a 3-tuple (path, hash, size) and corresponds to a record of - a RECORD file (see PEP 376 and PEP 427 for details). For the rows - passed to this function, the size can be an integer as an int or string, - or the empty string. - """ - # Normally, there should only be one row per path, in which case the - # second and third elements don't come into play when sorting. - # However, in cases in the wild where a path might happen to occur twice, - # we don't want the sort operation to trigger an error (but still want - # determinism). Since the third element can be an int or string, we - # coerce each element to a string to avoid a TypeError in this case. 
- # For additional background, see-- - # https://github.com/pypa/pip/issues/5868 - return sorted( - (record_path, hash_, str(size)) for record_path, hash_, size in outrows - ) - - -def _record_to_fs_path(record_path: RecordPath) -> str: - return record_path - - -def _fs_to_record_path(path: str, relative_to: Optional[str] = None) -> RecordPath: - if relative_to is not None: - # On Windows, do not handle relative paths if they belong to different - # logical disks - if ( - os.path.splitdrive(path)[0].lower() - == os.path.splitdrive(relative_to)[0].lower() - ): - path = os.path.relpath(path, relative_to) - path = path.replace(os.path.sep, "/") - return cast("RecordPath", path) - - -def get_csv_rows_for_installed( - old_csv_rows: List[List[str]], - installed: Dict[RecordPath, RecordPath], - changed: Set[RecordPath], - generated: List[str], - lib_dir: str, -) -> List[InstalledCSVRow]: - """ - :param installed: A map from archive RECORD path to installation RECORD - path. - """ - installed_rows: List[InstalledCSVRow] = [] - for row in old_csv_rows: - if len(row) > 3: - logger.warning("RECORD line has more than three elements: %s", row) - old_record_path = cast("RecordPath", row[0]) - new_record_path = installed.pop(old_record_path, old_record_path) - if new_record_path in changed: - digest, length = rehash(_record_to_fs_path(new_record_path)) - else: - digest = row[1] if len(row) > 1 else "" - length = row[2] if len(row) > 2 else "" - installed_rows.append((new_record_path, digest, length)) - for f in generated: - path = _fs_to_record_path(f, lib_dir) - digest, length = rehash(f) - installed_rows.append((path, digest, length)) - for installed_record_path in installed.values(): - installed_rows.append((installed_record_path, "", "")) - return installed_rows - - -def get_console_script_specs(console: Dict[str, str]) -> List[str]: - """ - Given the mapping from entrypoint name to callable, return the relevant - console script specs. - """ - # Don't mutate caller's version - console = console.copy() - - scripts_to_generate = [] - - # Special case pip and setuptools to generate versioned wrappers - # - # The issue is that some projects (specifically, pip and setuptools) use - # code in setup.py to create "versioned" entry points - pip2.7 on Python - # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into - # the wheel metadata at build time, and so if the wheel is installed with - # a *different* version of Python the entry points will be wrong. The - # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. - # In the meantime, projects using versioned entry points will either have - # incorrect versioned entry points, or they will not be able to distribute - # "universal" wheels (i.e., they will need a wheel per Python version). - # - # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we - # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. - # - # To add the level of hack in this section of code, in order to support - # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment - # variable which will control which version scripts get installed. 
- # - # ENSUREPIP_OPTIONS=altinstall - # - Only pipX.Y and easy_install-X.Y will be generated and installed - # ENSUREPIP_OPTIONS=install - # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note - # that this option is technically if ENSUREPIP_OPTIONS is set and is - # not altinstall - # DEFAULT - # - The default behavior is to install pip, pipX, pipX.Y, easy_install - # and easy_install-X.Y. - pip_script = console.pop("pip", None) - if pip_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append("pip = " + pip_script) - - if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - scripts_to_generate.append( - "pip{} = {}".format(sys.version_info[0], pip_script) - ) - - scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") - # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)] - for k in pip_ep: - del console[k] - easy_install_script = console.pop("easy_install", None) - if easy_install_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append("easy_install = " + easy_install_script) - - scripts_to_generate.append( - "easy_install-{} = {}".format( - get_major_minor_version(), easy_install_script - ) - ) - # Delete any other versioned easy_install entry points - easy_install_ep = [ - k for k in console if re.match(r"easy_install(-\d\.\d)?$", k) - ] - for k in easy_install_ep: - del console[k] - - # Generate the console entry points specified in the wheel - scripts_to_generate.extend(starmap("{} = {}".format, console.items())) - - return scripts_to_generate - - -class ZipBackedFile: - def __init__( - self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile - ) -> None: - self.src_record_path = src_record_path - self.dest_path = dest_path - self._zip_file = zip_file - self.changed = False - - def _getinfo(self) -> ZipInfo: - return self._zip_file.getinfo(self.src_record_path) - - def save(self) -> None: - # directory creation is lazy and after file filtering - # to ensure we don't install empty dirs; empty dirs can't be - # uninstalled. - parent_dir = os.path.dirname(self.dest_path) - ensure_dir(parent_dir) - - # When we open the output file below, any existing file is truncated - # before we start writing the new contents. This is fine in most - # cases, but can cause a segfault if pip has loaded a shared - # object (e.g. from pyopenssl through its vendored urllib3) - # Since the shared object is mmap'd an attempt to call a - # symbol in it will then cause a segfault. Unlinking the file - # allows writing of new contents while allowing the process to - # continue to use the old copy. - if os.path.exists(self.dest_path): - os.unlink(self.dest_path) - - zipinfo = self._getinfo() - - with self._zip_file.open(zipinfo) as f: - with open(self.dest_path, "wb") as dest: - shutil.copyfileobj(f, dest) - - if zip_item_is_executable(zipinfo): - set_extracted_file_to_default_mode_plus_executable(self.dest_path) - - -class ScriptFile: - def __init__(self, file: "File") -> None: - self._file = file - self.src_record_path = self._file.src_record_path - self.dest_path = self._file.dest_path - self.changed = False - - def save(self) -> None: - self._file.save() - self.changed = fix_script(self.dest_path) - - -class MissingCallableSuffix(InstallationError): - def __init__(self, entry_point: str) -> None: - super().__init__( - "Invalid script entry point: {} - A callable " - "suffix is required. 
Cf https://packaging.python.org/" - "specifications/entry-points/#use-for-scripts for more " - "information.".format(entry_point) - ) - - -def _raise_for_invalid_entrypoint(specification: str) -> None: - entry = get_export_entry(specification) - if entry is not None and entry.suffix is None: - raise MissingCallableSuffix(str(entry)) - - -class PipScriptMaker(ScriptMaker): - def make(self, specification: str, options: Dict[str, Any] = None) -> List[str]: - _raise_for_invalid_entrypoint(specification) - return super().make(specification, options) - - -def _install_wheel( - name: str, - wheel_zip: ZipFile, - wheel_path: str, - scheme: Scheme, - pycompile: bool = True, - warn_script_location: bool = True, - direct_url: Optional[DirectUrl] = None, - requested: bool = False, -) -> None: - """Install a wheel. - - :param name: Name of the project to install - :param wheel_zip: open ZipFile for wheel being installed - :param scheme: Distutils scheme dictating the install directories - :param req_description: String used in place of the requirement, for - logging - :param pycompile: Whether to byte-compile installed Python files - :param warn_script_location: Whether to check that scripts are installed - into a directory on PATH - :raises UnsupportedWheel: - * when the directory holds an unpacked wheel with incompatible - Wheel-Version - * when the .dist-info dir does not match the wheel - """ - info_dir, metadata = parse_wheel(wheel_zip, name) - - if wheel_root_is_purelib(metadata): - lib_dir = scheme.purelib - else: - lib_dir = scheme.platlib - - # Record details of the files moved - # installed = files copied from the wheel to the destination - # changed = files changed while installing (scripts #! line typically) - # generated = files newly generated during the install (script wrappers) - installed: Dict[RecordPath, RecordPath] = {} - changed: Set[RecordPath] = set() - generated: List[str] = [] - - def record_installed( - srcfile: RecordPath, destfile: str, modified: bool = False - ) -> None: - """Map archive RECORD paths to installation RECORD paths.""" - newpath = _fs_to_record_path(destfile, lib_dir) - installed[srcfile] = newpath - if modified: - changed.add(_fs_to_record_path(destfile)) - - def is_dir_path(path: RecordPath) -> bool: - return path.endswith("/") - - def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None: - if not is_within_directory(dest_dir_path, target_path): - message = ( - "The wheel {!r} has a file {!r} trying to install" - " outside the target directory {!r}" - ) - raise InstallationError( - message.format(wheel_path, target_path, dest_dir_path) - ) - - def root_scheme_file_maker( - zip_file: ZipFile, dest: str - ) -> Callable[[RecordPath], "File"]: - def make_root_scheme_file(record_path: RecordPath) -> "File": - normed_path = os.path.normpath(record_path) - dest_path = os.path.join(dest, normed_path) - assert_no_path_traversal(dest, dest_path) - return ZipBackedFile(record_path, dest_path, zip_file) - - return make_root_scheme_file - - def data_scheme_file_maker( - zip_file: ZipFile, scheme: Scheme - ) -> Callable[[RecordPath], "File"]: - scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS} - - def make_data_scheme_file(record_path: RecordPath) -> "File": - normed_path = os.path.normpath(record_path) - try: - _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) - except ValueError: - message = ( - "Unexpected file in {}: {!r}. .data directory contents" - " should be named like: '/'." 
- ).format(wheel_path, record_path) - raise InstallationError(message) - - try: - scheme_path = scheme_paths[scheme_key] - except KeyError: - valid_scheme_keys = ", ".join(sorted(scheme_paths)) - message = ( - "Unknown scheme key used in {}: {} (for file {!r}). .data" - " directory contents should be in subdirectories named" - " with a valid scheme key ({})" - ).format(wheel_path, scheme_key, record_path, valid_scheme_keys) - raise InstallationError(message) - - dest_path = os.path.join(scheme_path, dest_subpath) - assert_no_path_traversal(scheme_path, dest_path) - return ZipBackedFile(record_path, dest_path, zip_file) - - return make_data_scheme_file - - def is_data_scheme_path(path: RecordPath) -> bool: - return path.split("/", 1)[0].endswith(".data") - - paths = cast(List[RecordPath], wheel_zip.namelist()) - file_paths = filterfalse(is_dir_path, paths) - root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths) - - make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir) - files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths) - - def is_script_scheme_path(path: RecordPath) -> bool: - parts = path.split("/", 2) - return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" - - other_scheme_paths, script_scheme_paths = partition( - is_script_scheme_path, data_scheme_paths - ) - - make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) - other_scheme_files = map(make_data_scheme_file, other_scheme_paths) - files = chain(files, other_scheme_files) - - # Get the defined entry points - distribution = get_wheel_distribution( - FilesystemWheel(wheel_path), - canonicalize_name(name), - ) - console, gui = get_entrypoints(distribution) - - def is_entrypoint_wrapper(file: "File") -> bool: - # EP, EP.exe and EP-script.py are scripts generated for - # entry point EP by setuptools - path = file.dest_path - name = os.path.basename(path) - if name.lower().endswith(".exe"): - matchname = name[:-4] - elif name.lower().endswith("-script.py"): - matchname = name[:-10] - elif name.lower().endswith(".pya"): - matchname = name[:-4] - else: - matchname = name - # Ignore setuptools-generated scripts - return matchname in console or matchname in gui - - script_scheme_files: Iterator[File] = map( - make_data_scheme_file, script_scheme_paths - ) - script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files) - script_scheme_files = map(ScriptFile, script_scheme_files) - files = chain(files, script_scheme_files) - - for file in files: - file.save() - record_installed(file.src_record_path, file.dest_path, file.changed) - - def pyc_source_file_paths() -> Iterator[str]: - # We de-duplicate installation paths, since there can be overlap (e.g. - # file in .data maps to same location as file in wheel root). - # Sorting installation paths makes it easier to reproduce and debug - # issues related to permissions on existing files. 
- for installed_path in sorted(set(installed.values())): - full_installed_path = os.path.join(lib_dir, installed_path) - if not os.path.isfile(full_installed_path): - continue - if not full_installed_path.endswith(".py"): - continue - yield full_installed_path - - def pyc_output_path(path: str) -> str: - """Return the path the pyc file would have been written to.""" - return importlib.util.cache_from_source(path) - - # Compile all of the pyc files for the installed files - if pycompile: - with captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore") - for path in pyc_source_file_paths(): - success = compileall.compile_file(path, force=True, quiet=True) - if success: - pyc_path = pyc_output_path(path) - assert os.path.exists(pyc_path) - pyc_record_path = cast( - "RecordPath", pyc_path.replace(os.path.sep, "/") - ) - record_installed(pyc_record_path, pyc_path) - logger.debug(stdout.getvalue()) - - maker = PipScriptMaker(None, scheme.scripts) - - # Ensure old scripts are overwritten. - # See https://github.com/pypa/pip/issues/1800 - maker.clobber = True - - # Ensure we don't generate any variants for scripts because this is almost - # never what somebody wants. - # See https://bitbucket.org/pypa/distlib/issue/35/ - maker.variants = {""} - - # This is required because otherwise distlib creates scripts that are not - # executable. - # See https://bitbucket.org/pypa/distlib/issue/32/ - maker.set_mode = True - - # Generate the console and GUI entry points specified in the wheel - scripts_to_generate = get_console_script_specs(console) - - gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items())) - - generated_console_scripts = maker.make_multiple(scripts_to_generate) - generated.extend(generated_console_scripts) - - generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) - - if warn_script_location: - msg = message_about_scripts_not_on_PATH(generated_console_scripts) - if msg is not None: - logger.warning(msg) - - generated_file_mode = 0o666 & ~current_umask() - - @contextlib.contextmanager - def _generate_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: - with adjacent_tmp_file(path, **kwargs) as f: - yield f - os.chmod(f.name, generated_file_mode) - replace(f.name, path) - - dest_info_dir = os.path.join(lib_dir, info_dir) - - # Record pip as the installer - installer_path = os.path.join(dest_info_dir, "INSTALLER") - with _generate_file(installer_path) as installer_file: - installer_file.write(b"pip\n") - generated.append(installer_path) - - # Record the PEP 610 direct URL reference - if direct_url is not None: - direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) - with _generate_file(direct_url_path) as direct_url_file: - direct_url_file.write(direct_url.to_json().encode("utf-8")) - generated.append(direct_url_path) - - # Record the REQUESTED file - if requested: - requested_path = os.path.join(dest_info_dir, "REQUESTED") - with open(requested_path, "wb"): - pass - generated.append(requested_path) - - record_text = distribution.read_text("RECORD") - record_rows = list(csv.reader(record_text.splitlines())) - - rows = get_csv_rows_for_installed( - record_rows, - installed=installed, - changed=changed, - generated=generated, - lib_dir=lib_dir, - ) - - # Record details of all files installed - record_path = os.path.join(dest_info_dir, "RECORD") - - with _generate_file(record_path, **csv_io_kwargs("w")) as record_file: - # Explicitly cast to typing.IO[str] as a workaround for the mypy error: - # 
"writer" has incompatible type "BinaryIO"; expected "_Writer" - writer = csv.writer(cast("IO[str]", record_file)) - writer.writerows(_normalized_outrows(rows)) - - -@contextlib.contextmanager -def req_error_context(req_description: str) -> Iterator[None]: - try: - yield - except InstallationError as e: - message = "For req: {}. {}".format(req_description, e.args[0]) - raise InstallationError(message) from e - - -def install_wheel( - name: str, - wheel_path: str, - scheme: Scheme, - req_description: str, - pycompile: bool = True, - warn_script_location: bool = True, - direct_url: Optional[DirectUrl] = None, - requested: bool = False, -) -> None: - with ZipFile(wheel_path, allowZip64=True) as z: - with req_error_context(req_description): - _install_wheel( - name=name, - wheel_zip=z, - wheel_path=wheel_path, - scheme=scheme, - pycompile=pycompile, - warn_script_location=warn_script_location, - direct_url=direct_url, - requested=requested, - ) diff --git a/venv/Lib/site-packages/pip/_internal/operations/prepare.py b/venv/Lib/site-packages/pip/_internal/operations/prepare.py deleted file mode 100644 index a726f03..0000000 --- a/venv/Lib/site-packages/pip/_internal/operations/prepare.py +++ /dev/null @@ -1,642 +0,0 @@ -"""Prepares a distribution for installation -""" - -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import logging -import mimetypes -import os -import shutil -from typing import Dict, Iterable, List, Optional - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.distributions import make_distribution_for_install_requirement -from pip._internal.distributions.installed import InstalledDistribution -from pip._internal.exceptions import ( - DirectoryUrlHashUnsupported, - HashMismatch, - HashUnpinned, - InstallationError, - NetworkConnectionError, - PreviousBuildDirError, - VcsHashUnsupported, -) -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution -from pip._internal.models.link import Link -from pip._internal.models.wheel import Wheel -from pip._internal.network.download import BatchDownloader, Downloader -from pip._internal.network.lazy_wheel import ( - HTTPRangeRequestUnsupported, - dist_from_wheel_url, -) -from pip._internal.network.session import PipSession -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.utils.filesystem import copy2_fixed -from pip._internal.utils.hashes import Hashes, MissingHashes -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.unpacking import unpack_file -from pip._internal.vcs import vcs - -logger = logging.getLogger(__name__) - - -def _get_prepared_distribution( - req: InstallRequirement, - req_tracker: RequirementTracker, - finder: PackageFinder, - build_isolation: bool, -) -> BaseDistribution: - """Prepare a distribution for installation.""" - abstract_dist = make_distribution_for_install_requirement(req) - with req_tracker.track(req): - abstract_dist.prepare_distribution_metadata(finder, build_isolation) - return abstract_dist.get_metadata_distribution() - - -def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None: - vcs_backend = vcs.get_backend_for_scheme(link.scheme) - assert vcs_backend is not None - 
vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity) - - -class File: - def __init__(self, path: str, content_type: Optional[str]) -> None: - self.path = path - if content_type is None: - self.content_type = mimetypes.guess_type(path)[0] - else: - self.content_type = content_type - - -def get_http_url( - link: Link, - download: Downloader, - download_dir: Optional[str] = None, - hashes: Optional[Hashes] = None, -) -> File: - temp_dir = TempDirectory(kind="unpack", globally_managed=True) - # If a download dir is specified, is the file already downloaded there? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, download_dir, hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - content_type = None - else: - # let's download to a tmp dir - from_path, content_type = download(link, temp_dir.path) - if hashes: - hashes.check_against_path(from_path) - - return File(from_path, content_type) - - -def _copy2_ignoring_special_files(src: str, dest: str) -> None: - """Copying special files is not supported, but as a convenience to users - we skip errors copying them. This supports tools that may create e.g. - socket files in the project source directory. - """ - try: - copy2_fixed(src, dest) - except shutil.SpecialFileError as e: - # SpecialFileError may be raised due to either the source or - # destination. If the destination was the cause then we would actually - # care, but since the destination directory is deleted prior to - # copy we ignore all of them assuming it is caused by the source. - logger.warning( - "Ignoring special file error '%s' encountered copying %s to %s.", - str(e), - src, - dest, - ) - - -def _copy_source_tree(source: str, target: str) -> None: - target_abspath = os.path.abspath(target) - target_basename = os.path.basename(target_abspath) - target_dirname = os.path.dirname(target_abspath) - - def ignore(d: str, names: List[str]) -> List[str]: - skipped: List[str] = [] - if d == source: - # Pulling in those directories can potentially be very slow, - # exclude the following directories if they appear in the top - # level dir (and only it). - # See discussion at https://github.com/pypa/pip/pull/6770 - skipped += [".tox", ".nox"] - if os.path.abspath(d) == target_dirname: - # Prevent an infinite recursion if the target is in source. - # This can happen when TMPDIR is set to ${PWD}/... - # and we copy PWD to TMPDIR. - skipped += [target_basename] - return skipped - - shutil.copytree( - source, - target, - ignore=ignore, - symlinks=True, - copy_function=_copy2_ignoring_special_files, - ) - - -def get_file_url( - link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None -) -> File: - """Get file and optionally check its hash.""" - # If a download dir is specified, is the file already there and valid? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, download_dir, hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - else: - from_path = link.file_path - - # If --require-hashes is off, `hashes` is either empty, the - # link's embedded hash, or MissingHashes; it is required to - # match. If --require-hashes is on, we are satisfied by any - # hash in `hashes` matching: a URL-based or an option-based - # one; no internet-sourced hash will be in `hashes`. 
- if hashes: - hashes.check_against_path(from_path) - return File(from_path, None) - - -def unpack_url( - link: Link, - location: str, - download: Downloader, - verbosity: int, - download_dir: Optional[str] = None, - hashes: Optional[Hashes] = None, -) -> Optional[File]: - """Unpack link into location, downloading if required. - - :param hashes: A Hashes object, one of whose embedded hashes must match, - or HashMismatch will be raised. If the Hashes is empty, no matches are - required, and unhashable types of requirements (like VCS ones, which - would ordinarily raise HashUnsupported) are allowed. - """ - # non-editable vcs urls - if link.is_vcs: - unpack_vcs_link(link, location, verbosity=verbosity) - return None - - # Once out-of-tree-builds are no longer supported, could potentially - # replace the below condition with `assert not link.is_existing_dir` - # - unpack_url does not need to be called for in-tree-builds. - # - # As further cleanup, _copy_source_tree and accompanying tests can - # be removed. - # - # TODO when use-deprecated=out-of-tree-build is removed - if link.is_existing_dir(): - if os.path.isdir(location): - rmtree(location) - _copy_source_tree(link.file_path, location) - return None - - # file urls - if link.is_file: - file = get_file_url(link, download_dir, hashes=hashes) - - # http urls - else: - file = get_http_url( - link, - download, - download_dir, - hashes=hashes, - ) - - # unpack the archive to the build dir location. even when only downloading - # archives, they have to be unpacked to parse dependencies, except wheels - if not link.is_wheel: - unpack_file(file.path, location, file.content_type) - - return file - - -def _check_download_dir( - link: Link, download_dir: str, hashes: Optional[Hashes] -) -> Optional[str]: - """Check download_dir for previously downloaded file with correct hash - If a correct file is found return its path else None - """ - download_path = os.path.join(download_dir, link.filename) - - if not os.path.exists(download_path): - return None - - # If already downloaded, does its hash match? - logger.info("File was already downloaded %s", download_path) - if hashes: - try: - hashes.check_against_path(download_path) - except HashMismatch: - logger.warning( - "Previously-downloaded file %s has bad hash. Re-downloading.", - download_path, - ) - os.unlink(download_path) - return None - return download_path - - -class RequirementPreparer: - """Prepares a Requirement""" - - def __init__( - self, - build_dir: str, - download_dir: Optional[str], - src_dir: str, - build_isolation: bool, - req_tracker: RequirementTracker, - session: PipSession, - progress_bar: str, - finder: PackageFinder, - require_hashes: bool, - use_user_site: bool, - lazy_wheel: bool, - verbosity: int, - in_tree_build: bool, - ) -> None: - super().__init__() - - self.src_dir = src_dir - self.build_dir = build_dir - self.req_tracker = req_tracker - self._session = session - self._download = Downloader(session, progress_bar) - self._batch_download = BatchDownloader(session, progress_bar) - self.finder = finder - - # Where still-packed archives should be written to. If None, they are - # not saved, and are deleted immediately after unpacking. - self.download_dir = download_dir - - # Is build isolation allowed? - self.build_isolation = build_isolation - - # Should hash-checking be required? - self.require_hashes = require_hashes - - # Should install in user site-packages? - self.use_user_site = use_user_site - - # Should wheels be downloaded lazily? 
- self.use_lazy_wheel = lazy_wheel - - # How verbose should underlying tooling be? - self.verbosity = verbosity - - # Should in-tree builds be used for local paths? - self.in_tree_build = in_tree_build - - # Memoized downloaded files, as mapping of url: path. - self._downloaded: Dict[str, str] = {} - - # Previous "header" printed for a link-based InstallRequirement - self._previous_requirement_header = ("", "") - - def _log_preparing_link(self, req: InstallRequirement) -> None: - """Provide context for the requirement being prepared.""" - if req.link.is_file and not req.original_link_is_in_wheel_cache: - message = "Processing %s" - information = str(display_path(req.link.file_path)) - else: - message = "Collecting %s" - information = str(req.req or req) - - if (message, information) != self._previous_requirement_header: - self._previous_requirement_header = (message, information) - logger.info(message, information) - - if req.original_link_is_in_wheel_cache: - with indent_log(): - logger.info("Using cached %s", req.link.filename) - - def _ensure_link_req_src_dir( - self, req: InstallRequirement, parallel_builds: bool - ) -> None: - """Ensure source_dir of a linked InstallRequirement.""" - # Since source_dir is only set for editable requirements. - if req.link.is_wheel: - # We don't need to unpack wheels, so no need for a source - # directory. - return - assert req.source_dir is None - if req.link.is_existing_dir() and self.in_tree_build: - # build local directories in-tree - req.source_dir = req.link.file_path - return - - # We always delete unpacked sdists after pip runs. - req.ensure_has_source_dir( - self.build_dir, - autodelete=True, - parallel_builds=parallel_builds, - ) - - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # TODO: this check is now probably dead code - if is_installable_dir(req.source_dir): - raise PreviousBuildDirError( - "pip can't proceed with requirements '{}' due to a" - "pre-existing build directory ({}). This is likely " - "due to a previous installation that failed . pip is " - "being responsible and not assuming it can delete this. " - "Please delete it and try again.".format(req, req.source_dir) - ) - - def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: - # By the time this is called, the requirement's link should have - # been checked so we can tell what kind of requirements req is - # and raise some more informative errors than otherwise. - # (For example, we can raise VcsHashUnsupported for a VCS URL - # rather than HashMissing.) - if not self.require_hashes: - return req.hashes(trust_internet=True) - - # We could check these first 2 conditions inside unpack_url - # and save repetition of conditions, but then we would - # report less-useful error messages for unhashable - # requirements, complaining that there's no hash provided. - if req.link.is_vcs: - raise VcsHashUnsupported() - if req.link.is_existing_dir(): - raise DirectoryUrlHashUnsupported() - - # Unpinned packages are asking for trouble when a new version - # is uploaded. This isn't a security check, but it saves users - # a surprising hash mismatch in the future. - # file:/// URLs aren't pinnable, so don't complain about them - # not being pinned. 
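The download-directory reuse in _check_download_dir earlier comes down to "keep the cached file only if its hash still matches, otherwise delete it and re-download". A rough standalone sketch using hashlib directly instead of pip's Hashes helper:

import hashlib
import os

def reusable_download(path: str, expected_sha256: str) -> bool:
    if not os.path.exists(path):
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    if digest.hexdigest() == expected_sha256:
        return True
    os.unlink(path)  # stale or corrupt cache entry: force a fresh download
    return False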
- if req.original_link is None and not req.is_pinned: - raise HashUnpinned() - - # If known-good hashes are missing for this requirement, - # shim it with a facade object that will provoke hash - # computation and then raise a HashMissing exception - # showing the user what the hash should be. - return req.hashes(trust_internet=False) or MissingHashes() - - def _fetch_metadata_using_lazy_wheel( - self, - link: Link, - ) -> Optional[BaseDistribution]: - """Fetch metadata using lazy wheel, if possible.""" - if not self.use_lazy_wheel: - return None - if self.require_hashes: - logger.debug("Lazy wheel is not used as hash checking is required") - return None - if link.is_file or not link.is_wheel: - logger.debug( - "Lazy wheel is not used as %r does not points to a remote wheel", - link, - ) - return None - - wheel = Wheel(link.filename) - name = canonicalize_name(wheel.name) - logger.info( - "Obtaining dependency information from %s %s", - name, - wheel.version, - ) - url = link.url.split("#", 1)[0] - try: - return dist_from_wheel_url(name, url, self._session) - except HTTPRangeRequestUnsupported: - logger.debug("%s does not support range requests", url) - return None - - def _complete_partial_requirements( - self, - partially_downloaded_reqs: Iterable[InstallRequirement], - parallel_builds: bool = False, - ) -> None: - """Download any requirements which were only fetched by metadata.""" - # Download to a temporary directory. These will be copied over as - # needed for downstream 'download', 'wheel', and 'install' commands. - temp_dir = TempDirectory(kind="unpack", globally_managed=True).path - - # Map each link to the requirement that owns it. This allows us to set - # `req.local_file_path` on the appropriate requirement after passing - # all the links at once into BatchDownloader. - links_to_fully_download: Dict[Link, InstallRequirement] = {} - for req in partially_downloaded_reqs: - assert req.link - links_to_fully_download[req.link] = req - - batch_download = self._batch_download( - links_to_fully_download.keys(), - temp_dir, - ) - for link, (filepath, _) in batch_download: - logger.debug("Downloading link %s to %s", link, filepath) - req = links_to_fully_download[link] - req.local_file_path = filepath - - # This step is necessary to ensure all lazy wheels are processed - # successfully by the 'download', 'wheel', and 'install' commands. 
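_fetch_metadata_using_lazy_wheel works because a wheel's dependency information lives in its *.dist-info/METADATA member; the lazy-wheel machinery fetches just enough of the remote zip to read it. A sketch of the same extraction for a wheel that is already on disk (not pip's range-request implementation):

import zipfile
from email.parser import Parser

def wheel_requires_dist(wheel_path: str) -> list:
    with zipfile.ZipFile(wheel_path) as zf:
        # Every wheel carries a *.dist-info/METADATA member.
        metadata_name = next(
            name for name in zf.namelist() if name.endswith(".dist-info/METADATA")
        )
        metadata = Parser().parsestr(zf.read(metadata_name).decode("utf-8"))
    return metadata.get_all("Requires-Dist") or []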
- for req in partially_downloaded_reqs: - self._prepare_linked_requirement(req, parallel_builds) - - def prepare_linked_requirement( - self, req: InstallRequirement, parallel_builds: bool = False - ) -> BaseDistribution: - """Prepare a requirement to be obtained from req.link.""" - assert req.link - link = req.link - self._log_preparing_link(req) - with indent_log(): - # Check if the relevant file is already available - # in the download directory - file_path = None - if self.download_dir is not None and link.is_wheel: - hashes = self._get_linked_req_hashes(req) - file_path = _check_download_dir(req.link, self.download_dir, hashes) - - if file_path is not None: - # The file is already available, so mark it as downloaded - self._downloaded[req.link.url] = file_path - else: - # The file is not available, attempt to fetch only metadata - wheel_dist = self._fetch_metadata_using_lazy_wheel(link) - if wheel_dist is not None: - req.needs_more_preparation = True - return wheel_dist - - # None of the optimizations worked, fully prepare the requirement - return self._prepare_linked_requirement(req, parallel_builds) - - def prepare_linked_requirements_more( - self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False - ) -> None: - """Prepare linked requirements more, if needed.""" - reqs = [req for req in reqs if req.needs_more_preparation] - for req in reqs: - # Determine if any of these requirements were already downloaded. - if self.download_dir is not None and req.link.is_wheel: - hashes = self._get_linked_req_hashes(req) - file_path = _check_download_dir(req.link, self.download_dir, hashes) - if file_path is not None: - self._downloaded[req.link.url] = file_path - req.needs_more_preparation = False - - # Prepare requirements we found were already downloaded for some - # reason. The other downloads will be completed separately. - partially_downloaded_reqs: List[InstallRequirement] = [] - for req in reqs: - if req.needs_more_preparation: - partially_downloaded_reqs.append(req) - else: - self._prepare_linked_requirement(req, parallel_builds) - - # TODO: separate this part out from RequirementPreparer when the v1 - # resolver can be removed! - self._complete_partial_requirements( - partially_downloaded_reqs, - parallel_builds=parallel_builds, - ) - - def _prepare_linked_requirement( - self, req: InstallRequirement, parallel_builds: bool - ) -> BaseDistribution: - assert req.link - link = req.link - - self._ensure_link_req_src_dir(req, parallel_builds) - hashes = self._get_linked_req_hashes(req) - - if link.is_existing_dir() and self.in_tree_build: - local_file = None - elif link.url not in self._downloaded: - try: - local_file = unpack_url( - link, - req.source_dir, - self._download, - self.verbosity, - self.download_dir, - hashes, - ) - except NetworkConnectionError as exc: - raise InstallationError( - "Could not install requirement {} because of HTTP " - "error {} for URL {}".format(req, exc, link) - ) - else: - file_path = self._downloaded[link.url] - if hashes: - hashes.check_against_path(file_path) - local_file = File(file_path, content_type=None) - - # For use in later processing, - # preserve the file path on the requirement. 
- if local_file: - req.local_file_path = local_file.path - - dist = _get_prepared_distribution( - req, - self.req_tracker, - self.finder, - self.build_isolation, - ) - return dist - - def save_linked_requirement(self, req: InstallRequirement) -> None: - assert self.download_dir is not None - assert req.link is not None - link = req.link - if link.is_vcs or (link.is_existing_dir() and req.editable): - # Make a .zip of the source_dir we already created. - req.archive(self.download_dir) - return - - if link.is_existing_dir(): - logger.debug( - "Not copying link to destination directory " - "since it is a directory: %s", - link, - ) - return - if req.local_file_path is None: - # No distribution was downloaded for this requirement. - return - - download_location = os.path.join(self.download_dir, link.filename) - if not os.path.exists(download_location): - shutil.copy(req.local_file_path, download_location) - download_path = display_path(download_location) - logger.info("Saved %s", download_path) - - def prepare_editable_requirement( - self, - req: InstallRequirement, - ) -> BaseDistribution: - """Prepare an editable requirement.""" - assert req.editable, "cannot prepare a non-editable req as editable" - - logger.info("Obtaining %s", req) - - with indent_log(): - if self.require_hashes: - raise InstallationError( - "The editable requirement {} cannot be installed when " - "requiring hashes, because there is no single file to " - "hash.".format(req) - ) - req.ensure_has_source_dir(self.src_dir) - req.update_editable() - - dist = _get_prepared_distribution( - req, - self.req_tracker, - self.finder, - self.build_isolation, - ) - - req.check_if_exists(self.use_user_site) - - return dist - - def prepare_installed_requirement( - self, - req: InstallRequirement, - skip_reason: str, - ) -> BaseDistribution: - """Prepare an already-installed requirement.""" - assert req.satisfied_by, "req should have been satisfied but isn't" - assert skip_reason is not None, ( - "did not get skip reason skipped but req.satisfied_by " - "is set to {}".format(req.satisfied_by) - ) - logger.info( - "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version - ) - with indent_log(): - if self.require_hashes: - logger.debug( - "Since it is already installed, we are trusting this " - "package without checking its hash. To ensure a " - "completely repeatable environment, install into an " - "empty virtualenv." 
- ) - return InstalledDistribution(req).get_metadata_distribution() diff --git a/venv/Lib/site-packages/pip/_internal/pyproject.py b/venv/Lib/site-packages/pip/_internal/pyproject.py deleted file mode 100644 index e183eaf..0000000 --- a/venv/Lib/site-packages/pip/_internal/pyproject.py +++ /dev/null @@ -1,168 +0,0 @@ -import os -from collections import namedtuple -from typing import Any, List, Optional - -from pip._vendor import tomli -from pip._vendor.packaging.requirements import InvalidRequirement, Requirement - -from pip._internal.exceptions import ( - InstallationError, - InvalidPyProjectBuildRequires, - MissingPyProjectBuildRequires, -) - - -def _is_list_of_str(obj: Any) -> bool: - return isinstance(obj, list) and all(isinstance(item, str) for item in obj) - - -def make_pyproject_path(unpacked_source_directory: str) -> str: - return os.path.join(unpacked_source_directory, "pyproject.toml") - - -BuildSystemDetails = namedtuple( - "BuildSystemDetails", ["requires", "backend", "check", "backend_path"] -) - - -def load_pyproject_toml( - use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str -) -> Optional[BuildSystemDetails]: - """Load the pyproject.toml file. - - Parameters: - use_pep517 - Has the user requested PEP 517 processing? None - means the user hasn't explicitly specified. - pyproject_toml - Location of the project's pyproject.toml file - setup_py - Location of the project's setup.py file - req_name - The name of the requirement we're processing (for - error reporting) - - Returns: - None if we should use the legacy code path, otherwise a tuple - ( - requirements from pyproject.toml, - name of PEP 517 backend, - requirements we should check are installed after setting - up the build environment - directory paths to import the backend from (backend-path), - relative to the project root. - ) - """ - has_pyproject = os.path.isfile(pyproject_toml) - has_setup = os.path.isfile(setup_py) - - if not has_pyproject and not has_setup: - raise InstallationError( - f"{req_name} does not appear to be a Python project: " - f"neither 'setup.py' nor 'pyproject.toml' found." - ) - - if has_pyproject: - with open(pyproject_toml, encoding="utf-8") as f: - pp_toml = tomli.loads(f.read()) - build_system = pp_toml.get("build-system") - else: - build_system = None - - # The following cases must use PEP 517 - # We check for use_pep517 being non-None and falsey because that means - # the user explicitly requested --no-use-pep517. The value 0 as - # opposed to False can occur when the value is provided via an - # environment variable or config file option (due to the quirk of - # strtobool() returning an integer in pip's configuration code). - if has_pyproject and not has_setup: - if use_pep517 is not None and not use_pep517: - raise InstallationError( - "Disabling PEP 517 processing is invalid: " - "project does not have a setup.py" - ) - use_pep517 = True - elif build_system and "build-backend" in build_system: - if use_pep517 is not None and not use_pep517: - raise InstallationError( - "Disabling PEP 517 processing is invalid: " - "project specifies a build backend of {} " - "in pyproject.toml".format(build_system["build-backend"]) - ) - use_pep517 = True - - # If we haven't worked out whether to use PEP 517 yet, - # and the user hasn't explicitly stated a preference, - # we do so if the project has a pyproject.toml file. - elif use_pep517 is None: - use_pep517 = has_pyproject - - # At this point, we know whether we're going to use PEP 517. 
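Condensed, the build-system lookup above amounts to: read the build-system table if pyproject.toml has one, otherwise fall back to the legacy setuptools backend (the same default applied further down). An illustrative sketch using the standard-library TOML parser (Python 3.11+) instead of pip's vendored one:

import tomllib

def read_build_system(pyproject_path: str) -> dict:
    with open(pyproject_path, "rb") as f:
        build_system = tomllib.load(f).get("build-system")
    if build_system is None:
        build_system = {
            "requires": ["setuptools>=40.8.0", "wheel"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }
    return build_system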
- assert use_pep517 is not None - - # If we're using the legacy code path, there is nothing further - # for us to do here. - if not use_pep517: - return None - - if build_system is None: - # Either the user has a pyproject.toml with no build-system - # section, or the user has no pyproject.toml, but has opted in - # explicitly via --use-pep517. - # In the absence of any explicit backend specification, we - # assume the setuptools backend that most closely emulates the - # traditional direct setup.py execution, and require wheel and - # a version of setuptools that supports that backend. - - build_system = { - "requires": ["setuptools>=40.8.0", "wheel"], - "build-backend": "setuptools.build_meta:__legacy__", - } - - # If we're using PEP 517, we have build system information (either - # from pyproject.toml, or defaulted by the code above). - # Note that at this point, we do not know if the user has actually - # specified a backend, though. - assert build_system is not None - - # Ensure that the build-system section in pyproject.toml conforms - # to PEP 518. - - # Specifying the build-system table but not the requires key is invalid - if "requires" not in build_system: - raise MissingPyProjectBuildRequires(package=req_name) - - # Error out if requires is not a list of strings - requires = build_system["requires"] - if not _is_list_of_str(requires): - raise InvalidPyProjectBuildRequires( - package=req_name, - reason="It is not a list of strings.", - ) - - # Each requirement must be valid as per PEP 508 - for requirement in requires: - try: - Requirement(requirement) - except InvalidRequirement as error: - raise InvalidPyProjectBuildRequires( - package=req_name, - reason=f"It contains an invalid requirement: {requirement!r}", - ) from error - - backend = build_system.get("build-backend") - backend_path = build_system.get("backend-path", []) - check: List[str] = [] - if backend is None: - # If the user didn't specify a backend, we assume they want to use - # the setuptools backend. But we can't be sure they have included - # a version of setuptools which supplies the backend, or wheel - # (which is needed by the backend) in their requirements. So we - # make a note to check that those requirements are present once - # we have set up the environment. - # This is quite a lot of work to check for a very specific case. But - # the problem is, that case is potentially quite common - projects that - # adopted PEP 518 early for the ability to specify requirements to - # execute setup.py, but never considered needing to mention the build - # tools themselves. The original PEP 518 code had a similar check (but - # implemented in a different way). 
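Each entry in build-system.requires must be a valid PEP 508 requirement string, which is what the loop above enforces. The same check can be expressed with the public packaging library (illustrative helper, not pip's code):

from packaging.requirements import InvalidRequirement, Requirement

def validate_build_requires(requires: object) -> None:
    if not (isinstance(requires, list) and all(isinstance(r, str) for r in requires)):
        raise ValueError("build-system.requires must be a list of strings")
    for entry in requires:
        try:
            Requirement(entry)
        except InvalidRequirement as err:
            raise ValueError(f"invalid build requirement: {entry!r}") from err

validate_build_requires(["setuptools>=40.8.0", "wheel"])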
- backend = "setuptools.build_meta:__legacy__" - check = ["setuptools>=40.8.0", "wheel"] - - return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/venv/Lib/site-packages/pip/_internal/req/__init__.py b/venv/Lib/site-packages/pip/_internal/req/__init__.py deleted file mode 100644 index 70dea27..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/__init__.py +++ /dev/null @@ -1,94 +0,0 @@ -import collections -import logging -from typing import Iterator, List, Optional, Sequence, Tuple - -from pip._internal.utils.logging import indent_log - -from .req_file import parse_requirements -from .req_install import InstallRequirement -from .req_set import RequirementSet - -__all__ = [ - "RequirementSet", - "InstallRequirement", - "parse_requirements", - "install_given_reqs", -] - -logger = logging.getLogger(__name__) - - -class InstallationResult: - def __init__(self, name: str) -> None: - self.name = name - - def __repr__(self) -> str: - return f"InstallationResult(name={self.name!r})" - - -def _validate_requirements( - requirements: List[InstallRequirement], -) -> Iterator[Tuple[str, InstallRequirement]]: - for req in requirements: - assert req.name, f"invalid to-be-installed requirement: {req}" - yield req.name, req - - -def install_given_reqs( - requirements: List[InstallRequirement], - install_options: List[str], - global_options: Sequence[str], - root: Optional[str], - home: Optional[str], - prefix: Optional[str], - warn_script_location: bool, - use_user_site: bool, - pycompile: bool, -) -> List[InstallationResult]: - """ - Install everything in the given list. - - (to be called after having downloaded and unpacked the packages) - """ - to_install = collections.OrderedDict(_validate_requirements(requirements)) - - if to_install: - logger.info( - "Installing collected packages: %s", - ", ".join(to_install.keys()), - ) - - installed = [] - - with indent_log(): - for req_name, requirement in to_install.items(): - if requirement.should_reinstall: - logger.info("Attempting uninstall: %s", req_name) - with indent_log(): - uninstalled_pathset = requirement.uninstall(auto_confirm=True) - else: - uninstalled_pathset = None - - try: - requirement.install( - install_options, - global_options, - root=root, - home=home, - prefix=prefix, - warn_script_location=warn_script_location, - use_user_site=use_user_site, - pycompile=pycompile, - ) - except Exception: - # if install did not succeed, rollback previous uninstall - if uninstalled_pathset and not requirement.install_succeeded: - uninstalled_pathset.rollback() - raise - else: - if uninstalled_pathset and requirement.install_succeeded: - uninstalled_pathset.commit() - - installed.append(InstallationResult(req_name)) - - return installed diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 86b16eb..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc deleted file mode 100644 index 1fc065e..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc 
b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc deleted file mode 100644 index a06ab3a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc deleted file mode 100644 index 1fd8cf0..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc deleted file mode 100644 index 711d6a2..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc deleted file mode 100644 index 08d96c5..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc deleted file mode 100644 index fa55a0a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/req/constructors.py b/venv/Lib/site-packages/pip/_internal/req/constructors.py deleted file mode 100644 index 25bfb39..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/constructors.py +++ /dev/null @@ -1,490 +0,0 @@ -"""Backing implementation for InstallRequirement's various constructors - -The idea here is that these formed a major chunk of InstallRequirement's size -so, moving them and support code dedicated to them outside of that class -helps creates for better understandability for the rest of the code. - -These are meant to be used elsewhere within pip to create instances of -InstallRequirement. 
-""" - -import logging -import os -import re -from typing import Any, Dict, Optional, Set, Tuple, Union - -from pip._vendor.packaging.markers import Marker -from pip._vendor.packaging.requirements import InvalidRequirement, Requirement -from pip._vendor.packaging.specifiers import Specifier - -from pip._internal.exceptions import InstallationError -from pip._internal.models.index import PyPI, TestPyPI -from pip._internal.models.link import Link -from pip._internal.models.wheel import Wheel -from pip._internal.req.req_file import ParsedRequirement -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.filetypes import is_archive_file -from pip._internal.utils.misc import is_installable_dir -from pip._internal.utils.packaging import get_requirement -from pip._internal.utils.urls import path_to_url -from pip._internal.vcs import is_url, vcs - -__all__ = [ - "install_req_from_editable", - "install_req_from_line", - "parse_editable", -] - -logger = logging.getLogger(__name__) -operators = Specifier._operators.keys() - - -def _strip_extras(path: str) -> Tuple[str, Optional[str]]: - m = re.match(r"^(.+)(\[[^\]]+\])$", path) - extras = None - if m: - path_no_extras = m.group(1) - extras = m.group(2) - else: - path_no_extras = path - - return path_no_extras, extras - - -def convert_extras(extras: Optional[str]) -> Set[str]: - if not extras: - return set() - return get_requirement("placeholder" + extras.lower()).extras - - -def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: - """Parses an editable requirement into: - - a requirement name - - an URL - - extras - - editable options - Accepted requirements: - svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir - .[some_extra] - """ - - url = editable_req - - # If a file path is specified with extras, strip off the extras. - url_no_extras, extras = _strip_extras(url) - - if os.path.isdir(url_no_extras): - # Treating it as code that has already been checked out - url_no_extras = path_to_url(url_no_extras) - - if url_no_extras.lower().startswith("file:"): - package_name = Link(url_no_extras).egg_fragment - if extras: - return ( - package_name, - url_no_extras, - get_requirement("placeholder" + extras.lower()).extras, - ) - else: - return package_name, url_no_extras, set() - - for version_control in vcs: - if url.lower().startswith(f"{version_control}:"): - url = f"{version_control}+{url}" - break - - link = Link(url) - - if not link.is_vcs: - backends = ", ".join(vcs.all_schemes) - raise InstallationError( - f"{editable_req} is not a valid editable requirement. " - f"It should either be a path to a local project or a VCS URL " - f"(beginning with {backends})." - ) - - package_name = link.egg_fragment - if not package_name: - raise InstallationError( - "Could not detect requirement name for '{}', please specify one " - "with #egg=your_package_name".format(editable_req) - ) - return package_name, url, set() - - -def check_first_requirement_in_file(filename: str) -> None: - """Check if file is parsable as a requirements file. - - This is heavily based on ``pkg_resources.parse_requirements``, but - simplified to just check the first meaningful line. - - :raises InvalidRequirement: If the first meaningful line cannot be parsed - as an requirement. - """ - with open(filename, encoding="utf-8", errors="ignore") as f: - # Create a steppable iterator, so we can handle \-continuations. 
- lines = ( - line - for line in (line.strip() for line in f) - if line and not line.startswith("#") # Skip blank lines/comments. - ) - - for line in lines: - # Drop comments -- a hash without a space may be in a URL. - if " #" in line: - line = line[: line.find(" #")] - # If there is a line continuation, drop it, and append the next line. - if line.endswith("\\"): - line = line[:-2].strip() + next(lines, "") - Requirement(line) - return - - -def deduce_helpful_msg(req: str) -> str: - """Returns helpful msg in case requirements file does not exist, - or cannot be parsed. - - :params req: Requirements file path - """ - if not os.path.exists(req): - return f" File '{req}' does not exist." - msg = " The path does exist. " - # Try to parse and check if it is a requirements file. - try: - check_first_requirement_in_file(req) - except InvalidRequirement: - logger.debug("Cannot parse '%s' as requirements file", req) - else: - msg += ( - f"The argument you provided " - f"({req}) appears to be a" - f" requirements file. If that is the" - f" case, use the '-r' flag to install" - f" the packages specified within it." - ) - return msg - - -class RequirementParts: - def __init__( - self, - requirement: Optional[Requirement], - link: Optional[Link], - markers: Optional[Marker], - extras: Set[str], - ): - self.requirement = requirement - self.link = link - self.markers = markers - self.extras = extras - - -def parse_req_from_editable(editable_req: str) -> RequirementParts: - name, url, extras_override = parse_editable(editable_req) - - if name is not None: - try: - req: Optional[Requirement] = Requirement(name) - except InvalidRequirement: - raise InstallationError(f"Invalid requirement: '{name}'") - else: - req = None - - link = Link(url) - - return RequirementParts(req, link, None, extras_override) - - -# ---- The actual constructors follow ---- - - -def install_req_from_editable( - editable_req: str, - comes_from: Optional[Union[InstallRequirement, str]] = None, - use_pep517: Optional[bool] = None, - isolated: bool = False, - options: Optional[Dict[str, Any]] = None, - constraint: bool = False, - user_supplied: bool = False, - permit_editable_wheels: bool = False, -) -> InstallRequirement: - - parts = parse_req_from_editable(editable_req) - - return InstallRequirement( - parts.requirement, - comes_from=comes_from, - user_supplied=user_supplied, - editable=True, - permit_editable_wheels=permit_editable_wheels, - link=parts.link, - constraint=constraint, - use_pep517=use_pep517, - isolated=isolated, - install_options=options.get("install_options", []) if options else [], - global_options=options.get("global_options", []) if options else [], - hash_options=options.get("hashes", {}) if options else {}, - extras=parts.extras, - ) - - -def _looks_like_path(name: str) -> bool: - """Checks whether the string "looks like" a path on the filesystem. - - This does not check whether the target actually exists, only judge from the - appearance. - - Returns true if any of the following conditions is true: - * a path separator is found (either os.path.sep or os.path.altsep); - * a dot is found (which represents the current directory). - """ - if os.path.sep in name: - return True - if os.path.altsep is not None and os.path.altsep in name: - return True - if name.startswith("."): - return True - return False - - -def _get_url_from_path(path: str, name: str) -> Optional[str]: - """ - First, it checks whether a provided path is an installable directory. If it - is, returns the path. 
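The extras handling in _strip_extras above is a single trailing-bracket regex over the path. A standalone version with illustrative names:

import re

_EXTRAS_RE = re.compile(r"^(.+)(\[[^\]]+\])$")

def strip_extras(path: str):
    match = _EXTRAS_RE.match(path)
    if match:
        return match.group(1), match.group(2)
    return path, None

print(strip_extras("./mypkg[dev,tests]"))  # ('./mypkg', '[dev,tests]')
print(strip_extras("./mypkg"))             # ('./mypkg', None)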
- - If false, check if the path is an archive file (such as a .whl). - The function checks if the path is a file. If false, if the path has - an @, it will treat it as a PEP 440 URL requirement and return the path. - """ - if _looks_like_path(name) and os.path.isdir(path): - if is_installable_dir(path): - return path_to_url(path) - # TODO: The is_installable_dir test here might not be necessary - # now that it is done in load_pyproject_toml too. - raise InstallationError( - f"Directory {name!r} is not installable. Neither 'setup.py' " - "nor 'pyproject.toml' found." - ) - if not is_archive_file(path): - return None - if os.path.isfile(path): - return path_to_url(path) - urlreq_parts = name.split("@", 1) - if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): - # If the path contains '@' and the part before it does not look - # like a path, try to treat it as a PEP 440 URL req instead. - return None - logger.warning( - "Requirement %r looks like a filename, but the file does not exist", - name, - ) - return path_to_url(path) - - -def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts: - if is_url(name): - marker_sep = "; " - else: - marker_sep = ";" - if marker_sep in name: - name, markers_as_string = name.split(marker_sep, 1) - markers_as_string = markers_as_string.strip() - if not markers_as_string: - markers = None - else: - markers = Marker(markers_as_string) - else: - markers = None - name = name.strip() - req_as_string = None - path = os.path.normpath(os.path.abspath(name)) - link = None - extras_as_string = None - - if is_url(name): - link = Link(name) - else: - p, extras_as_string = _strip_extras(path) - url = _get_url_from_path(p, name) - if url is not None: - link = Link(url) - - # it's a local file, dir, or url - if link: - # Handle relative file URLs - if link.scheme == "file" and re.search(r"\.\./", link.url): - link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) - # wheel file - if link.is_wheel: - wheel = Wheel(link.filename) # can raise InvalidWheelFilename - req_as_string = f"{wheel.name}=={wheel.version}" - else: - # set the req to the egg fragment. when it's not there, this - # will become an 'unnamed' requirement - req_as_string = link.egg_fragment - - # a requirement specifier - else: - req_as_string = name - - extras = convert_extras(extras_as_string) - - def with_source(text: str) -> str: - if not line_source: - return text - return f"{text} (from {line_source})" - - def _parse_req_string(req_as_string: str) -> Requirement: - try: - req = get_requirement(req_as_string) - except InvalidRequirement: - if os.path.sep in req_as_string: - add_msg = "It looks like a path." - add_msg += deduce_helpful_msg(req_as_string) - elif "=" in req_as_string and not any( - op in req_as_string for op in operators - ): - add_msg = "= is not a valid operator. Did you mean == ?" - else: - add_msg = "" - msg = with_source(f"Invalid requirement: {req_as_string!r}") - if add_msg: - msg += f"\nHint: {add_msg}" - raise InstallationError(msg) - else: - # Deprecate extras after specifiers: "name>=1.0[extras]" - # This currently works by accident because _strip_extras() parses - # any extras in the end of the string and those are saved in - # RequirementParts - for spec in req.specifier: - spec_str = str(spec) - if spec_str.endswith("]"): - msg = f"Extras after version '{spec_str}'." 
- raise InstallationError(msg) - return req - - if req_as_string is not None: - req: Optional[Requirement] = _parse_req_string(req_as_string) - else: - req = None - - return RequirementParts(req, link, markers, extras) - - -def install_req_from_line( - name: str, - comes_from: Optional[Union[str, InstallRequirement]] = None, - use_pep517: Optional[bool] = None, - isolated: bool = False, - options: Optional[Dict[str, Any]] = None, - constraint: bool = False, - line_source: Optional[str] = None, - user_supplied: bool = False, -) -> InstallRequirement: - """Creates an InstallRequirement from a name, which might be a - requirement, directory containing 'setup.py', filename, or URL. - - :param line_source: An optional string describing where the line is from, - for logging purposes in case of an error. - """ - parts = parse_req_from_line(name, line_source) - - return InstallRequirement( - parts.requirement, - comes_from, - link=parts.link, - markers=parts.markers, - use_pep517=use_pep517, - isolated=isolated, - install_options=options.get("install_options", []) if options else [], - global_options=options.get("global_options", []) if options else [], - hash_options=options.get("hashes", {}) if options else {}, - constraint=constraint, - extras=parts.extras, - user_supplied=user_supplied, - ) - - -def install_req_from_req_string( - req_string: str, - comes_from: Optional[InstallRequirement] = None, - isolated: bool = False, - use_pep517: Optional[bool] = None, - user_supplied: bool = False, -) -> InstallRequirement: - try: - req = get_requirement(req_string) - except InvalidRequirement: - raise InstallationError(f"Invalid requirement: '{req_string}'") - - domains_not_allowed = [ - PyPI.file_storage_domain, - TestPyPI.file_storage_domain, - ] - if ( - req.url - and comes_from - and comes_from.link - and comes_from.link.netloc in domains_not_allowed - ): - # Explicitly disallow pypi packages that depend on external urls - raise InstallationError( - "Packages installed from PyPI cannot depend on packages " - "which are not also hosted on PyPI.\n" - "{} depends on {} ".format(comes_from.name, req) - ) - - return InstallRequirement( - req, - comes_from, - isolated=isolated, - use_pep517=use_pep517, - user_supplied=user_supplied, - ) - - -def install_req_from_parsed_requirement( - parsed_req: ParsedRequirement, - isolated: bool = False, - use_pep517: Optional[bool] = None, - user_supplied: bool = False, -) -> InstallRequirement: - if parsed_req.is_editable: - req = install_req_from_editable( - parsed_req.requirement, - comes_from=parsed_req.comes_from, - use_pep517=use_pep517, - constraint=parsed_req.constraint, - isolated=isolated, - user_supplied=user_supplied, - ) - - else: - req = install_req_from_line( - parsed_req.requirement, - comes_from=parsed_req.comes_from, - use_pep517=use_pep517, - isolated=isolated, - options=parsed_req.options, - constraint=parsed_req.constraint, - line_source=parsed_req.line_source, - user_supplied=user_supplied, - ) - return req - - -def install_req_from_link_and_ireq( - link: Link, ireq: InstallRequirement -) -> InstallRequirement: - return InstallRequirement( - req=ireq.req, - comes_from=ireq.comes_from, - editable=ireq.editable, - link=link, - markers=ireq.markers, - use_pep517=ireq.use_pep517, - isolated=ireq.isolated, - install_options=ireq.install_options, - global_options=ireq.global_options, - hash_options=ireq.hash_options, - ) diff --git a/venv/Lib/site-packages/pip/_internal/req/req_file.py b/venv/Lib/site-packages/pip/_internal/req/req_file.py deleted file 
mode 100644 index 03ae504..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/req_file.py +++ /dev/null @@ -1,536 +0,0 @@ -""" -Requirements file parsing -""" - -import optparse -import os -import re -import shlex -import urllib.parse -from optparse import Values -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - Iterator, - List, - Optional, - Tuple, -) - -from pip._internal.cli import cmdoptions -from pip._internal.exceptions import InstallationError, RequirementsFileParseError -from pip._internal.models.search_scope import SearchScope -from pip._internal.network.session import PipSession -from pip._internal.network.utils import raise_for_status -from pip._internal.utils.encoding import auto_decode -from pip._internal.utils.urls import get_url_scheme - -if TYPE_CHECKING: - # NoReturn introduced in 3.6.2; imported only for type checking to maintain - # pip compatibility with older patch versions of Python 3.6 - from typing import NoReturn - - from pip._internal.index.package_finder import PackageFinder - -__all__ = ["parse_requirements"] - -ReqFileLines = Iterable[Tuple[int, str]] - -LineParser = Callable[[str], Tuple[str, Values]] - -SCHEME_RE = re.compile(r"^(http|https|file):", re.I) -COMMENT_RE = re.compile(r"(^|\s+)#.*$") - -# Matches environment variable-style values in '${MY_VARIABLE_1}' with the -# variable name consisting of only uppercase letters, digits or the '_' -# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, -# 2013 Edition. -ENV_VAR_RE = re.compile(r"(?P\$\{(?P[A-Z0-9_]+)\})") - -SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [ - cmdoptions.index_url, - cmdoptions.extra_index_url, - cmdoptions.no_index, - cmdoptions.constraints, - cmdoptions.requirements, - cmdoptions.editable, - cmdoptions.find_links, - cmdoptions.no_binary, - cmdoptions.only_binary, - cmdoptions.prefer_binary, - cmdoptions.require_hashes, - cmdoptions.pre, - cmdoptions.trusted_host, - cmdoptions.use_new_feature, -] - -# options to be passed to requirements -SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [ - cmdoptions.install_options, - cmdoptions.global_options, - cmdoptions.hash, -] - -# the 'dest' string values -SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] - - -class ParsedRequirement: - def __init__( - self, - requirement: str, - is_editable: bool, - comes_from: str, - constraint: bool, - options: Optional[Dict[str, Any]] = None, - line_source: Optional[str] = None, - ) -> None: - self.requirement = requirement - self.is_editable = is_editable - self.comes_from = comes_from - self.options = options - self.constraint = constraint - self.line_source = line_source - - -class ParsedLine: - def __init__( - self, - filename: str, - lineno: int, - args: str, - opts: Values, - constraint: bool, - ) -> None: - self.filename = filename - self.lineno = lineno - self.opts = opts - self.constraint = constraint - - if args: - self.is_requirement = True - self.is_editable = False - self.requirement = args - elif opts.editables: - self.is_requirement = True - self.is_editable = True - # We don't support multiple -e on one line - self.requirement = opts.editables[0] - else: - self.is_requirement = False - - -def parse_requirements( - filename: str, - session: PipSession, - finder: Optional["PackageFinder"] = None, - options: Optional[optparse.Values] = None, - constraint: bool = False, -) -> Iterator[ParsedRequirement]: - """Parse a requirements file and yield ParsedRequirement instances. 
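The ${VARIABLE} convention matched by ENV_VAR_RE above is expanded later by expand_env_variables: only uppercase/digit/underscore names are recognised, and unset variables are left in place. A self-contained sketch of that substitution (not the deleted module's function):

import os
import re

ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

def expand_env_variables(line: str) -> str:
    for env_var, var_name in ENV_VAR_RE.findall(line):
        value = os.getenv(var_name)
        if value:
            line = line.replace(env_var, value)
    return line

os.environ["INDEX_TOKEN"] = "s3cret"
print(expand_env_variables("--index-url=https://user:${INDEX_TOKEN}@example.com/simple"))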
- - :param filename: Path or url of requirements file. - :param session: PipSession instance. - :param finder: Instance of pip.index.PackageFinder. - :param options: cli options. - :param constraint: If true, parsing a constraint file rather than - requirements file. - """ - line_parser = get_line_parser(finder) - parser = RequirementsFileParser(session, line_parser) - - for parsed_line in parser.parse(filename, constraint): - parsed_req = handle_line( - parsed_line, options=options, finder=finder, session=session - ) - if parsed_req is not None: - yield parsed_req - - -def preprocess(content: str) -> ReqFileLines: - """Split, filter, and join lines, and return a line iterator - - :param content: the content of the requirements file - """ - lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1) - lines_enum = join_lines(lines_enum) - lines_enum = ignore_comments(lines_enum) - lines_enum = expand_env_variables(lines_enum) - return lines_enum - - -def handle_requirement_line( - line: ParsedLine, - options: Optional[optparse.Values] = None, -) -> ParsedRequirement: - - # preserve for the nested code path - line_comes_from = "{} {} (line {})".format( - "-c" if line.constraint else "-r", - line.filename, - line.lineno, - ) - - assert line.is_requirement - - if line.is_editable: - # For editable requirements, we don't support per-requirement - # options, so just return the parsed requirement. - return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - ) - else: - if options: - # Disable wheels if the user has specified build options - cmdoptions.check_install_build_global(options, line.opts) - - # get the options that apply to requirements - req_options = {} - for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in line.opts.__dict__ and line.opts.__dict__[dest]: - req_options[dest] = line.opts.__dict__[dest] - - line_source = f"line {line.lineno} of {line.filename}" - return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - options=req_options, - line_source=line_source, - ) - - -def handle_option_line( - opts: Values, - filename: str, - lineno: int, - finder: Optional["PackageFinder"] = None, - options: Optional[optparse.Values] = None, - session: Optional[PipSession] = None, -) -> None: - - if options: - # percolate options upward - if opts.require_hashes: - options.require_hashes = opts.require_hashes - if opts.features_enabled: - options.features_enabled.extend( - f for f in opts.features_enabled if f not in options.features_enabled - ) - - # set finder options - if finder: - find_links = finder.find_links - index_urls = finder.index_urls - if opts.index_url: - index_urls = [opts.index_url] - if opts.no_index is True: - index_urls = [] - if opts.extra_index_urls: - index_urls.extend(opts.extra_index_urls) - if opts.find_links: - # FIXME: it would be nice to keep track of the source - # of the find_links: support a find-links local path - # relative to a requirements file. 
- value = opts.find_links[0] - req_dir = os.path.dirname(os.path.abspath(filename)) - relative_to_reqs_file = os.path.join(req_dir, value) - if os.path.exists(relative_to_reqs_file): - value = relative_to_reqs_file - find_links.append(value) - - if session: - # We need to update the auth urls in session - session.update_index_urls(index_urls) - - search_scope = SearchScope( - find_links=find_links, - index_urls=index_urls, - ) - finder.search_scope = search_scope - - if opts.pre: - finder.set_allow_all_prereleases() - - if opts.prefer_binary: - finder.set_prefer_binary() - - if session: - for host in opts.trusted_hosts or []: - source = f"line {lineno} of {filename}" - session.add_trusted_host(host, source=source) - - -def handle_line( - line: ParsedLine, - options: Optional[optparse.Values] = None, - finder: Optional["PackageFinder"] = None, - session: Optional[PipSession] = None, -) -> Optional[ParsedRequirement]: - """Handle a single parsed requirements line; This can result in - creating/yielding requirements, or updating the finder. - - :param line: The parsed line to be processed. - :param options: CLI options. - :param finder: The finder - updated by non-requirement lines. - :param session: The session - updated by non-requirement lines. - - Returns a ParsedRequirement object if the line is a requirement line, - otherwise returns None. - - For lines that contain requirements, the only options that have an effect - are from SUPPORTED_OPTIONS_REQ, and they are scoped to the - requirement. Other options from SUPPORTED_OPTIONS may be present, but are - ignored. - - For lines that do not contain requirements, the only options that have an - effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may - be present, but are ignored. These lines may contain multiple options - (although our docs imply only one is supported), and all our parsed and - affect the finder. 
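A nested "-r other.txt" or "-c constraints.txt" reference is resolved relative to the file that includes it: URL-joined when the parent file was fetched over HTTP(S), path-joined otherwise, as the recursion in _parse_and_recurse below shows. A compact illustrative helper:

import os
import re
import urllib.parse

SCHEME_RE = re.compile(r"^(http|https|file):", re.I)

def resolve_nested(parent: str, nested: str) -> str:
    if SCHEME_RE.search(parent):
        return urllib.parse.urljoin(parent, nested)
    if SCHEME_RE.search(nested):
        return nested  # nested file given as an absolute URL
    return os.path.join(os.path.dirname(parent), nested)

print(resolve_nested("https://example.com/reqs/base.txt", "extra.txt"))
print(resolve_nested("/srv/reqs/base.txt", "extra.txt"))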
- """ - - if line.is_requirement: - parsed_req = handle_requirement_line(line, options) - return parsed_req - else: - handle_option_line( - line.opts, - line.filename, - line.lineno, - finder, - options, - session, - ) - return None - - -class RequirementsFileParser: - def __init__( - self, - session: PipSession, - line_parser: LineParser, - ) -> None: - self._session = session - self._line_parser = line_parser - - def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: - """Parse a given file, yielding parsed lines.""" - yield from self._parse_and_recurse(filename, constraint) - - def _parse_and_recurse( - self, filename: str, constraint: bool - ) -> Iterator[ParsedLine]: - for line in self._parse_file(filename, constraint): - if not line.is_requirement and ( - line.opts.requirements or line.opts.constraints - ): - # parse a nested requirements file - if line.opts.requirements: - req_path = line.opts.requirements[0] - nested_constraint = False - else: - req_path = line.opts.constraints[0] - nested_constraint = True - - # original file is over http - if SCHEME_RE.search(filename): - # do a url join so relative paths work - req_path = urllib.parse.urljoin(filename, req_path) - # original file and nested file are paths - elif not SCHEME_RE.search(req_path): - # do a join so relative paths work - req_path = os.path.join( - os.path.dirname(filename), - req_path, - ) - - yield from self._parse_and_recurse(req_path, nested_constraint) - else: - yield line - - def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: - _, content = get_file_content(filename, self._session) - - lines_enum = preprocess(content) - - for line_number, line in lines_enum: - try: - args_str, opts = self._line_parser(line) - except OptionParsingError as e: - # add offending line - msg = f"Invalid requirement: {line}\n{e.msg}" - raise RequirementsFileParseError(msg) - - yield ParsedLine( - filename, - line_number, - args_str, - opts, - constraint, - ) - - -def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser: - def parse_line(line: str) -> Tuple[str, Values]: - # Build new parser for each line since it accumulates appendable - # options. - parser = build_parser() - defaults = parser.get_default_values() - defaults.index_url = None - if finder: - defaults.format_control = finder.format_control - - args_str, options_str = break_args_options(line) - - opts, _ = parser.parse_args(shlex.split(options_str), defaults) - - return args_str, opts - - return parse_line - - -def break_args_options(line: str) -> Tuple[str, str]: - """Break up the line into an args and options string. We only want to shlex - (and then optparse) the options, not the args. args can contain markers - which are corrupted by shlex. - """ - tokens = line.split(" ") - args = [] - options = tokens[:] - for token in tokens: - if token.startswith("-") or token.startswith("--"): - break - else: - args.append(token) - options.pop(0) - return " ".join(args), " ".join(options) - - -class OptionParsingError(Exception): - def __init__(self, msg: str) -> None: - self.msg = msg - - -def build_parser() -> optparse.OptionParser: - """ - Return a parser for parsing requirement lines - """ - parser = optparse.OptionParser(add_help_option=False) - - option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ - for option_factory in option_factories: - option = option_factory() - parser.add_option(option) - - # By default optparse sys.exits on parsing errors. We want to wrap - # that in our own exception. 
- def parser_exit(self: Any, msg: str) -> "NoReturn": - raise OptionParsingError(msg) - - # NOTE: mypy disallows assigning to a method - # https://github.com/python/mypy/issues/2427 - parser.exit = parser_exit # type: ignore - - return parser - - -def join_lines(lines_enum: ReqFileLines) -> ReqFileLines: - """Joins a line ending in '\' with the previous line (except when following - comments). The joined line takes on the index of the first line. - """ - primary_line_number = None - new_line: List[str] = [] - for line_number, line in lines_enum: - if not line.endswith("\\") or COMMENT_RE.match(line): - if COMMENT_RE.match(line): - # this ensures comments are always matched later - line = " " + line - if new_line: - new_line.append(line) - assert primary_line_number is not None - yield primary_line_number, "".join(new_line) - new_line = [] - else: - yield line_number, line - else: - if not new_line: - primary_line_number = line_number - new_line.append(line.strip("\\")) - - # last line contains \ - if new_line: - assert primary_line_number is not None - yield primary_line_number, "".join(new_line) - - # TODO: handle space after '\'. - - -def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines: - """ - Strips comments and filter empty lines. - """ - for line_number, line in lines_enum: - line = COMMENT_RE.sub("", line) - line = line.strip() - if line: - yield line_number, line - - -def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines: - """Replace all environment variables that can be retrieved via `os.getenv`. - - The only allowed format for environment variables defined in the - requirement file is `${MY_VARIABLE_1}` to ensure two things: - - 1. Strings that contain a `$` aren't accidentally (partially) expanded. - 2. Ensure consistency across platforms for requirement files. - - These points are the result of a discussion on the `github pull - request #3514 `_. - - Valid characters in variable names follow the `POSIX standard - `_ and are limited - to uppercase letter, digits and the `_` (underscore). - """ - for line_number, line in lines_enum: - for env_var, var_name in ENV_VAR_RE.findall(line): - value = os.getenv(var_name) - if not value: - continue - - line = line.replace(env_var, value) - - yield line_number, line - - -def get_file_content(url: str, session: PipSession) -> Tuple[str, str]: - """Gets the content of a file; it may be a filename, file: URL, or - http: URL. Returns (location, content). Content is unicode. - Respects # -*- coding: declarations on the retrieved files. - - :param url: File path or url. - :param session: PipSession instance. - """ - scheme = get_url_scheme(url) - - # Pip has special support for file:// URLs (LocalFSAdapter). - if scheme in ["http", "https", "file"]: - resp = session.get(url) - raise_for_status(resp) - return resp.url, resp.text - - # Assume this is a bare path. - try: - with open(url, "rb") as f: - content = auto_decode(f.read()) - except OSError as exc: - raise InstallationError(f"Could not open requirements file: {exc}") - return url, content diff --git a/venv/Lib/site-packages/pip/_internal/req/req_install.py b/venv/Lib/site-packages/pip/_internal/req/req_install.py deleted file mode 100644 index 02dbda1..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/req_install.py +++ /dev/null @@ -1,858 +0,0 @@ -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False - -import functools -import logging -import os -import shutil -import sys -import uuid -import zipfile -from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union - -from pip._vendor.packaging.markers import Marker -from pip._vendor.packaging.requirements import Requirement -from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.packaging.version import Version -from pip._vendor.packaging.version import parse as parse_version -from pip._vendor.pep517.wrappers import Pep517HookCaller - -from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment -from pip._internal.exceptions import InstallationError, LegacyInstallFailure -from pip._internal.locations import get_scheme -from pip._internal.metadata import ( - BaseDistribution, - get_default_environment, - get_directory_distribution, -) -from pip._internal.models.link import Link -from pip._internal.operations.build.metadata import generate_metadata -from pip._internal.operations.build.metadata_editable import generate_editable_metadata -from pip._internal.operations.build.metadata_legacy import ( - generate_metadata as generate_metadata_legacy, -) -from pip._internal.operations.install.editable_legacy import ( - install_editable as install_editable_legacy, -) -from pip._internal.operations.install.legacy import install as install_legacy -from pip._internal.operations.install.wheel import install_wheel -from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path -from pip._internal.req.req_uninstall import UninstallPathSet -from pip._internal.utils.deprecation import deprecated -from pip._internal.utils.direct_url_helpers import ( - direct_url_for_editable, - direct_url_from_link, -) -from pip._internal.utils.hashes import Hashes -from pip._internal.utils.misc import ( - ask_path_exists, - backup_dir, - display_path, - hide_url, - redact_auth_from_url, -) -from pip._internal.utils.packaging import safe_extra -from pip._internal.utils.subprocess import runner_with_spinner_message -from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds -from pip._internal.utils.virtualenv import running_under_virtualenv -from pip._internal.vcs import vcs - -logger = logging.getLogger(__name__) - - -class InstallRequirement: - """ - Represents something that may be installed later on, may have information - about where to fetch the relevant requirement and also contains logic for - installing the said requirement. - """ - - def __init__( - self, - req: Optional[Requirement], - comes_from: Optional[Union[str, "InstallRequirement"]], - editable: bool = False, - link: Optional[Link] = None, - markers: Optional[Marker] = None, - use_pep517: Optional[bool] = None, - isolated: bool = False, - install_options: Optional[List[str]] = None, - global_options: Optional[List[str]] = None, - hash_options: Optional[Dict[str, List[str]]] = None, - constraint: bool = False, - extras: Collection[str] = (), - user_supplied: bool = False, - permit_editable_wheels: bool = False, - ) -> None: - assert req is None or isinstance(req, Requirement), req - self.req = req - self.comes_from = comes_from - self.constraint = constraint - self.editable = editable - self.permit_editable_wheels = permit_editable_wheels - self.legacy_install_reason: Optional[int] = None - - # source_dir is the local directory where the linked requirement is - # located, or unpacked. 
In case unpacking is needed, creating and - # populating source_dir is done by the RequirementPreparer. Note this - # is not necessarily the directory where pyproject.toml or setup.py is - # located - that one is obtained via unpacked_source_directory. - self.source_dir: Optional[str] = None - if self.editable: - assert link - if link.is_file: - self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) - - if link is None and req and req.url: - # PEP 508 URL requirement - link = Link(req.url) - self.link = self.original_link = link - self.original_link_is_in_wheel_cache = False - - # Path to any downloaded or already-existing package. - self.local_file_path: Optional[str] = None - if self.link and self.link.is_file: - self.local_file_path = self.link.file_path - - if extras: - self.extras = extras - elif req: - self.extras = {safe_extra(extra) for extra in req.extras} - else: - self.extras = set() - if markers is None and req: - markers = req.marker - self.markers = markers - - # This holds the Distribution object if this requirement is already installed. - self.satisfied_by: Optional[BaseDistribution] = None - # Whether the installation process should try to uninstall an existing - # distribution before installing this requirement. - self.should_reinstall = False - # Temporary build location - self._temp_build_dir: Optional[TempDirectory] = None - # Set to True after successful installation - self.install_succeeded: Optional[bool] = None - # Supplied options - self.install_options = install_options if install_options else [] - self.global_options = global_options if global_options else [] - self.hash_options = hash_options if hash_options else {} - # Set to True after successful preparation of this requirement - self.prepared = False - # User supplied requirement are explicitly requested for installation - # by the user via CLI arguments or requirements files, as opposed to, - # e.g. dependencies, extras or constraints. - self.user_supplied = user_supplied - - self.isolated = isolated - self.build_env: BuildEnvironment = NoOpBuildEnvironment() - - # For PEP 517, the directory where we request the project metadata - # gets stored. We need this to pass to build_wheel, so the backend - # can ensure that the wheel matches the metadata (see the PEP for - # details). - self.metadata_directory: Optional[str] = None - - # The static build requirements (from pyproject.toml) - self.pyproject_requires: Optional[List[str]] = None - - # Build requirements that we will check are available - self.requirements_to_check: List[str] = [] - - # The PEP 517 backend we should use to build the project - self.pep517_backend: Optional[Pep517HookCaller] = None - - # Are we using PEP 517 for this requirement? - # After pyproject.toml has been loaded, the only valid values are True - # and False. Before loading, None is valid (meaning "use the default"). - # Setting an explicit value before loading pyproject.toml is supported, - # but after loading this flag should be treated as read only. 
- self.use_pep517 = use_pep517 - - # This requirement needs more preparation before it can be built - self.needs_more_preparation = False - - def __str__(self) -> str: - if self.req: - s = str(self.req) - if self.link: - s += " from {}".format(redact_auth_from_url(self.link.url)) - elif self.link: - s = redact_auth_from_url(self.link.url) - else: - s = "" - if self.satisfied_by is not None: - s += " in {}".format(display_path(self.satisfied_by.location)) - if self.comes_from: - if isinstance(self.comes_from, str): - comes_from: Optional[str] = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += f" (from {comes_from})" - return s - - def __repr__(self) -> str: - return "<{} object: {} editable={!r}>".format( - self.__class__.__name__, str(self), self.editable - ) - - def format_debug(self) -> str: - """An un-tested helper for getting state, for debugging.""" - attributes = vars(self) - names = sorted(attributes) - - state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) - return "<{name} object: {{{state}}}>".format( - name=self.__class__.__name__, - state=", ".join(state), - ) - - # Things that are valid for all kinds of requirements? - @property - def name(self) -> Optional[str]: - if self.req is None: - return None - return self.req.name - - @functools.lru_cache() # use cached_property in python 3.8+ - def supports_pyproject_editable(self) -> bool: - if not self.use_pep517: - return False - assert self.pep517_backend - with self.build_env: - runner = runner_with_spinner_message( - "Checking if build backend supports build_editable" - ) - with self.pep517_backend.subprocess_runner(runner): - return "build_editable" in self.pep517_backend._supported_features() - - @property - def specifier(self) -> SpecifierSet: - return self.req.specifier - - @property - def is_pinned(self) -> bool: - """Return whether I am pinned to an exact version. - - For example, some-package==1.2 is pinned; some-package>1.2 is not. - """ - specifiers = self.specifier - return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} - - def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: - if not extras_requested: - # Provide an extra to safely evaluate the markers - # without matching any extra - extras_requested = ("",) - if self.markers is not None: - return any( - self.markers.evaluate({"extra": extra}) for extra in extras_requested - ) - else: - return True - - @property - def has_hash_options(self) -> bool: - """Return whether any known-good hashes are specified as options. - - These activate --require-hashes mode; hashes specified as part of a - URL do not. - - """ - return bool(self.hash_options) - - def hashes(self, trust_internet: bool = True) -> Hashes: - """Return a hash-comparer that considers my option- and URL-based - hashes to be known-good. - - Hashes in URLs--ones embedded in the requirements file, not ones - downloaded from an index server--are almost peers with ones from - flags. They satisfy --require-hashes (whether it was implicitly or - explicitly activated) but do not activate it. md5 and sha224 are not - allowed in flags, which should nudge people toward good algos. We - always OR all hashes together, even ones from URLs. - - :param trust_internet: Whether to trust URL-based (#md5=...) 
hashes - downloaded from the internet, as by populate_link() - - """ - good_hashes = self.hash_options.copy() - link = self.link if trust_internet else self.original_link - if link and link.hash: - good_hashes.setdefault(link.hash_name, []).append(link.hash) - return Hashes(good_hashes) - - def from_path(self) -> Optional[str]: - """Format a nice indicator to show where this "comes from" """ - if self.req is None: - return None - s = str(self.req) - if self.comes_from: - if isinstance(self.comes_from, str): - comes_from = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += "->" + comes_from - return s - - def ensure_build_location( - self, build_dir: str, autodelete: bool, parallel_builds: bool - ) -> str: - assert build_dir is not None - if self._temp_build_dir is not None: - assert self._temp_build_dir.path - return self._temp_build_dir.path - if self.req is None: - # Some systems have /tmp as a symlink which confuses custom - # builds (such as numpy). Thus, we ensure that the real path - # is returned. - self._temp_build_dir = TempDirectory( - kind=tempdir_kinds.REQ_BUILD, globally_managed=True - ) - - return self._temp_build_dir.path - - # This is the only remaining place where we manually determine the path - # for the temporary directory. It is only needed for editables where - # it is the value of the --src option. - - # When parallel builds are enabled, add a UUID to the build directory - # name so multiple builds do not interfere with each other. - dir_name: str = canonicalize_name(self.name) - if parallel_builds: - dir_name = f"{dir_name}_{uuid.uuid4().hex}" - - # FIXME: Is there a better place to create the build_dir? (hg and bzr - # need this) - if not os.path.exists(build_dir): - logger.debug("Creating directory %s", build_dir) - os.makedirs(build_dir) - actual_build_dir = os.path.join(build_dir, dir_name) - # `None` indicates that we respect the globally-configured deletion - # settings, which is what we actually want when auto-deleting. - delete_arg = None if autodelete else False - return TempDirectory( - path=actual_build_dir, - delete=delete_arg, - kind=tempdir_kinds.REQ_BUILD, - globally_managed=True, - ).path - - def _set_requirement(self) -> None: - """Set requirement after generating metadata.""" - assert self.req is None - assert self.metadata is not None - assert self.source_dir is not None - - # Construct a Requirement object from the generated metadata - if isinstance(parse_version(self.metadata["Version"]), Version): - op = "==" - else: - op = "===" - - self.req = Requirement( - "".join( - [ - self.metadata["Name"], - op, - self.metadata["Version"], - ] - ) - ) - - def warn_on_mismatching_name(self) -> None: - metadata_name = canonicalize_name(self.metadata["Name"]) - if canonicalize_name(self.req.name) == metadata_name: - # Everything is fine. - return - - # If we're here, there's a mismatch. Log a warning about it. - logger.warning( - "Generating metadata for package %s " - "produced metadata for project name %s. Fix your " - "#egg=%s fragments.", - self.name, - metadata_name, - self.name, - ) - self.req = Requirement(metadata_name) - - def check_if_exists(self, use_user_site: bool) -> None: - """Find an installed distribution that satisfies or conflicts - with this requirement, and set self.satisfied_by or - self.should_reinstall appropriately. 
- """ - if self.req is None: - return - existing_dist = get_default_environment().get_distribution(self.req.name) - if not existing_dist: - return - - version_compatible = self.req.specifier.contains( - existing_dist.version, - prereleases=True, - ) - if not version_compatible: - self.satisfied_by = None - if use_user_site: - if existing_dist.in_usersite: - self.should_reinstall = True - elif running_under_virtualenv() and existing_dist.in_site_packages: - raise InstallationError( - f"Will not install to the user site because it will " - f"lack sys.path precedence to {existing_dist.raw_name} " - f"in {existing_dist.location}" - ) - else: - self.should_reinstall = True - else: - if self.editable: - self.should_reinstall = True - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - else: - self.satisfied_by = existing_dist - - # Things valid for wheels - @property - def is_wheel(self) -> bool: - if not self.link: - return False - return self.link.is_wheel - - # Things valid for sdists - @property - def unpacked_source_directory(self) -> str: - return os.path.join( - self.source_dir, self.link and self.link.subdirectory_fragment or "" - ) - - @property - def setup_py_path(self) -> str: - assert self.source_dir, f"No source dir for {self}" - setup_py = os.path.join(self.unpacked_source_directory, "setup.py") - - return setup_py - - @property - def setup_cfg_path(self) -> str: - assert self.source_dir, f"No source dir for {self}" - setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg") - - return setup_cfg - - @property - def pyproject_toml_path(self) -> str: - assert self.source_dir, f"No source dir for {self}" - return make_pyproject_path(self.unpacked_source_directory) - - def load_pyproject_toml(self) -> None: - """Load the pyproject.toml file. - - After calling this routine, all of the attributes related to PEP 517 - processing for this requirement have been set. In particular, the - use_pep517 attribute can be used to determine whether we should - follow the PEP 517 or legacy (setup.py) code path. - """ - pyproject_toml_data = load_pyproject_toml( - self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self) - ) - - if pyproject_toml_data is None: - self.use_pep517 = False - return - - self.use_pep517 = True - requires, backend, check, backend_path = pyproject_toml_data - self.requirements_to_check = check - self.pyproject_requires = requires - self.pep517_backend = Pep517HookCaller( - self.unpacked_source_directory, - backend, - backend_path=backend_path, - ) - - def isolated_editable_sanity_check(self) -> None: - """Check that an editable requirement if valid for use with PEP 517/518. - - This verifies that an editable that has a pyproject.toml either supports PEP 660 - or as a setup.py or a setup.cfg - """ - if ( - self.editable - and self.use_pep517 - and not self.supports_pyproject_editable() - and not os.path.isfile(self.setup_py_path) - and not os.path.isfile(self.setup_cfg_path) - ): - raise InstallationError( - f"Project {self} has a 'pyproject.toml' and its build " - f"backend is missing the 'build_editable' hook. Since it does not " - f"have a 'setup.py' nor a 'setup.cfg', " - f"it cannot be installed in editable mode. " - f"Consider using a build backend that supports PEP 660." - ) - - def prepare_metadata(self) -> None: - """Ensure that project metadata is available. - - Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. - Under legacy processing, call setup.py egg-info. 
- """ - assert self.source_dir - details = self.name or f"from {self.link}" - - if self.use_pep517: - assert self.pep517_backend is not None - if ( - self.editable - and self.permit_editable_wheels - and self.supports_pyproject_editable() - ): - self.metadata_directory = generate_editable_metadata( - build_env=self.build_env, - backend=self.pep517_backend, - details=details, - ) - else: - self.metadata_directory = generate_metadata( - build_env=self.build_env, - backend=self.pep517_backend, - details=details, - ) - else: - self.metadata_directory = generate_metadata_legacy( - build_env=self.build_env, - setup_py_path=self.setup_py_path, - source_dir=self.unpacked_source_directory, - isolated=self.isolated, - details=details, - ) - - # Act on the newly generated metadata, based on the name and version. - if not self.name: - self._set_requirement() - else: - self.warn_on_mismatching_name() - - self.assert_source_matches_version() - - @property - def metadata(self) -> Any: - if not hasattr(self, "_metadata"): - self._metadata = self.get_dist().metadata - - return self._metadata - - def get_dist(self) -> BaseDistribution: - return get_directory_distribution(self.metadata_directory) - - def assert_source_matches_version(self) -> None: - assert self.source_dir - version = self.metadata["version"] - if self.req.specifier and version not in self.req.specifier: - logger.warning( - "Requested %s, but installing version %s", - self, - version, - ) - else: - logger.debug( - "Source in %s has version %s, which satisfies requirement %s", - display_path(self.source_dir), - version, - self, - ) - - # For both source distributions and editables - def ensure_has_source_dir( - self, - parent_dir: str, - autodelete: bool = False, - parallel_builds: bool = False, - ) -> None: - """Ensure that a source_dir is set. - - This will create a temporary build dir if the name of the requirement - isn't known yet. - - :param parent_dir: The ideal pip parent_dir for the source_dir. - Generally src_dir for editables and build_dir for sdists. - :return: self.source_dir - """ - if self.source_dir is None: - self.source_dir = self.ensure_build_location( - parent_dir, - autodelete=autodelete, - parallel_builds=parallel_builds, - ) - - # For editable installations - def update_editable(self) -> None: - if not self.link: - logger.debug( - "Cannot update repository at %s; repository location is unknown", - self.source_dir, - ) - return - assert self.editable - assert self.source_dir - if self.link.scheme == "file": - # Static paths don't get updated - return - vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) - # Editable requirements are validated in Requirement constructors. - # So here, if it's neither a path nor a valid VCS URL, it's a bug. - assert vcs_backend, f"Unsupported VCS URL {self.link.url}" - hidden_url = hide_url(self.link.url) - vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0) - - # Top-level Actions - def uninstall( - self, auto_confirm: bool = False, verbose: bool = False - ) -> Optional[UninstallPathSet]: - """ - Uninstall the distribution currently satisfying this requirement. - - Prompts before removing or modifying files unless - ``auto_confirm`` is True. - - Refuses to delete or modify files outside of ``sys.prefix`` - - thus uninstallation within a virtual environment can only - modify that virtual environment, even if the virtualenv is - linked to global site-packages. 
- - """ - assert self.req - dist = get_default_environment().get_distribution(self.req.name) - if not dist: - logger.warning("Skipping %s as it is not installed.", self.name) - return None - logger.info("Found existing installation: %s", dist) - - uninstalled_pathset = UninstallPathSet.from_dist(dist) - uninstalled_pathset.remove(auto_confirm, verbose) - return uninstalled_pathset - - def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: - def _clean_zip_name(name: str, prefix: str) -> str: - assert name.startswith( - prefix + os.path.sep - ), f"name {name!r} doesn't start with prefix {prefix!r}" - name = name[len(prefix) + 1 :] - name = name.replace(os.path.sep, "/") - return name - - path = os.path.join(parentdir, path) - name = _clean_zip_name(path, rootdir) - return self.name + "/" + name - - def archive(self, build_dir: Optional[str]) -> None: - """Saves archive to provided build_dir. - - Used for saving downloaded VCS requirements as part of `pip download`. - """ - assert self.source_dir - if build_dir is None: - return - - create_archive = True - archive_name = "{}-{}.zip".format(self.name, self.metadata["version"]) - archive_path = os.path.join(build_dir, archive_name) - - if os.path.exists(archive_path): - response = ask_path_exists( - "The file {} exists. (i)gnore, (w)ipe, " - "(b)ackup, (a)bort ".format(display_path(archive_path)), - ("i", "w", "b", "a"), - ) - if response == "i": - create_archive = False - elif response == "w": - logger.warning("Deleting %s", display_path(archive_path)) - os.remove(archive_path) - elif response == "b": - dest_file = backup_dir(archive_path) - logger.warning( - "Backing up %s to %s", - display_path(archive_path), - display_path(dest_file), - ) - shutil.move(archive_path, dest_file) - elif response == "a": - sys.exit(-1) - - if not create_archive: - return - - zip_output = zipfile.ZipFile( - archive_path, - "w", - zipfile.ZIP_DEFLATED, - allowZip64=True, - ) - with zip_output: - dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) - for dirpath, dirnames, filenames in os.walk(dir): - for dirname in dirnames: - dir_arcname = self._get_archive_name( - dirname, - parentdir=dirpath, - rootdir=dir, - ) - zipdir = zipfile.ZipInfo(dir_arcname + "/") - zipdir.external_attr = 0x1ED << 16 # 0o755 - zip_output.writestr(zipdir, "") - for filename in filenames: - file_arcname = self._get_archive_name( - filename, - parentdir=dirpath, - rootdir=dir, - ) - filename = os.path.join(dirpath, filename) - zip_output.write(filename, file_arcname) - - logger.info("Saved %s", display_path(archive_path)) - - def install( - self, - install_options: List[str], - global_options: Optional[Sequence[str]] = None, - root: Optional[str] = None, - home: Optional[str] = None, - prefix: Optional[str] = None, - warn_script_location: bool = True, - use_user_site: bool = False, - pycompile: bool = True, - ) -> None: - scheme = get_scheme( - self.name, - user=use_user_site, - home=home, - root=root, - isolated=self.isolated, - prefix=prefix, - ) - - global_options = global_options if global_options is not None else [] - if self.editable and not self.is_wheel: - install_editable_legacy( - install_options, - global_options, - prefix=prefix, - home=home, - use_user_site=use_user_site, - name=self.name, - setup_py_path=self.setup_py_path, - isolated=self.isolated, - build_env=self.build_env, - unpacked_source_directory=self.unpacked_source_directory, - ) - self.install_succeeded = True - return - - if self.is_wheel: - assert self.local_file_path - 
direct_url = None - if self.editable: - direct_url = direct_url_for_editable(self.unpacked_source_directory) - elif self.original_link: - direct_url = direct_url_from_link( - self.original_link, - self.source_dir, - self.original_link_is_in_wheel_cache, - ) - install_wheel( - self.name, - self.local_file_path, - scheme=scheme, - req_description=str(self.req), - pycompile=pycompile, - warn_script_location=warn_script_location, - direct_url=direct_url, - requested=self.user_supplied, - ) - self.install_succeeded = True - return - - # TODO: Why don't we do this for editable installs? - - # Extend the list of global and install options passed on to - # the setup.py call with the ones from the requirements file. - # Options specified in requirements file override those - # specified on the command line, since the last option given - # to setup.py is the one that is used. - global_options = list(global_options) + self.global_options - install_options = list(install_options) + self.install_options - - try: - success = install_legacy( - install_options=install_options, - global_options=global_options, - root=root, - home=home, - prefix=prefix, - use_user_site=use_user_site, - pycompile=pycompile, - scheme=scheme, - setup_py_path=self.setup_py_path, - isolated=self.isolated, - req_name=self.name, - build_env=self.build_env, - unpacked_source_directory=self.unpacked_source_directory, - req_description=str(self.req), - ) - except LegacyInstallFailure as exc: - self.install_succeeded = False - raise exc - except Exception: - self.install_succeeded = True - raise - - self.install_succeeded = success - - if success and self.legacy_install_reason == 8368: - deprecated( - reason=( - "{} was installed using the legacy 'setup.py install' " - "method, because a wheel could not be built for it.".format( - self.name - ) - ), - replacement="to fix the wheel build issue reported above", - gone_in=None, - issue=8368, - ) - - -def check_invalid_constraint_type(req: InstallRequirement) -> str: - - # Check for unsupported forms - problem = "" - if not req.name: - problem = "Unnamed requirements are not allowed as constraints" - elif req.editable: - problem = "Editable requirements are not allowed as constraints" - elif req.extras: - problem = "Constraints cannot have extras" - - if problem: - deprecated( - reason=( - "Constraints are only allowed to take the form of a package " - "name and a version specifier. Other forms were originally " - "permitted as an accident of the implementation, but were " - "undocumented. The new implementation of the resolver no " - "longer supports these forms." 
- ), - replacement="replacing the constraint with a requirement", - # No plan yet for when the new resolver becomes default - gone_in=None, - issue=8210, - ) - - return problem diff --git a/venv/Lib/site-packages/pip/_internal/req/req_set.py b/venv/Lib/site-packages/pip/_internal/req/req_set.py deleted file mode 100644 index 6626c37..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/req_set.py +++ /dev/null @@ -1,189 +0,0 @@ -import logging -from collections import OrderedDict -from typing import Dict, Iterable, List, Optional, Tuple - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.exceptions import InstallationError -from pip._internal.models.wheel import Wheel -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils import compatibility_tags - -logger = logging.getLogger(__name__) - - -class RequirementSet: - def __init__(self, check_supported_wheels: bool = True) -> None: - """Create a RequirementSet.""" - - self.requirements: Dict[str, InstallRequirement] = OrderedDict() - self.check_supported_wheels = check_supported_wheels - - self.unnamed_requirements: List[InstallRequirement] = [] - - def __str__(self) -> str: - requirements = sorted( - (req for req in self.requirements.values() if not req.comes_from), - key=lambda req: canonicalize_name(req.name or ""), - ) - return " ".join(str(req.req) for req in requirements) - - def __repr__(self) -> str: - requirements = sorted( - self.requirements.values(), - key=lambda req: canonicalize_name(req.name or ""), - ) - - format_string = "<{classname} object; {count} requirement(s): {reqs}>" - return format_string.format( - classname=self.__class__.__name__, - count=len(requirements), - reqs=", ".join(str(req.req) for req in requirements), - ) - - def add_unnamed_requirement(self, install_req: InstallRequirement) -> None: - assert not install_req.name - self.unnamed_requirements.append(install_req) - - def add_named_requirement(self, install_req: InstallRequirement) -> None: - assert install_req.name - - project_name = canonicalize_name(install_req.name) - self.requirements[project_name] = install_req - - def add_requirement( - self, - install_req: InstallRequirement, - parent_req_name: Optional[str] = None, - extras_requested: Optional[Iterable[str]] = None, - ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]: - """Add install_req as a requirement to install. - - :param parent_req_name: The name of the requirement that needed this - added. The name is used because when multiple unnamed requirements - resolve to the same name, we could otherwise end up with dependency - links that point outside the Requirements set. parent_req must - already be added. Note that None implies that this is a user - supplied requirement, vs an inferred one. - :param extras_requested: an iterable of extras used to evaluate the - environment markers. - :return: Additional requirements to scan. That is either [] if - the requirement is not applicable, or [install_req] if the - requirement is applicable and has just been added. - """ - # If the markers do not match, ignore this requirement. - if not install_req.match_markers(extras_requested): - logger.info( - "Ignoring %s: markers '%s' don't match your environment", - install_req.name, - install_req.markers, - ) - return [], None - - # If the wheel is not supported, raise an error. 
- # Should check this after filtering out based on environment markers to - # allow specifying different wheels based on the environment/OS, in a - # single requirements file. - if install_req.link and install_req.link.is_wheel: - wheel = Wheel(install_req.link.filename) - tags = compatibility_tags.get_supported() - if self.check_supported_wheels and not wheel.supported(tags): - raise InstallationError( - "{} is not a supported wheel on this platform.".format( - wheel.filename - ) - ) - - # This next bit is really a sanity check. - assert ( - not install_req.user_supplied or parent_req_name is None - ), "a user supplied req shouldn't have a parent" - - # Unnamed requirements are scanned again and the requirement won't be - # added as a dependency until after scanning. - if not install_req.name: - self.add_unnamed_requirement(install_req) - return [install_req], None - - try: - existing_req: Optional[InstallRequirement] = self.get_requirement( - install_req.name - ) - except KeyError: - existing_req = None - - has_conflicting_requirement = ( - parent_req_name is None - and existing_req - and not existing_req.constraint - and existing_req.extras == install_req.extras - and existing_req.req - and install_req.req - and existing_req.req.specifier != install_req.req.specifier - ) - if has_conflicting_requirement: - raise InstallationError( - "Double requirement given: {} (already in {}, name={!r})".format( - install_req, existing_req, install_req.name - ) - ) - - # When no existing requirement exists, add the requirement as a - # dependency and it will be scanned again after. - if not existing_req: - self.add_named_requirement(install_req) - # We'd want to rescan this requirement later - return [install_req], install_req - - # Assume there's no need to scan, and that we've already - # encountered this for scanning. - if install_req.constraint or not existing_req.constraint: - return [], existing_req - - does_not_satisfy_constraint = install_req.link and not ( - existing_req.link and install_req.link.path == existing_req.link.path - ) - if does_not_satisfy_constraint: - raise InstallationError( - "Could not satisfy constraints for '{}': " - "installation from path or url cannot be " - "constrained to a version".format(install_req.name) - ) - # If we're now installing a constraint, mark the existing - # object for real installation. - existing_req.constraint = False - # If we're now installing a user supplied requirement, - # mark the existing object as such. - if install_req.user_supplied: - existing_req.user_supplied = True - existing_req.extras = tuple( - sorted(set(existing_req.extras) | set(install_req.extras)) - ) - logger.debug( - "Setting %s extras to: %s", - existing_req, - existing_req.extras, - ) - # Return the existing requirement for addition to the parent and - # scanning again. 
- return [existing_req], existing_req - - def has_requirement(self, name: str) -> bool: - project_name = canonicalize_name(name) - - return ( - project_name in self.requirements - and not self.requirements[project_name].constraint - ) - - def get_requirement(self, name: str) -> InstallRequirement: - project_name = canonicalize_name(name) - - if project_name in self.requirements: - return self.requirements[project_name] - - raise KeyError(f"No project with the name {name!r}") - - @property - def all_requirements(self) -> List[InstallRequirement]: - return self.unnamed_requirements + list(self.requirements.values()) diff --git a/venv/Lib/site-packages/pip/_internal/req/req_tracker.py b/venv/Lib/site-packages/pip/_internal/req/req_tracker.py deleted file mode 100644 index 24d3c53..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/req_tracker.py +++ /dev/null @@ -1,124 +0,0 @@ -import contextlib -import hashlib -import logging -import os -from types import TracebackType -from typing import Dict, Iterator, Optional, Set, Type, Union - -from pip._internal.models.link import Link -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.temp_dir import TempDirectory - -logger = logging.getLogger(__name__) - - -@contextlib.contextmanager -def update_env_context_manager(**changes: str) -> Iterator[None]: - target = os.environ - - # Save values from the target and change them. - non_existent_marker = object() - saved_values: Dict[str, Union[object, str]] = {} - for name, new_value in changes.items(): - try: - saved_values[name] = target[name] - except KeyError: - saved_values[name] = non_existent_marker - target[name] = new_value - - try: - yield - finally: - # Restore original values in the target. - for name, original_value in saved_values.items(): - if original_value is non_existent_marker: - del target[name] - else: - assert isinstance(original_value, str) # for mypy - target[name] = original_value - - -@contextlib.contextmanager -def get_requirement_tracker() -> Iterator["RequirementTracker"]: - root = os.environ.get("PIP_REQ_TRACKER") - with contextlib.ExitStack() as ctx: - if root is None: - root = ctx.enter_context(TempDirectory(kind="req-tracker")).path - ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) - logger.debug("Initialized build tracking at %s", root) - - with RequirementTracker(root) as tracker: - yield tracker - - -class RequirementTracker: - def __init__(self, root: str) -> None: - self._root = root - self._entries: Set[InstallRequirement] = set() - logger.debug("Created build tracker: %s", self._root) - - def __enter__(self) -> "RequirementTracker": - logger.debug("Entered build tracker: %s", self._root) - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - self.cleanup() - - def _entry_path(self, link: Link) -> str: - hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() - return os.path.join(self._root, hashed) - - def add(self, req: InstallRequirement) -> None: - """Add an InstallRequirement to build tracking.""" - - assert req.link - # Get the file to write information about this requirement. - entry_path = self._entry_path(req.link) - - # Try reading from the file. If it exists and can be read from, a build - # is already in progress, so a LookupError is raised. 
- try: - with open(entry_path) as fp: - contents = fp.read() - except FileNotFoundError: - pass - else: - message = "{} is already being built: {}".format(req.link, contents) - raise LookupError(message) - - # If we're here, req should really not be building already. - assert req not in self._entries - - # Start tracking this requirement. - with open(entry_path, "w", encoding="utf-8") as fp: - fp.write(str(req)) - self._entries.add(req) - - logger.debug("Added %s to build tracker %r", req, self._root) - - def remove(self, req: InstallRequirement) -> None: - """Remove an InstallRequirement from build tracking.""" - - assert req.link - # Delete the created file and the corresponding entries. - os.unlink(self._entry_path(req.link)) - self._entries.remove(req) - - logger.debug("Removed %s from build tracker %r", req, self._root) - - def cleanup(self) -> None: - for req in set(self._entries): - self.remove(req) - - logger.debug("Removed build tracker: %r", self._root) - - @contextlib.contextmanager - def track(self, req: InstallRequirement) -> Iterator[None]: - self.add(req) - yield - self.remove(req) diff --git a/venv/Lib/site-packages/pip/_internal/req/req_uninstall.py b/venv/Lib/site-packages/pip/_internal/req/req_uninstall.py deleted file mode 100644 index 472090a..0000000 --- a/venv/Lib/site-packages/pip/_internal/req/req_uninstall.py +++ /dev/null @@ -1,633 +0,0 @@ -import functools -import os -import sys -import sysconfig -from importlib.util import cache_from_source -from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple - -from pip._internal.exceptions import UninstallationError -from pip._internal.locations import get_bin_prefix, get_bin_user -from pip._internal.metadata import BaseDistribution -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.egg_link import egg_link_path_from_location -from pip._internal.utils.logging import getLogger, indent_log -from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree -from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory - -logger = getLogger(__name__) - - -def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str]: - """Create the fully qualified name of the files created by - {console,gui}_scripts for the given ``dist``. - Returns the list of file names - """ - exe_name = os.path.join(bin_dir, script_name) - yield exe_name - if not WINDOWS: - return - yield f"{exe_name}.exe" - yield f"{exe_name}.exe.manifest" - if is_gui: - yield f"{exe_name}-script.pyw" - else: - yield f"{exe_name}-script.py" - - -def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]: - @functools.wraps(fn) - def unique(*args: Any, **kw: Any) -> Iterator[Any]: - seen: Set[Any] = set() - for item in fn(*args, **kw): - if item not in seen: - seen.add(item) - yield item - - return unique - - -@_unique -def uninstallation_paths(dist: BaseDistribution) -> Iterator[str]: - """ - Yield all the uninstallation paths for dist based on RECORD-without-.py[co] - - Yield paths to all the files in RECORD. For each .py file in RECORD, add - the .pyc and .pyo in the same directory. - - UninstallPathSet.add() takes care of the __pycache__ .py[co]. - - If RECORD is not found, raises UninstallationError, - with possible information from the INSTALLER file. 
- - https://packaging.python.org/specifications/recording-installed-packages/ - """ - location = dist.location - assert location is not None, "not installed" - - entries = dist.iter_declared_entries() - if entries is None: - msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist) - installer = dist.installer - if not installer or installer == "pip": - dep = "{}=={}".format(dist.raw_name, dist.version) - msg += ( - " You might be able to recover from this via: " - "'pip install --force-reinstall --no-deps {}'.".format(dep) - ) - else: - msg += " Hint: The package was installed by {}.".format(installer) - raise UninstallationError(msg) - - for entry in entries: - path = os.path.join(location, entry) - yield path - if path.endswith(".py"): - dn, fn = os.path.split(path) - base = fn[:-3] - path = os.path.join(dn, base + ".pyc") - yield path - path = os.path.join(dn, base + ".pyo") - yield path - - -def compact(paths: Iterable[str]) -> Set[str]: - """Compact a path set to contain the minimal number of paths - necessary to contain all paths in the set. If /a/path/ and - /a/path/to/a/file.txt are both in the set, leave only the - shorter path.""" - - sep = os.path.sep - short_paths: Set[str] = set() - for path in sorted(paths, key=len): - should_skip = any( - path.startswith(shortpath.rstrip("*")) - and path[len(shortpath.rstrip("*").rstrip(sep))] == sep - for shortpath in short_paths - ) - if not should_skip: - short_paths.add(path) - return short_paths - - -def compress_for_rename(paths: Iterable[str]) -> Set[str]: - """Returns a set containing the paths that need to be renamed. - - This set may include directories when the original sequence of paths - included every file on disk. - """ - case_map = {os.path.normcase(p): p for p in paths} - remaining = set(case_map) - unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len) - wildcards: Set[str] = set() - - def norm_join(*a: str) -> str: - return os.path.normcase(os.path.join(*a)) - - for root in unchecked: - if any(os.path.normcase(root).startswith(w) for w in wildcards): - # This directory has already been handled. - continue - - all_files: Set[str] = set() - all_subdirs: Set[str] = set() - for dirname, subdirs, files in os.walk(root): - all_subdirs.update(norm_join(root, dirname, d) for d in subdirs) - all_files.update(norm_join(root, dirname, f) for f in files) - # If all the files we found are in our remaining set of files to - # remove, then remove them from the latter set and add a wildcard - # for the directory. - if not (all_files - remaining): - remaining.difference_update(all_files) - wildcards.add(root + os.sep) - - return set(map(case_map.__getitem__, remaining)) | wildcards - - -def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]: - """Returns a tuple of 2 sets of which paths to display to user - - The first set contains paths that would be deleted. Files of a package - are not added and the top-level directory of the package has a '*' added - at the end - to signify that all it's contents are removed. - - The second set contains files that would have been skipped in the above - folders. 
- """ - - will_remove = set(paths) - will_skip = set() - - # Determine folders and files - folders = set() - files = set() - for path in will_remove: - if path.endswith(".pyc"): - continue - if path.endswith("__init__.py") or ".dist-info" in path: - folders.add(os.path.dirname(path)) - files.add(path) - - # probably this one https://github.com/python/mypy/issues/390 - _normcased_files = set(map(os.path.normcase, files)) # type: ignore - - folders = compact(folders) - - # This walks the tree using os.walk to not miss extra folders - # that might get added. - for folder in folders: - for dirpath, _, dirfiles in os.walk(folder): - for fname in dirfiles: - if fname.endswith(".pyc"): - continue - - file_ = os.path.join(dirpath, fname) - if ( - os.path.isfile(file_) - and os.path.normcase(file_) not in _normcased_files - ): - # We are skipping this file. Add it to the set. - will_skip.add(file_) - - will_remove = files | {os.path.join(folder, "*") for folder in folders} - - return will_remove, will_skip - - -class StashedUninstallPathSet: - """A set of file rename operations to stash files while - tentatively uninstalling them.""" - - def __init__(self) -> None: - # Mapping from source file root to [Adjacent]TempDirectory - # for files under that directory. - self._save_dirs: Dict[str, TempDirectory] = {} - # (old path, new path) tuples for each move that may need - # to be undone. - self._moves: List[Tuple[str, str]] = [] - - def _get_directory_stash(self, path: str) -> str: - """Stashes a directory. - - Directories are stashed adjacent to their original location if - possible, or else moved/copied into the user's temp dir.""" - - try: - save_dir: TempDirectory = AdjacentTempDirectory(path) - except OSError: - save_dir = TempDirectory(kind="uninstall") - self._save_dirs[os.path.normcase(path)] = save_dir - - return save_dir.path - - def _get_file_stash(self, path: str) -> str: - """Stashes a file. - - If no root has been provided, one will be created for the directory - in the user's temp directory.""" - path = os.path.normcase(path) - head, old_head = os.path.dirname(path), None - save_dir = None - - while head != old_head: - try: - save_dir = self._save_dirs[head] - break - except KeyError: - pass - head, old_head = os.path.dirname(head), head - else: - # Did not find any suitable root - head = os.path.dirname(path) - save_dir = TempDirectory(kind="uninstall") - self._save_dirs[head] = save_dir - - relpath = os.path.relpath(path, head) - if relpath and relpath != os.path.curdir: - return os.path.join(save_dir.path, relpath) - return save_dir.path - - def stash(self, path: str) -> str: - """Stashes the directory or file and returns its new location. - Handle symlinks as files to avoid modifying the symlink targets. - """ - path_is_dir = os.path.isdir(path) and not os.path.islink(path) - if path_is_dir: - new_path = self._get_directory_stash(path) - else: - new_path = self._get_file_stash(path) - - self._moves.append((path, new_path)) - if path_is_dir and os.path.isdir(new_path): - # If we're moving a directory, we need to - # remove the destination first or else it will be - # moved to inside the existing directory. - # We just created new_path ourselves, so it will - # be removable. 
- os.rmdir(new_path) - renames(path, new_path) - return new_path - - def commit(self) -> None: - """Commits the uninstall by removing stashed files.""" - for _, save_dir in self._save_dirs.items(): - save_dir.cleanup() - self._moves = [] - self._save_dirs = {} - - def rollback(self) -> None: - """Undoes the uninstall by moving stashed files back.""" - for p in self._moves: - logger.info("Moving to %s\n from %s", *p) - - for new_path, path in self._moves: - try: - logger.debug("Replacing %s from %s", new_path, path) - if os.path.isfile(new_path) or os.path.islink(new_path): - os.unlink(new_path) - elif os.path.isdir(new_path): - rmtree(new_path) - renames(path, new_path) - except OSError as ex: - logger.error("Failed to restore %s", new_path) - logger.debug("Exception: %s", ex) - - self.commit() - - @property - def can_rollback(self) -> bool: - return bool(self._moves) - - -class UninstallPathSet: - """A set of file paths to be removed in the uninstallation of a - requirement.""" - - def __init__(self, dist: BaseDistribution) -> None: - self._paths: Set[str] = set() - self._refuse: Set[str] = set() - self._pth: Dict[str, UninstallPthEntries] = {} - self._dist = dist - self._moved_paths = StashedUninstallPathSet() - - def _permitted(self, path: str) -> bool: - """ - Return True if the given path is one we are permitted to - remove/modify, False otherwise. - - """ - return is_local(path) - - def add(self, path: str) -> None: - head, tail = os.path.split(path) - - # we normalize the head to resolve parent directory symlinks, but not - # the tail, since we only want to uninstall symlinks, not their targets - path = os.path.join(normalize_path(head), os.path.normcase(tail)) - - if not os.path.exists(path): - return - if self._permitted(path): - self._paths.add(path) - else: - self._refuse.add(path) - - # __pycache__ files can show up after 'installed-files.txt' is created, - # due to imports - if os.path.splitext(path)[1] == ".py": - self.add(cache_from_source(path)) - - def add_pth(self, pth_file: str, entry: str) -> None: - pth_file = normalize_path(pth_file) - if self._permitted(pth_file): - if pth_file not in self._pth: - self._pth[pth_file] = UninstallPthEntries(pth_file) - self._pth[pth_file].add(entry) - else: - self._refuse.add(pth_file) - - def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None: - """Remove paths in ``self._paths`` with confirmation (unless - ``auto_confirm`` is True).""" - - if not self._paths: - logger.info( - "Can't uninstall '%s'. 
No files were found to uninstall.", - self._dist.raw_name, - ) - return - - dist_name_version = f"{self._dist.raw_name}-{self._dist.version}" - logger.info("Uninstalling %s:", dist_name_version) - - with indent_log(): - if auto_confirm or self._allowed_to_proceed(verbose): - moved = self._moved_paths - - for_rename = compress_for_rename(self._paths) - - for path in sorted(compact(for_rename)): - moved.stash(path) - logger.verbose("Removing file or directory %s", path) - - for pth in self._pth.values(): - pth.remove() - - logger.info("Successfully uninstalled %s", dist_name_version) - - def _allowed_to_proceed(self, verbose: bool) -> bool: - """Display which files would be deleted and prompt for confirmation""" - - def _display(msg: str, paths: Iterable[str]) -> None: - if not paths: - return - - logger.info(msg) - with indent_log(): - for path in sorted(compact(paths)): - logger.info(path) - - if not verbose: - will_remove, will_skip = compress_for_output_listing(self._paths) - else: - # In verbose mode, display all the files that are going to be - # deleted. - will_remove = set(self._paths) - will_skip = set() - - _display("Would remove:", will_remove) - _display("Would not remove (might be manually added):", will_skip) - _display("Would not remove (outside of prefix):", self._refuse) - if verbose: - _display("Will actually move:", compress_for_rename(self._paths)) - - return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n" - - def rollback(self) -> None: - """Rollback the changes previously made by remove().""" - if not self._moved_paths.can_rollback: - logger.error( - "Can't roll back %s; was not uninstalled", - self._dist.raw_name, - ) - return - logger.info("Rolling back uninstall of %s", self._dist.raw_name) - self._moved_paths.rollback() - for pth in self._pth.values(): - pth.rollback() - - def commit(self) -> None: - """Remove temporary save dir: rollback will no longer be possible.""" - self._moved_paths.commit() - - @classmethod - def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": - dist_location = dist.location - info_location = dist.info_location - if dist_location is None: - logger.info( - "Not uninstalling %s since it is not installed", - dist.canonical_name, - ) - return cls(dist) - - normalized_dist_location = normalize_path(dist_location) - if not dist.local: - logger.info( - "Not uninstalling %s at %s, outside environment %s", - dist.canonical_name, - normalized_dist_location, - sys.prefix, - ) - return cls(dist) - - if normalized_dist_location in { - p - for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")} - if p - }: - logger.info( - "Not uninstalling %s at %s, as it is in the standard library.", - dist.canonical_name, - normalized_dist_location, - ) - return cls(dist) - - paths_to_remove = cls(dist) - develop_egg_link = egg_link_path_from_location(dist.raw_name) - - # Distribution is installed with metadata in a "flat" .egg-info - # directory. This means it is not a modern .dist-info installation, an - # egg, or legacy editable. - setuptools_flat_installation = ( - dist.installed_with_setuptools_egg_info - and info_location is not None - and os.path.exists(info_location) - # If dist is editable and the location points to a ``.egg-info``, - # we are in fact in the legacy editable case. 
- and not info_location.endswith(f"{dist.setuptools_filename}.egg-info") - ) - - # Uninstall cases order do matter as in the case of 2 installs of the - # same package, pip needs to uninstall the currently detected version - if setuptools_flat_installation: - if info_location is not None: - paths_to_remove.add(info_location) - installed_files = dist.iter_declared_entries() - if installed_files is not None: - for installed_file in installed_files: - paths_to_remove.add(os.path.join(dist_location, installed_file)) - # FIXME: need a test for this elif block - # occurs with --single-version-externally-managed/--record outside - # of pip - elif dist.is_file("top_level.txt"): - try: - namespace_packages = dist.read_text("namespace_packages.txt") - except FileNotFoundError: - namespaces = [] - else: - namespaces = namespace_packages.splitlines(keepends=False) - for top_level_pkg in [ - p - for p in dist.read_text("top_level.txt").splitlines() - if p and p not in namespaces - ]: - path = os.path.join(dist_location, top_level_pkg) - paths_to_remove.add(path) - paths_to_remove.add(f"{path}.py") - paths_to_remove.add(f"{path}.pyc") - paths_to_remove.add(f"{path}.pyo") - - elif dist.installed_by_distutils: - raise UninstallationError( - "Cannot uninstall {!r}. It is a distutils installed project " - "and thus we cannot accurately determine which files belong " - "to it which would lead to only a partial uninstall.".format( - dist.raw_name, - ) - ) - - elif dist.installed_as_egg: - # package installed by easy_install - # We cannot match on dist.egg_name because it can slightly vary - # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg - paths_to_remove.add(dist_location) - easy_install_egg = os.path.split(dist_location)[1] - easy_install_pth = os.path.join( - os.path.dirname(dist_location), - "easy-install.pth", - ) - paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg) - - elif dist.installed_with_dist_info: - for path in uninstallation_paths(dist): - paths_to_remove.add(path) - - elif develop_egg_link: - # PEP 660 modern editable is handled in the ``.dist-info`` case - # above, so this only covers the setuptools-style editable. 
- with open(develop_egg_link) as fh: - link_pointer = os.path.normcase(fh.readline().strip()) - assert link_pointer == dist_location, ( - f"Egg-link {link_pointer} does not match installed location of " - f"{dist.raw_name} (at {dist_location})" - ) - paths_to_remove.add(develop_egg_link) - easy_install_pth = os.path.join( - os.path.dirname(develop_egg_link), "easy-install.pth" - ) - paths_to_remove.add_pth(easy_install_pth, dist_location) - - else: - logger.debug( - "Not sure how to uninstall: %s - Check: %s", - dist, - dist_location, - ) - - if dist.in_usersite: - bin_dir = get_bin_user() - else: - bin_dir = get_bin_prefix() - - # find distutils scripts= scripts - try: - for script in dist.iterdir("scripts"): - paths_to_remove.add(os.path.join(bin_dir, script.name)) - if WINDOWS: - paths_to_remove.add(os.path.join(bin_dir, f"{script.name}.bat")) - except (FileNotFoundError, NotADirectoryError): - pass - - # find console_scripts and gui_scripts - def iter_scripts_to_remove( - dist: BaseDistribution, - bin_dir: str, - ) -> Iterator[str]: - for entry_point in dist.iter_entry_points(): - if entry_point.group == "console_scripts": - yield from _script_names(bin_dir, entry_point.name, False) - elif entry_point.group == "gui_scripts": - yield from _script_names(bin_dir, entry_point.name, True) - - for s in iter_scripts_to_remove(dist, bin_dir): - paths_to_remove.add(s) - - return paths_to_remove - - -class UninstallPthEntries: - def __init__(self, pth_file: str) -> None: - self.file = pth_file - self.entries: Set[str] = set() - self._saved_lines: Optional[List[bytes]] = None - - def add(self, entry: str) -> None: - entry = os.path.normcase(entry) - # On Windows, os.path.normcase converts the entry to use - # backslashes. This is correct for entries that describe absolute - # paths outside of site-packages, but all the others use forward - # slashes. - # os.path.splitdrive is used instead of os.path.isabs because isabs - # treats non-absolute paths with drive letter markings like c:foo\bar - # as absolute paths. It also does not recognize UNC paths if they don't - # have more than "\\sever\share". Valid examples: "\\server\share\" or - # "\\server\share\folder". 
- if WINDOWS and not os.path.splitdrive(entry)[0]: - entry = entry.replace("\\", "/") - self.entries.add(entry) - - def remove(self) -> None: - logger.verbose("Removing pth entries from %s:", self.file) - - # If the file doesn't exist, log a warning and return - if not os.path.isfile(self.file): - logger.warning("Cannot remove entries from nonexistent file %s", self.file) - return - with open(self.file, "rb") as fh: - # windows uses '\r\n' with py3k, but uses '\n' with py2.x - lines = fh.readlines() - self._saved_lines = lines - if any(b"\r\n" in line for line in lines): - endline = "\r\n" - else: - endline = "\n" - # handle missing trailing newline - if lines and not lines[-1].endswith(endline.encode("utf-8")): - lines[-1] = lines[-1] + endline.encode("utf-8") - for entry in self.entries: - try: - logger.verbose("Removing entry: %s", entry) - lines.remove((entry + endline).encode("utf-8")) - except ValueError: - pass - with open(self.file, "wb") as fh: - fh.writelines(lines) - - def rollback(self) -> bool: - if self._saved_lines is None: - logger.error("Cannot roll back changes to %s, none were made", self.file) - return False - logger.debug("Rolling %s back to previous state", self.file) - with open(self.file, "wb") as fh: - fh.writelines(self._saved_lines) - return True diff --git a/venv/Lib/site-packages/pip/_internal/resolution/__init__.py b/venv/Lib/site-packages/pip/_internal/resolution/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 64c80ef..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc deleted file mode 100644 index 77b11f0..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/base.py b/venv/Lib/site-packages/pip/_internal/resolution/base.py deleted file mode 100644 index 42dade1..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/base.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Callable, List, Optional - -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_set import RequirementSet - -InstallRequirementProvider = Callable[ - [str, Optional[InstallRequirement]], InstallRequirement -] - - -class BaseResolver: - def resolve( - self, root_reqs: List[InstallRequirement], check_supported_wheels: bool - ) -> RequirementSet: - raise NotImplementedError() - - def get_installation_order( - self, req_set: RequirementSet - ) -> List[InstallRequirement]: - raise NotImplementedError() diff --git a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index ed93c03..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc and 
/dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc deleted file mode 100644 index 5979439..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py b/venv/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py deleted file mode 100644 index 8c149d4..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py +++ /dev/null @@ -1,467 +0,0 @@ -"""Dependency Resolution - -The dependency resolution in pip is performed as follows: - -for top-level requirements: - a. only one spec allowed per project, regardless of conflicts or not. - otherwise a "double requirement" exception is raised - b. they override sub-dependency requirements. -for sub-dependencies - a. "first found, wins" (where the order is breadth first) -""" - -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import logging -import sys -from collections import defaultdict -from itertools import chain -from typing import DefaultDict, Iterable, List, Optional, Set, Tuple - -from pip._vendor.packaging import specifiers -from pip._vendor.packaging.requirements import Requirement - -from pip._internal.cache import WheelCache -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, - DistributionNotFound, - HashError, - HashErrors, - NoneMetadataError, - UnsupportedPythonVersion, -) -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution -from pip._internal.models.link import Link -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req.req_install import ( - InstallRequirement, - check_invalid_constraint_type, -) -from pip._internal.req.req_set import RequirementSet -from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider -from pip._internal.utils.compatibility_tags import get_supported -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import normalize_version_info -from pip._internal.utils.packaging import check_requires_python - -logger = logging.getLogger(__name__) - -DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] - - -def _check_dist_requires_python( - dist: BaseDistribution, - version_info: Tuple[int, int, int], - ignore_requires_python: bool = False, -) -> None: - """ - Check whether the given Python version is compatible with a distribution's - "Requires-Python" value. - - :param version_info: A 3-tuple of ints representing the Python - major-minor-micro version to check. - :param ignore_requires_python: Whether to ignore the "Requires-Python" - value if the given Python version isn't compatible. - - :raises UnsupportedPythonVersion: When the given Python version isn't - compatible. - """ - # This idiosyncratically converts the SpecifierSet to str and let - # check_requires_python then parse it again into SpecifierSet. But this - # is the legacy resolver so I'm just not going to bother refactoring. 
- try: - requires_python = str(dist.requires_python) - except FileNotFoundError as e: - raise NoneMetadataError(dist, str(e)) - try: - is_compatible = check_requires_python( - requires_python, - version_info=version_info, - ) - except specifiers.InvalidSpecifier as exc: - logger.warning( - "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc - ) - return - - if is_compatible: - return - - version = ".".join(map(str, version_info)) - if ignore_requires_python: - logger.debug( - "Ignoring failed Requires-Python check for package %r: %s not in %r", - dist.raw_name, - version, - requires_python, - ) - return - - raise UnsupportedPythonVersion( - "Package {!r} requires a different Python: {} not in {!r}".format( - dist.raw_name, version, requires_python - ) - ) - - -class Resolver(BaseResolver): - """Resolves which packages need to be installed/uninstalled to perform \ - the requested operation without breaking the requirements of any package. - """ - - _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} - - def __init__( - self, - preparer: RequirementPreparer, - finder: PackageFinder, - wheel_cache: Optional[WheelCache], - make_install_req: InstallRequirementProvider, - use_user_site: bool, - ignore_dependencies: bool, - ignore_installed: bool, - ignore_requires_python: bool, - force_reinstall: bool, - upgrade_strategy: str, - py_version_info: Optional[Tuple[int, ...]] = None, - ) -> None: - super().__init__() - assert upgrade_strategy in self._allowed_strategies - - if py_version_info is None: - py_version_info = sys.version_info[:3] - else: - py_version_info = normalize_version_info(py_version_info) - - self._py_version_info = py_version_info - - self.preparer = preparer - self.finder = finder - self.wheel_cache = wheel_cache - - self.upgrade_strategy = upgrade_strategy - self.force_reinstall = force_reinstall - self.ignore_dependencies = ignore_dependencies - self.ignore_installed = ignore_installed - self.ignore_requires_python = ignore_requires_python - self.use_user_site = use_user_site - self._make_install_req = make_install_req - - self._discovered_dependencies: DiscoveredDependencies = defaultdict(list) - - def resolve( - self, root_reqs: List[InstallRequirement], check_supported_wheels: bool - ) -> RequirementSet: - """Resolve what operations need to be done - - As a side-effect of this method, the packages (and their dependencies) - are downloaded, unpacked and prepared for installation. This - preparation is done by ``pip.operations.prepare``. - - Once PyPI has static dependency metadata available, it would be - possible to move the preparation to become a step separated from - dependency resolution. - """ - requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels) - for req in root_reqs: - if req.constraint: - check_invalid_constraint_type(req) - requirement_set.add_requirement(req) - - # Actually prepare the files, and collect any exceptions. Most hash - # exceptions cannot be checked ahead of time, because - # _populate_link() needs to be called before we can make decisions - # based on link type. 
- discovered_reqs: List[InstallRequirement] = [] - hash_errors = HashErrors() - for req in chain(requirement_set.all_requirements, discovered_reqs): - try: - discovered_reqs.extend(self._resolve_one(requirement_set, req)) - except HashError as exc: - exc.req = req - hash_errors.append(exc) - - if hash_errors: - raise hash_errors - - return requirement_set - - def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: - if self.upgrade_strategy == "to-satisfy-only": - return False - elif self.upgrade_strategy == "eager": - return True - else: - assert self.upgrade_strategy == "only-if-needed" - return req.user_supplied or req.constraint - - def _set_req_to_reinstall(self, req: InstallRequirement) -> None: - """ - Set a requirement to be installed. - """ - # Don't uninstall the conflict if doing a user install and the - # conflict is not a user install. - if not self.use_user_site or req.satisfied_by.in_usersite: - req.should_reinstall = True - req.satisfied_by = None - - def _check_skip_installed( - self, req_to_install: InstallRequirement - ) -> Optional[str]: - """Check if req_to_install should be skipped. - - This will check if the req is installed, and whether we should upgrade - or reinstall it, taking into account all the relevant user options. - - After calling this req_to_install will only have satisfied_by set to - None if the req_to_install is to be upgraded/reinstalled etc. Any - other value will be a dist recording the current thing installed that - satisfies the requirement. - - Note that for vcs urls and the like we can't assess skipping in this - routine - we simply identify that we need to pull the thing down, - then later on it is pulled down and introspected to assess upgrade/ - reinstalls etc. - - :return: A text reason for why it was skipped, or None. - """ - if self.ignore_installed: - return None - - req_to_install.check_if_exists(self.use_user_site) - if not req_to_install.satisfied_by: - return None - - if self.force_reinstall: - self._set_req_to_reinstall(req_to_install) - return None - - if not self._is_upgrade_allowed(req_to_install): - if self.upgrade_strategy == "only-if-needed": - return "already satisfied, skipping upgrade" - return "already satisfied" - - # Check for the possibility of an upgrade. For link-based - # requirements we have to pull the tree down and inspect to assess - # the version #, so it's handled way down. - if not req_to_install.link: - try: - self.finder.find_requirement(req_to_install, upgrade=True) - except BestVersionAlreadyInstalled: - # Then the best version is installed. - return "already up-to-date" - except DistributionNotFound: - # No distribution found, so we squash the error. It will - # be raised later when we re-try later to do the install. - # Why don't we just raise here? - pass - - self._set_req_to_reinstall(req_to_install) - return None - - def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]: - upgrade = self._is_upgrade_allowed(req) - best_candidate = self.finder.find_requirement(req, upgrade) - if not best_candidate: - return None - - # Log a warning per PEP 592 if necessary before returning. - link = best_candidate.link - if link.is_yanked: - reason = link.yanked_reason or "" - msg = ( - # Mark this as a unicode string to prevent - # "UnicodeEncodeError: 'ascii' codec can't encode character" - # in Python 2 when the reason contains non-ascii characters. 
- "The candidate selected for download or install is a " - "yanked version: {candidate}\n" - "Reason for being yanked: {reason}" - ).format(candidate=best_candidate, reason=reason) - logger.warning(msg) - - return link - - def _populate_link(self, req: InstallRequirement) -> None: - """Ensure that if a link can be found for this, that it is found. - - Note that req.link may still be None - if the requirement is already - installed and not needed to be upgraded based on the return value of - _is_upgrade_allowed(). - - If preparer.require_hashes is True, don't use the wheel cache, because - cached wheels, always built locally, have different hashes than the - files downloaded from the index server and thus throw false hash - mismatches. Furthermore, cached wheels at present have undeterministic - contents due to file modification times. - """ - if req.link is None: - req.link = self._find_requirement_link(req) - - if self.wheel_cache is None or self.preparer.require_hashes: - return - cache_entry = self.wheel_cache.get_cache_entry( - link=req.link, - package_name=req.name, - supported_tags=get_supported(), - ) - if cache_entry is not None: - logger.debug("Using cached wheel link: %s", cache_entry.link) - if req.link is req.original_link and cache_entry.persistent: - req.original_link_is_in_wheel_cache = True - req.link = cache_entry.link - - def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution: - """Takes a InstallRequirement and returns a single AbstractDist \ - representing a prepared variant of the same. - """ - if req.editable: - return self.preparer.prepare_editable_requirement(req) - - # satisfied_by is only evaluated by calling _check_skip_installed, - # so it must be None here. - assert req.satisfied_by is None - skip_reason = self._check_skip_installed(req) - - if req.satisfied_by: - return self.preparer.prepare_installed_requirement(req, skip_reason) - - # We eagerly populate the link, since that's our "legacy" behavior. - self._populate_link(req) - dist = self.preparer.prepare_linked_requirement(req) - - # NOTE - # The following portion is for determining if a certain package is - # going to be re-installed/upgraded or not and reporting to the user. - # This should probably get cleaned up in a future refactor. - - # req.req is only avail after unpack for URL - # pkgs repeat check_if_exists to uninstall-on-upgrade - # (#14) - if not self.ignore_installed: - req.check_if_exists(self.use_user_site) - - if req.satisfied_by: - should_modify = ( - self.upgrade_strategy != "to-satisfy-only" - or self.force_reinstall - or self.ignore_installed - or req.link.scheme == "file" - ) - if should_modify: - self._set_req_to_reinstall(req) - else: - logger.info( - "Requirement already satisfied (use --upgrade to upgrade): %s", - req, - ) - return dist - - def _resolve_one( - self, - requirement_set: RequirementSet, - req_to_install: InstallRequirement, - ) -> List[InstallRequirement]: - """Prepare a single requirements file. - - :return: A list of additional InstallRequirements to also install. - """ - # Tell user what we are doing for this requirement: - # obtain (editable), skipping, processing (local url), collecting - # (remote url or package name) - if req_to_install.constraint or req_to_install.prepared: - return [] - - req_to_install.prepared = True - - # Parse and return dependencies - dist = self._get_dist_for(req_to_install) - # This will raise UnsupportedPythonVersion if the given Python - # version isn't compatible with the distribution's Requires-Python. 
- _check_dist_requires_python( - dist, - version_info=self._py_version_info, - ignore_requires_python=self.ignore_requires_python, - ) - - more_reqs: List[InstallRequirement] = [] - - def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: - # This idiosyncratically converts the Requirement to str and let - # make_install_req then parse it again into Requirement. But this is - # the legacy resolver so I'm just not going to bother refactoring. - sub_install_req = self._make_install_req(str(subreq), req_to_install) - parent_req_name = req_to_install.name - to_scan_again, add_to_parent = requirement_set.add_requirement( - sub_install_req, - parent_req_name=parent_req_name, - extras_requested=extras_requested, - ) - if parent_req_name and add_to_parent: - self._discovered_dependencies[parent_req_name].append(add_to_parent) - more_reqs.extend(to_scan_again) - - with indent_log(): - # We add req_to_install before its dependencies, so that we - # can refer to it when adding dependencies. - if not requirement_set.has_requirement(req_to_install.name): - # 'unnamed' requirements will get added here - # 'unnamed' requirements can only come from being directly - # provided by the user. - assert req_to_install.user_supplied - requirement_set.add_requirement(req_to_install, parent_req_name=None) - - if not self.ignore_dependencies: - if req_to_install.extras: - logger.debug( - "Installing extra requirements: %r", - ",".join(req_to_install.extras), - ) - missing_requested = sorted( - set(req_to_install.extras) - set(dist.iter_provided_extras()) - ) - for missing in missing_requested: - logger.warning( - "%s %s does not provide the extra '%s'", - dist.raw_name, - dist.version, - missing, - ) - - available_requested = sorted( - set(dist.iter_provided_extras()) & set(req_to_install.extras) - ) - for subreq in dist.iter_dependencies(available_requested): - add_req(subreq, extras_requested=available_requested) - - return more_reqs - - def get_installation_order( - self, req_set: RequirementSet - ) -> List[InstallRequirement]: - """Create the installation order. - - The installation order is topological - requirements are installed - before the requiring thing. We break cycles at an arbitrary point, - and make no other guarantees. - """ - # The current implementation, which we may change at any point - # installs the user specified things in the order given, except when - # dependencies must come earlier to achieve topological order. 
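# --- Illustrative sketch (not part of the deleted pip source) ---
# The schedule() helper defined just below is a post-order depth-first walk, so
# dependencies land in the installation order before the things that require
# them. A standalone toy version with hypothetical package names:
from typing import Dict, List, Set

deps: Dict[str, List[str]] = {"app": ["lib-a", "lib-b"], "lib-a": ["lib-b"], "lib-b": []}
order: List[str] = []
scheduled: Set[str] = set()


def schedule(name: str) -> None:
    if name in scheduled:
        return  # also the point where a dependency cycle would be broken
    scheduled.add(name)
    for dep in deps.get(name, []):
        schedule(dep)
    order.append(name)  # appended only after everything it needs


schedule("app")
assert order == ["lib-b", "lib-a", "app"]  # dependencies first
# --- end sketch ---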
- order = [] - ordered_reqs: Set[InstallRequirement] = set() - - def schedule(req: InstallRequirement) -> None: - if req.satisfied_by or req in ordered_reqs: - return - if req.constraint: - return - ordered_reqs.add(req) - for dep in self._discovered_dependencies[req.name]: - schedule(dep) - order.append(req) - - for install_req in req_set.requirements.values(): - schedule(install_req) - return order diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index c31d41a..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc deleted file mode 100644 index 80e12d8..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc deleted file mode 100644 index 5fa86cc..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc deleted file mode 100644 index b7595a1..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc deleted file mode 100644 index 9d288d7..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc deleted file mode 100644 index 04542b9..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc deleted file mode 100644 index 774e886..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc deleted file mode 100644 
index 3b2bd62..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc deleted file mode 100644 index 931c245..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py deleted file mode 100644 index b206692..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py +++ /dev/null @@ -1,141 +0,0 @@ -from typing import FrozenSet, Iterable, Optional, Tuple, Union - -from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name -from pip._vendor.packaging.version import LegacyVersion, Version - -from pip._internal.models.link import Link, links_equivalent -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.hashes import Hashes - -CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]] -CandidateVersion = Union[LegacyVersion, Version] - - -def format_name(project: str, extras: FrozenSet[str]) -> str: - if not extras: - return project - canonical_extras = sorted(canonicalize_name(e) for e in extras) - return "{}[{}]".format(project, ",".join(canonical_extras)) - - -class Constraint: - def __init__( - self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link] - ) -> None: - self.specifier = specifier - self.hashes = hashes - self.links = links - - @classmethod - def empty(cls) -> "Constraint": - return Constraint(SpecifierSet(), Hashes(), frozenset()) - - @classmethod - def from_ireq(cls, ireq: InstallRequirement) -> "Constraint": - links = frozenset([ireq.link]) if ireq.link else frozenset() - return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links) - - def __bool__(self) -> bool: - return bool(self.specifier) or bool(self.hashes) or bool(self.links) - - def __and__(self, other: InstallRequirement) -> "Constraint": - if not isinstance(other, InstallRequirement): - return NotImplemented - specifier = self.specifier & other.specifier - hashes = self.hashes & other.hashes(trust_internet=False) - links = self.links - if other.link: - links = links.union([other.link]) - return Constraint(specifier, hashes, links) - - def is_satisfied_by(self, candidate: "Candidate") -> bool: - # Reject if there are any mismatched URL constraints on this package. - if self.links and not all(_match_link(link, candidate) for link in self.links): - return False - # We can safely always allow prereleases here since PackageFinder - # already implements the prerelease logic, and would have filtered out - # prerelease candidates if the user does not expect them. - return self.specifier.contains(candidate.version, prereleases=True) - - -class Requirement: - @property - def project_name(self) -> NormalizedName: - """The "project name" of a requirement. - - This is different from ``name`` if this requirement contains extras, - in which case ``name`` would contain the ``[...]`` part, while this - refers to the name of the project. 
- """ - raise NotImplementedError("Subclass should override") - - @property - def name(self) -> str: - """The name identifying this requirement in the resolver. - - This is different from ``project_name`` if this requirement contains - extras, where ``project_name`` would not contain the ``[...]`` part. - """ - raise NotImplementedError("Subclass should override") - - def is_satisfied_by(self, candidate: "Candidate") -> bool: - return False - - def get_candidate_lookup(self) -> CandidateLookup: - raise NotImplementedError("Subclass should override") - - def format_for_error(self) -> str: - raise NotImplementedError("Subclass should override") - - -def _match_link(link: Link, candidate: "Candidate") -> bool: - if candidate.source_link: - return links_equivalent(link, candidate.source_link) - return False - - -class Candidate: - @property - def project_name(self) -> NormalizedName: - """The "project name" of the candidate. - - This is different from ``name`` if this candidate contains extras, - in which case ``name`` would contain the ``[...]`` part, while this - refers to the name of the project. - """ - raise NotImplementedError("Override in subclass") - - @property - def name(self) -> str: - """The name identifying this candidate in the resolver. - - This is different from ``project_name`` if this candidate contains - extras, where ``project_name`` would not contain the ``[...]`` part. - """ - raise NotImplementedError("Override in subclass") - - @property - def version(self) -> CandidateVersion: - raise NotImplementedError("Override in subclass") - - @property - def is_installed(self) -> bool: - raise NotImplementedError("Override in subclass") - - @property - def is_editable(self) -> bool: - raise NotImplementedError("Override in subclass") - - @property - def source_link(self) -> Optional[Link]: - raise NotImplementedError("Override in subclass") - - def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: - raise NotImplementedError("Override in subclass") - - def get_install_requirement(self) -> Optional[InstallRequirement]: - raise NotImplementedError("Override in subclass") - - def format_for_error(self) -> str: - raise NotImplementedError("Subclass should override") diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py deleted file mode 100644 index 9b8450e..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py +++ /dev/null @@ -1,547 +0,0 @@ -import logging -import sys -from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast - -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name -from pip._vendor.packaging.version import Version - -from pip._internal.exceptions import ( - HashError, - InstallationSubprocessError, - MetadataInconsistent, -) -from pip._internal.metadata import BaseDistribution -from pip._internal.models.link import Link, links_equivalent -from pip._internal.models.wheel import Wheel -from pip._internal.req.constructors import ( - install_req_from_editable, - install_req_from_line, -) -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.misc import normalize_version_info - -from .base import Candidate, CandidateVersion, Requirement, format_name - -if TYPE_CHECKING: - from .factory import Factory - -logger = logging.getLogger(__name__) - -BaseCandidate = Union[ - "AlreadyInstalledCandidate", - "EditableCandidate", 
- "LinkCandidate", -] - -# Avoid conflicting with the PyPI package "Python". -REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "") - - -def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]: - """The runtime version of BaseCandidate.""" - base_candidate_classes = ( - AlreadyInstalledCandidate, - EditableCandidate, - LinkCandidate, - ) - if isinstance(candidate, base_candidate_classes): - return candidate - return None - - -def make_install_req_from_link( - link: Link, template: InstallRequirement -) -> InstallRequirement: - assert not template.editable, "template is editable" - if template.req: - line = str(template.req) - else: - line = link.url - ireq = install_req_from_line( - line, - user_supplied=template.user_supplied, - comes_from=template.comes_from, - use_pep517=template.use_pep517, - isolated=template.isolated, - constraint=template.constraint, - options=dict( - install_options=template.install_options, - global_options=template.global_options, - hashes=template.hash_options, - ), - ) - ireq.original_link = template.original_link - ireq.link = link - return ireq - - -def make_install_req_from_editable( - link: Link, template: InstallRequirement -) -> InstallRequirement: - assert template.editable, "template not editable" - return install_req_from_editable( - link.url, - user_supplied=template.user_supplied, - comes_from=template.comes_from, - use_pep517=template.use_pep517, - isolated=template.isolated, - constraint=template.constraint, - permit_editable_wheels=template.permit_editable_wheels, - options=dict( - install_options=template.install_options, - global_options=template.global_options, - hashes=template.hash_options, - ), - ) - - -def _make_install_req_from_dist( - dist: BaseDistribution, template: InstallRequirement -) -> InstallRequirement: - if template.req: - line = str(template.req) - elif template.link: - line = f"{dist.canonical_name} @ {template.link.url}" - else: - line = f"{dist.canonical_name}=={dist.version}" - ireq = install_req_from_line( - line, - user_supplied=template.user_supplied, - comes_from=template.comes_from, - use_pep517=template.use_pep517, - isolated=template.isolated, - constraint=template.constraint, - options=dict( - install_options=template.install_options, - global_options=template.global_options, - hashes=template.hash_options, - ), - ) - ireq.satisfied_by = dist - return ireq - - -class _InstallRequirementBackedCandidate(Candidate): - """A candidate backed by an ``InstallRequirement``. - - This represents a package request with the target not being already - in the environment, and needs to be fetched and installed. The backing - ``InstallRequirement`` is responsible for most of the leg work; this - class exposes appropriate information to the resolver. - - :param link: The link passed to the ``InstallRequirement``. The backing - ``InstallRequirement`` will use this link to fetch the distribution. - :param source_link: The link this candidate "originates" from. This is - different from ``link`` when the link is found in the wheel cache. - ``link`` would point to the wheel cache, while this points to the - found remote link (e.g. from pypi.org). 
- """ - - dist: BaseDistribution - is_installed = False - - def __init__( - self, - link: Link, - source_link: Link, - ireq: InstallRequirement, - factory: "Factory", - name: Optional[NormalizedName] = None, - version: Optional[CandidateVersion] = None, - ) -> None: - self._link = link - self._source_link = source_link - self._factory = factory - self._ireq = ireq - self._name = name - self._version = version - self.dist = self._prepare() - - def __str__(self) -> str: - return f"{self.name} {self.version}" - - def __repr__(self) -> str: - return "{class_name}({link!r})".format( - class_name=self.__class__.__name__, - link=str(self._link), - ) - - def __hash__(self) -> int: - return hash((self.__class__, self._link)) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, self.__class__): - return links_equivalent(self._link, other._link) - return False - - @property - def source_link(self) -> Optional[Link]: - return self._source_link - - @property - def project_name(self) -> NormalizedName: - """The normalised name of the project the candidate refers to""" - if self._name is None: - self._name = self.dist.canonical_name - return self._name - - @property - def name(self) -> str: - return self.project_name - - @property - def version(self) -> CandidateVersion: - if self._version is None: - self._version = self.dist.version - return self._version - - def format_for_error(self) -> str: - return "{} {} (from {})".format( - self.name, - self.version, - self._link.file_path if self._link.is_file else self._link, - ) - - def _prepare_distribution(self) -> BaseDistribution: - raise NotImplementedError("Override in subclass") - - def _check_metadata_consistency(self, dist: BaseDistribution) -> None: - """Check for consistency of project name and version of dist.""" - if self._name is not None and self._name != dist.canonical_name: - raise MetadataInconsistent( - self._ireq, - "name", - self._name, - dist.canonical_name, - ) - if self._version is not None and self._version != dist.version: - raise MetadataInconsistent( - self._ireq, - "version", - str(self._version), - str(dist.version), - ) - - def _prepare(self) -> BaseDistribution: - try: - dist = self._prepare_distribution() - except HashError as e: - # Provide HashError the underlying ireq that caused it. This - # provides context for the resulting error message to show the - # offending line to the user. - e.req = self._ireq - raise - except InstallationSubprocessError as exc: - # The output has been presented already, so don't duplicate it. - exc.context = "See above for output." 
- raise - - self._check_metadata_consistency(dist) - return dist - - def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: - requires = self.dist.iter_dependencies() if with_requires else () - for r in requires: - yield self._factory.make_requirement_from_spec(str(r), self._ireq) - yield self._factory.make_requires_python_requirement(self.dist.requires_python) - - def get_install_requirement(self) -> Optional[InstallRequirement]: - return self._ireq - - -class LinkCandidate(_InstallRequirementBackedCandidate): - is_editable = False - - def __init__( - self, - link: Link, - template: InstallRequirement, - factory: "Factory", - name: Optional[NormalizedName] = None, - version: Optional[CandidateVersion] = None, - ) -> None: - source_link = link - cache_entry = factory.get_wheel_cache_entry(link, name) - if cache_entry is not None: - logger.debug("Using cached wheel link: %s", cache_entry.link) - link = cache_entry.link - ireq = make_install_req_from_link(link, template) - assert ireq.link == link - if ireq.link.is_wheel and not ireq.link.is_file: - wheel = Wheel(ireq.link.filename) - wheel_name = canonicalize_name(wheel.name) - assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel" - # Version may not be present for PEP 508 direct URLs - if version is not None: - wheel_version = Version(wheel.version) - assert version == wheel_version, "{!r} != {!r} for wheel {}".format( - version, wheel_version, name - ) - - if ( - cache_entry is not None - and cache_entry.persistent - and template.link is template.original_link - ): - ireq.original_link_is_in_wheel_cache = True - - super().__init__( - link=link, - source_link=source_link, - ireq=ireq, - factory=factory, - name=name, - version=version, - ) - - def _prepare_distribution(self) -> BaseDistribution: - preparer = self._factory.preparer - return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True) - - -class EditableCandidate(_InstallRequirementBackedCandidate): - is_editable = True - - def __init__( - self, - link: Link, - template: InstallRequirement, - factory: "Factory", - name: Optional[NormalizedName] = None, - version: Optional[CandidateVersion] = None, - ) -> None: - super().__init__( - link=link, - source_link=link, - ireq=make_install_req_from_editable(link, template), - factory=factory, - name=name, - version=version, - ) - - def _prepare_distribution(self) -> BaseDistribution: - return self._factory.preparer.prepare_editable_requirement(self._ireq) - - -class AlreadyInstalledCandidate(Candidate): - is_installed = True - source_link = None - - def __init__( - self, - dist: BaseDistribution, - template: InstallRequirement, - factory: "Factory", - ) -> None: - self.dist = dist - self._ireq = _make_install_req_from_dist(dist, template) - self._factory = factory - - # This is just logging some messages, so we can do it eagerly. - # The returned dist would be exactly the same as self.dist because we - # set satisfied_by in _make_install_req_from_dist. - # TODO: Supply reason based on force_reinstall and upgrade_strategy. 
- skip_reason = "already satisfied" - factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) - - def __str__(self) -> str: - return str(self.dist) - - def __repr__(self) -> str: - return "{class_name}({distribution!r})".format( - class_name=self.__class__.__name__, - distribution=self.dist, - ) - - def __hash__(self) -> int: - return hash((self.__class__, self.name, self.version)) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, self.__class__): - return self.name == other.name and self.version == other.version - return False - - @property - def project_name(self) -> NormalizedName: - return self.dist.canonical_name - - @property - def name(self) -> str: - return self.project_name - - @property - def version(self) -> CandidateVersion: - return self.dist.version - - @property - def is_editable(self) -> bool: - return self.dist.editable - - def format_for_error(self) -> str: - return f"{self.name} {self.version} (Installed)" - - def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: - if not with_requires: - return - for r in self.dist.iter_dependencies(): - yield self._factory.make_requirement_from_spec(str(r), self._ireq) - - def get_install_requirement(self) -> Optional[InstallRequirement]: - return None - - -class ExtrasCandidate(Candidate): - """A candidate that has 'extras', indicating additional dependencies. - - Requirements can be for a project with dependencies, something like - foo[extra]. The extras don't affect the project/version being installed - directly, but indicate that we need additional dependencies. We model that - by having an artificial ExtrasCandidate that wraps the "base" candidate. - - The ExtrasCandidate differs from the base in the following ways: - - 1. It has a unique name, of the form foo[extra]. This causes the resolver - to treat it as a separate node in the dependency graph. - 2. When we're getting the candidate's dependencies, - a) We specify that we want the extra dependencies as well. - b) We add a dependency on the base candidate. - See below for why this is needed. - 3. We return None for the underlying InstallRequirement, as the base - candidate will provide it, and we don't want to end up with duplicates. - - The dependency on the base candidate is needed so that the resolver can't - decide that it should recommend foo[extra1] version 1.0 and foo[extra2] - version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 - respectively forces the resolver to recognise that this is a conflict. 
- """ - - def __init__( - self, - base: BaseCandidate, - extras: FrozenSet[str], - ) -> None: - self.base = base - self.extras = extras - - def __str__(self) -> str: - name, rest = str(self.base).split(" ", 1) - return "{}[{}] {}".format(name, ",".join(self.extras), rest) - - def __repr__(self) -> str: - return "{class_name}(base={base!r}, extras={extras!r})".format( - class_name=self.__class__.__name__, - base=self.base, - extras=self.extras, - ) - - def __hash__(self) -> int: - return hash((self.base, self.extras)) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, self.__class__): - return self.base == other.base and self.extras == other.extras - return False - - @property - def project_name(self) -> NormalizedName: - return self.base.project_name - - @property - def name(self) -> str: - """The normalised name of the project the candidate refers to""" - return format_name(self.base.project_name, self.extras) - - @property - def version(self) -> CandidateVersion: - return self.base.version - - def format_for_error(self) -> str: - return "{} [{}]".format( - self.base.format_for_error(), ", ".join(sorted(self.extras)) - ) - - @property - def is_installed(self) -> bool: - return self.base.is_installed - - @property - def is_editable(self) -> bool: - return self.base.is_editable - - @property - def source_link(self) -> Optional[Link]: - return self.base.source_link - - def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: - factory = self.base._factory - - # Add a dependency on the exact base - # (See note 2b in the class docstring) - yield factory.make_requirement_from_candidate(self.base) - if not with_requires: - return - - # The user may have specified extras that the candidate doesn't - # support. We ignore any unsupported extras here. - valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) - invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) - for extra in sorted(invalid_extras): - logger.warning( - "%s %s does not provide the extra '%s'", - self.base.name, - self.version, - extra, - ) - - for r in self.base.dist.iter_dependencies(valid_extras): - requirement = factory.make_requirement_from_spec( - str(r), self.base._ireq, valid_extras - ) - if requirement: - yield requirement - - def get_install_requirement(self) -> Optional[InstallRequirement]: - # We don't return anything here, because we always - # depend on the base candidate, and we'll get the - # install requirement from that. - return None - - -class RequiresPythonCandidate(Candidate): - is_installed = False - source_link = None - - def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None: - if py_version_info is not None: - version_info = normalize_version_info(py_version_info) - else: - version_info = sys.version_info[:3] - self._version = Version(".".join(str(c) for c in version_info)) - - # We don't need to implement __eq__() and __ne__() since there is always - # only one RequiresPythonCandidate in a resolution, i.e. the host Python. - # The built-in object.__eq__() and object.__ne__() do exactly what we want. 
- - def __str__(self) -> str: - return f"Python {self._version}" - - @property - def project_name(self) -> NormalizedName: - return REQUIRES_PYTHON_IDENTIFIER - - @property - def name(self) -> str: - return REQUIRES_PYTHON_IDENTIFIER - - @property - def version(self) -> CandidateVersion: - return self._version - - def format_for_error(self) -> str: - return f"Python {self.version}" - - def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: - return () - - def get_install_requirement(self) -> Optional[InstallRequirement]: - return None diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py deleted file mode 100644 index 261d8d5..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py +++ /dev/null @@ -1,739 +0,0 @@ -import contextlib -import functools -import logging -from typing import ( - TYPE_CHECKING, - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Mapping, - NamedTuple, - Optional, - Sequence, - Set, - Tuple, - TypeVar, - cast, -) - -from pip._vendor.packaging.requirements import InvalidRequirement -from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name -from pip._vendor.resolvelib import ResolutionImpossible - -from pip._internal.cache import CacheEntry, WheelCache -from pip._internal.exceptions import ( - DistributionNotFound, - InstallationError, - InstallationSubprocessError, - MetadataInconsistent, - UnsupportedPythonVersion, - UnsupportedWheel, -) -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution, get_default_environment -from pip._internal.models.link import Link -from pip._internal.models.wheel import Wheel -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req.constructors import install_req_from_link_and_ireq -from pip._internal.req.req_install import ( - InstallRequirement, - check_invalid_constraint_type, -) -from pip._internal.resolution.base import InstallRequirementProvider -from pip._internal.utils.compatibility_tags import get_supported -from pip._internal.utils.hashes import Hashes -from pip._internal.utils.packaging import get_requirement -from pip._internal.utils.virtualenv import running_under_virtualenv - -from .base import Candidate, CandidateVersion, Constraint, Requirement -from .candidates import ( - AlreadyInstalledCandidate, - BaseCandidate, - EditableCandidate, - ExtrasCandidate, - LinkCandidate, - RequiresPythonCandidate, - as_base_candidate, -) -from .found_candidates import FoundCandidates, IndexCandidateInfo -from .requirements import ( - ExplicitRequirement, - RequiresPythonRequirement, - SpecifierRequirement, - UnsatisfiableRequirement, -) - -if TYPE_CHECKING: - from typing import Protocol - - class ConflictCause(Protocol): - requirement: RequiresPythonRequirement - parent: Candidate - - -logger = logging.getLogger(__name__) - -C = TypeVar("C") -Cache = Dict[Link, C] - - -class CollectedRootRequirements(NamedTuple): - requirements: List[Requirement] - constraints: Dict[str, Constraint] - user_requested: Dict[str, int] - - -class Factory: - def __init__( - self, - finder: PackageFinder, - preparer: RequirementPreparer, - make_install_req: InstallRequirementProvider, - wheel_cache: Optional[WheelCache], - use_user_site: bool, - force_reinstall: bool, - ignore_installed: bool, - ignore_requires_python: bool, - 
suppress_build_failures: bool, - py_version_info: Optional[Tuple[int, ...]] = None, - ) -> None: - self._finder = finder - self.preparer = preparer - self._wheel_cache = wheel_cache - self._python_candidate = RequiresPythonCandidate(py_version_info) - self._make_install_req_from_spec = make_install_req - self._use_user_site = use_user_site - self._force_reinstall = force_reinstall - self._ignore_requires_python = ignore_requires_python - self._suppress_build_failures = suppress_build_failures - - self._build_failures: Cache[InstallationError] = {} - self._link_candidate_cache: Cache[LinkCandidate] = {} - self._editable_candidate_cache: Cache[EditableCandidate] = {} - self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} - self._extras_candidate_cache: Dict[ - Tuple[int, FrozenSet[str]], ExtrasCandidate - ] = {} - - if not ignore_installed: - env = get_default_environment() - self._installed_dists = { - dist.canonical_name: dist - for dist in env.iter_installed_distributions(local_only=False) - } - else: - self._installed_dists = {} - - @property - def force_reinstall(self) -> bool: - return self._force_reinstall - - def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: - if not link.is_wheel: - return - wheel = Wheel(link.filename) - if wheel.supported(self._finder.target_python.get_tags()): - return - msg = f"{link.filename} is not a supported wheel on this platform." - raise UnsupportedWheel(msg) - - def _make_extras_candidate( - self, base: BaseCandidate, extras: FrozenSet[str] - ) -> ExtrasCandidate: - cache_key = (id(base), extras) - try: - candidate = self._extras_candidate_cache[cache_key] - except KeyError: - candidate = ExtrasCandidate(base, extras) - self._extras_candidate_cache[cache_key] = candidate - return candidate - - def _make_candidate_from_dist( - self, - dist: BaseDistribution, - extras: FrozenSet[str], - template: InstallRequirement, - ) -> Candidate: - try: - base = self._installed_candidate_cache[dist.canonical_name] - except KeyError: - base = AlreadyInstalledCandidate(dist, template, factory=self) - self._installed_candidate_cache[dist.canonical_name] = base - if not extras: - return base - return self._make_extras_candidate(base, extras) - - def _make_candidate_from_link( - self, - link: Link, - extras: FrozenSet[str], - template: InstallRequirement, - name: Optional[NormalizedName], - version: Optional[CandidateVersion], - ) -> Optional[Candidate]: - # TODO: Check already installed candidate, and use it if the link and - # editable flag match. - - if link in self._build_failures: - # We already tried this candidate before, and it does not build. - # Don't bother trying again. 
- return None - - if template.editable: - if link not in self._editable_candidate_cache: - try: - self._editable_candidate_cache[link] = EditableCandidate( - link, - template, - factory=self, - name=name, - version=version, - ) - except MetadataInconsistent as e: - logger.info( - "Discarding [blue underline]%s[/]: [yellow]%s[reset]", - link, - e, - extra={"markup": True}, - ) - self._build_failures[link] = e - return None - except InstallationSubprocessError as e: - if not self._suppress_build_failures: - raise - logger.warning("Discarding %s due to build failure: %s", link, e) - self._build_failures[link] = e - return None - - base: BaseCandidate = self._editable_candidate_cache[link] - else: - if link not in self._link_candidate_cache: - try: - self._link_candidate_cache[link] = LinkCandidate( - link, - template, - factory=self, - name=name, - version=version, - ) - except MetadataInconsistent as e: - logger.info( - "Discarding [blue underline]%s[/]: [yellow]%s[reset]", - link, - e, - extra={"markup": True}, - ) - self._build_failures[link] = e - return None - except InstallationSubprocessError as e: - if not self._suppress_build_failures: - raise - logger.warning("Discarding %s due to build failure: %s", link, e) - self._build_failures[link] = e - return None - base = self._link_candidate_cache[link] - - if not extras: - return base - return self._make_extras_candidate(base, extras) - - def _iter_found_candidates( - self, - ireqs: Sequence[InstallRequirement], - specifier: SpecifierSet, - hashes: Hashes, - prefers_installed: bool, - incompatible_ids: Set[int], - ) -> Iterable[Candidate]: - if not ireqs: - return () - - # The InstallRequirement implementation requires us to give it a - # "template". Here we just choose the first requirement to represent - # all of them. - # Hopefully the Project model can correct this mismatch in the future. - template = ireqs[0] - assert template.req, "Candidates found on index must be PEP 508" - name = canonicalize_name(template.req.name) - - extras: FrozenSet[str] = frozenset() - for ireq in ireqs: - assert ireq.req, "Candidates found on index must be PEP 508" - specifier &= ireq.req.specifier - hashes &= ireq.hashes(trust_internet=False) - extras |= frozenset(ireq.extras) - - def _get_installed_candidate() -> Optional[Candidate]: - """Get the candidate for the currently-installed version.""" - # If --force-reinstall is set, we want the version from the index - # instead, so we "pretend" there is nothing installed. - if self._force_reinstall: - return None - try: - installed_dist = self._installed_dists[name] - except KeyError: - return None - # Don't use the installed distribution if its version does not fit - # the current dependency graph. - if not specifier.contains(installed_dist.version, prereleases=True): - return None - candidate = self._make_candidate_from_dist( - dist=installed_dist, - extras=extras, - template=template, - ) - # The candidate is a known incompatibility. Don't use it. - if id(candidate) in incompatible_ids: - return None - return candidate - - def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: - result = self._finder.find_best_candidate( - project_name=name, - specifier=specifier, - hashes=hashes, - ) - icans = list(result.iter_applicable()) - - # PEP 592: Yanked releases are ignored unless the specifier - # explicitly pins a version (via '==' or '===') that can be - # solely satisfied by a yanked release. 
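# --- Illustrative sketch (not part of the deleted pip source) ---
# The is_pinned() test defined just below treats only '===' and an exact '=='
# (no trailing '.*') as pins, so per PEP 592 a yanked release is selected only
# when every applicable candidate is yanked AND the user pinned a version.
# Worked examples with the standalone packaging library (looks_pinned is a
# hypothetical stand-in):
from packaging.specifiers import SpecifierSet


def looks_pinned(spec: str) -> bool:
    for sp in SpecifierSet(spec):
        if sp.operator == "===":
            return True
        if sp.operator == "==" and not sp.version.endswith(".*"):
            return True
    return False


assert looks_pinned("==1.2.3")
assert looks_pinned("===1.2.3")
assert not looks_pinned(">=1.0,<2.0")  # a range is not a pin
assert not looks_pinned("==1.2.*")     # wildcard equality is not a pin
# --- end sketch ---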
- all_yanked = all(ican.link.is_yanked for ican in icans) - - def is_pinned(specifier: SpecifierSet) -> bool: - for sp in specifier: - if sp.operator == "===": - return True - if sp.operator != "==": - continue - if sp.version.endswith(".*"): - continue - return True - return False - - pinned = is_pinned(specifier) - - # PackageFinder returns earlier versions first, so we reverse. - for ican in reversed(icans): - if not (all_yanked and pinned) and ican.link.is_yanked: - continue - func = functools.partial( - self._make_candidate_from_link, - link=ican.link, - extras=extras, - template=template, - name=name, - version=ican.version, - ) - yield ican.version, func - - return FoundCandidates( - iter_index_candidate_infos, - _get_installed_candidate(), - prefers_installed, - incompatible_ids, - ) - - def _iter_explicit_candidates_from_base( - self, - base_requirements: Iterable[Requirement], - extras: FrozenSet[str], - ) -> Iterator[Candidate]: - """Produce explicit candidates from the base given an extra-ed package. - - :param base_requirements: Requirements known to the resolver. The - requirements are guaranteed to not have extras. - :param extras: The extras to inject into the explicit requirements' - candidates. - """ - for req in base_requirements: - lookup_cand, _ = req.get_candidate_lookup() - if lookup_cand is None: # Not explicit. - continue - # We've stripped extras from the identifier, and should always - # get a BaseCandidate here, unless there's a bug elsewhere. - base_cand = as_base_candidate(lookup_cand) - assert base_cand is not None, "no extras here" - yield self._make_extras_candidate(base_cand, extras) - - def _iter_candidates_from_constraints( - self, - identifier: str, - constraint: Constraint, - template: InstallRequirement, - ) -> Iterator[Candidate]: - """Produce explicit candidates from constraints. - - This creates "fake" InstallRequirement objects that are basically clones - of what "should" be the template, but with original_link set to link. - """ - for link in constraint.links: - self._fail_if_link_is_unsupported_wheel(link) - candidate = self._make_candidate_from_link( - link, - extras=frozenset(), - template=install_req_from_link_and_ireq(link, template), - name=canonicalize_name(identifier), - version=None, - ) - if candidate: - yield candidate - - def find_candidates( - self, - identifier: str, - requirements: Mapping[str, Iterable[Requirement]], - incompatibilities: Mapping[str, Iterator[Candidate]], - constraint: Constraint, - prefers_installed: bool, - ) -> Iterable[Candidate]: - # Collect basic lookup information from the requirements. - explicit_candidates: Set[Candidate] = set() - ireqs: List[InstallRequirement] = [] - for req in requirements[identifier]: - cand, ireq = req.get_candidate_lookup() - if cand is not None: - explicit_candidates.add(cand) - if ireq is not None: - ireqs.append(ireq) - - # If the current identifier contains extras, add explicit candidates - # from entries from extra-less identifier. - with contextlib.suppress(InvalidRequirement): - parsed_requirement = get_requirement(identifier) - explicit_candidates.update( - self._iter_explicit_candidates_from_base( - requirements.get(parsed_requirement.name, ()), - frozenset(parsed_requirement.extras), - ), - ) - - # Add explicit candidates from constraints. We only do this if there are - # known ireqs, which represent requirements not already explicit. 
If - # there are no ireqs, we're constraining already-explicit requirements, - # which is handled later when we return the explicit candidates. - if ireqs: - try: - explicit_candidates.update( - self._iter_candidates_from_constraints( - identifier, - constraint, - template=ireqs[0], - ), - ) - except UnsupportedWheel: - # If we're constrained to install a wheel incompatible with the - # target architecture, no candidates will ever be valid. - return () - - # Since we cache all the candidates, incompatibility identification - # can be made quicker by comparing only the id() values. - incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())} - - # If none of the requirements want an explicit candidate, we can ask - # the finder for candidates. - if not explicit_candidates: - return self._iter_found_candidates( - ireqs, - constraint.specifier, - constraint.hashes, - prefers_installed, - incompat_ids, - ) - - return ( - c - for c in explicit_candidates - if id(c) not in incompat_ids - and constraint.is_satisfied_by(c) - and all(req.is_satisfied_by(c) for req in requirements[identifier]) - ) - - def _make_requirement_from_install_req( - self, ireq: InstallRequirement, requested_extras: Iterable[str] - ) -> Optional[Requirement]: - if not ireq.match_markers(requested_extras): - logger.info( - "Ignoring %s: markers '%s' don't match your environment", - ireq.name, - ireq.markers, - ) - return None - if not ireq.link: - return SpecifierRequirement(ireq) - self._fail_if_link_is_unsupported_wheel(ireq.link) - cand = self._make_candidate_from_link( - ireq.link, - extras=frozenset(ireq.extras), - template=ireq, - name=canonicalize_name(ireq.name) if ireq.name else None, - version=None, - ) - if cand is None: - # There's no way we can satisfy a URL requirement if the underlying - # candidate fails to build. An unnamed URL must be user-supplied, so - # we fail eagerly. If the URL is named, an unsatisfiable requirement - # can make the resolver do the right thing, either backtrack (and - # maybe find some other requirement that's buildable) or raise a - # ResolutionImpossible eventually. 
- if not ireq.name: - raise self._build_failures[ireq.link] - return UnsatisfiableRequirement(canonicalize_name(ireq.name)) - return self.make_requirement_from_candidate(cand) - - def collect_root_requirements( - self, root_ireqs: List[InstallRequirement] - ) -> CollectedRootRequirements: - collected = CollectedRootRequirements([], {}, {}) - for i, ireq in enumerate(root_ireqs): - if ireq.constraint: - # Ensure we only accept valid constraints - problem = check_invalid_constraint_type(ireq) - if problem: - raise InstallationError(problem) - if not ireq.match_markers(): - continue - assert ireq.name, "Constraint must be named" - name = canonicalize_name(ireq.name) - if name in collected.constraints: - collected.constraints[name] &= ireq - else: - collected.constraints[name] = Constraint.from_ireq(ireq) - else: - req = self._make_requirement_from_install_req( - ireq, - requested_extras=(), - ) - if req is None: - continue - if ireq.user_supplied and req.name not in collected.user_requested: - collected.user_requested[req.name] = i - collected.requirements.append(req) - return collected - - def make_requirement_from_candidate( - self, candidate: Candidate - ) -> ExplicitRequirement: - return ExplicitRequirement(candidate) - - def make_requirement_from_spec( - self, - specifier: str, - comes_from: Optional[InstallRequirement], - requested_extras: Iterable[str] = (), - ) -> Optional[Requirement]: - ireq = self._make_install_req_from_spec(specifier, comes_from) - return self._make_requirement_from_install_req(ireq, requested_extras) - - def make_requires_python_requirement( - self, - specifier: SpecifierSet, - ) -> Optional[Requirement]: - if self._ignore_requires_python: - return None - # Don't bother creating a dependency for an empty Requires-Python. - if not str(specifier): - return None - return RequiresPythonRequirement(specifier, self._python_candidate) - - def get_wheel_cache_entry( - self, link: Link, name: Optional[str] - ) -> Optional[CacheEntry]: - """Look up the link in the wheel cache. - - If ``preparer.require_hashes`` is True, don't use the wheel cache, - because cached wheels, always built locally, have different hashes - than the files downloaded from the index server and thus throw false - hash mismatches. Furthermore, cached wheels at present have - nondeterministic contents due to file modification times. - """ - if self._wheel_cache is None or self.preparer.require_hashes: - return None - return self._wheel_cache.get_cache_entry( - link=link, - package_name=name, - supported_tags=get_supported(), - ) - - def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]: - # TODO: Are there more cases this needs to return True? Editable? - dist = self._installed_dists.get(candidate.project_name) - if dist is None: # Not installed, no uninstallation required. - return None - - # We're installing into global site. The current installation must - # be uninstalled, no matter it's in global or user site, because the - # user site installation has precedence over global. - if not self._use_user_site: - return dist - - # We're installing into user site. Remove the user site installation. - if dist.in_usersite: - return dist - - # We're installing into user site, but the installed incompatible - # package is in global site. We can't uninstall that, and would let - # the new user installation to "shadow" it. But shadowing won't work - # in virtual environments, so we error out. 
- if running_under_virtualenv() and dist.in_site_packages: - message = ( - f"Will not install to the user site because it will lack " - f"sys.path precedence to {dist.raw_name} in {dist.location}" - ) - raise InstallationError(message) - return None - - def _report_requires_python_error( - self, causes: Sequence["ConflictCause"] - ) -> UnsupportedPythonVersion: - assert causes, "Requires-Python error reported with no cause" - - version = self._python_candidate.version - - if len(causes) == 1: - specifier = str(causes[0].requirement.specifier) - message = ( - f"Package {causes[0].parent.name!r} requires a different " - f"Python: {version} not in {specifier!r}" - ) - return UnsupportedPythonVersion(message) - - message = f"Packages require a different Python. {version} not in:" - for cause in causes: - package = cause.parent.format_for_error() - specifier = str(cause.requirement.specifier) - message += f"\n{specifier!r} (required by {package})" - return UnsupportedPythonVersion(message) - - def _report_single_requirement_conflict( - self, req: Requirement, parent: Optional[Candidate] - ) -> DistributionNotFound: - if parent is None: - req_disp = str(req) - else: - req_disp = f"{req} (from {parent.name})" - - cands = self._finder.find_all_candidates(req.project_name) - versions = [str(v) for v in sorted({c.version for c in cands})] - - logger.critical( - "Could not find a version that satisfies the requirement %s " - "(from versions: %s)", - req_disp, - ", ".join(versions) or "none", - ) - if str(req) == "requirements.txt": - logger.info( - "HINT: You are attempting to install a package literally " - 'named "requirements.txt" (which cannot exist). Consider ' - "using the '-r' flag to install the packages listed in " - "requirements.txt" - ) - - return DistributionNotFound(f"No matching distribution found for {req}") - - def get_installation_error( - self, - e: "ResolutionImpossible[Requirement, Candidate]", - constraints: Dict[str, Constraint], - ) -> InstallationError: - - assert e.causes, "Installation error reported with no cause" - - # If one of the things we can't solve is "we need Python X.Y", - # that is what we report. - requires_python_causes = [ - cause - for cause in e.causes - if isinstance(cause.requirement, RequiresPythonRequirement) - and not cause.requirement.is_satisfied_by(self._python_candidate) - ] - if requires_python_causes: - # The comprehension above makes sure all Requirement instances are - # RequiresPythonRequirement, so let's cast for convenience. - return self._report_requires_python_error( - cast("Sequence[ConflictCause]", requires_python_causes), - ) - - # Otherwise, we have a set of causes which can't all be satisfied - # at once. - - # The simplest case is when we have *one* cause that can't be - # satisfied. We just report that case. - if len(e.causes) == 1: - req, parent = e.causes[0] - if req.name not in constraints: - return self._report_single_requirement_conflict(req, parent) - - # OK, we now have a list of requirements that can't all be - # satisfied at once. 
- - # A couple of formatting helpers - def text_join(parts: List[str]) -> str: - if len(parts) == 1: - return parts[0] - - return ", ".join(parts[:-1]) + " and " + parts[-1] - - def describe_trigger(parent: Candidate) -> str: - ireq = parent.get_install_requirement() - if not ireq or not ireq.comes_from: - return f"{parent.name}=={parent.version}" - if isinstance(ireq.comes_from, InstallRequirement): - return str(ireq.comes_from.name) - return str(ireq.comes_from) - - triggers = set() - for req, parent in e.causes: - if parent is None: - # This is a root requirement, so we can report it directly - trigger = req.format_for_error() - else: - trigger = describe_trigger(parent) - triggers.add(trigger) - - if triggers: - info = text_join(sorted(triggers)) - else: - info = "the requested packages" - - msg = ( - "Cannot install {} because these package versions " - "have conflicting dependencies.".format(info) - ) - logger.critical(msg) - msg = "\nThe conflict is caused by:" - - relevant_constraints = set() - for req, parent in e.causes: - if req.name in constraints: - relevant_constraints.add(req.name) - msg = msg + "\n " - if parent: - msg = msg + f"{parent.name} {parent.version} depends on " - else: - msg = msg + "The user requested " - msg = msg + req.format_for_error() - for key in relevant_constraints: - spec = constraints[key].specifier - msg += f"\n The user requested (constraint) {key}{spec}" - - msg = ( - msg - + "\n\n" - + "To fix this you could try to:\n" - + "1. loosen the range of package versions you've specified\n" - + "2. remove package versions to allow pip attempt to solve " - + "the dependency conflict\n" - ) - - logger.info(msg) - - return DistributionNotFound( - "ResolutionImpossible: for help visit " - "https://pip.pypa.io/en/latest/topics/dependency-resolution/" - "#dealing-with-dependency-conflicts" - ) diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py deleted file mode 100644 index 8663097..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py +++ /dev/null @@ -1,155 +0,0 @@ -"""Utilities to lazily create and visit candidates found. - -Creating and visiting a candidate is a *very* costly operation. It involves -fetching, extracting, potentially building modules from source, and verifying -distribution metadata. It is therefore crucial for performance to keep -everything here lazy all the way down, so we only touch candidates that we -absolutely need, and not "download the world" when we only need one version of -something. -""" - -import functools -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple - -from pip._vendor.packaging.version import _BaseVersion - -from .base import Candidate - -IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]] - -if TYPE_CHECKING: - SequenceCandidate = Sequence[Candidate] -else: - # For compatibility: Python before 3.9 does not support using [] on the - # Sequence class. - # - # >>> from collections.abc import Sequence - # >>> Sequence[str] - # Traceback (most recent call last): - # File "", line 1, in - # TypeError: 'ABCMeta' object is not subscriptable - # - # TODO: Remove this block after dropping Python 3.8 support. - SequenceCandidate = Sequence - - -def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]: - """Iterator for ``FoundCandidates``. 
- - This iterator is used when the package is not already installed. Candidates - from index come later in their normal ordering. - """ - versions_found: Set[_BaseVersion] = set() - for version, func in infos: - if version in versions_found: - continue - candidate = func() - if candidate is None: - continue - yield candidate - versions_found.add(version) - - -def _iter_built_with_prepended( - installed: Candidate, infos: Iterator[IndexCandidateInfo] -) -> Iterator[Candidate]: - """Iterator for ``FoundCandidates``. - - This iterator is used when the resolver prefers the already-installed - candidate and NOT to upgrade. The installed candidate is therefore - always yielded first, and candidates from index come later in their - normal ordering, except skipped when the version is already installed. - """ - yield installed - versions_found: Set[_BaseVersion] = {installed.version} - for version, func in infos: - if version in versions_found: - continue - candidate = func() - if candidate is None: - continue - yield candidate - versions_found.add(version) - - -def _iter_built_with_inserted( - installed: Candidate, infos: Iterator[IndexCandidateInfo] -) -> Iterator[Candidate]: - """Iterator for ``FoundCandidates``. - - This iterator is used when the resolver prefers to upgrade an - already-installed package. Candidates from index are returned in their - normal ordering, except replaced when the version is already installed. - - The implementation iterates through and yields other candidates, inserting - the installed candidate exactly once before we start yielding older or - equivalent candidates, or after all other candidates if they are all newer. - """ - versions_found: Set[_BaseVersion] = set() - for version, func in infos: - if version in versions_found: - continue - # If the installed candidate is better, yield it first. - if installed.version >= version: - yield installed - versions_found.add(installed.version) - candidate = func() - if candidate is None: - continue - yield candidate - versions_found.add(version) - - # If the installed candidate is older than all other candidates. - if installed.version not in versions_found: - yield installed - - -class FoundCandidates(SequenceCandidate): - """A lazy sequence to provide candidates to the resolver. - - The intended usage is to return this from `find_matches()` so the resolver - can iterate through the sequence multiple times, but only access the index - page when remote packages are actually needed. This improve performances - when suitable candidates are already installed on disk. - """ - - def __init__( - self, - get_infos: Callable[[], Iterator[IndexCandidateInfo]], - installed: Optional[Candidate], - prefers_installed: bool, - incompatible_ids: Set[int], - ): - self._get_infos = get_infos - self._installed = installed - self._prefers_installed = prefers_installed - self._incompatible_ids = incompatible_ids - - def __getitem__(self, index: Any) -> Any: - # Implemented to satisfy the ABC check. This is not needed by the - # resolver, and should not be used by the provider either (for - # performance reasons). 
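A toy rendering of the ordering implemented by _iter_built_with_inserted above, using plain integers instead of Candidate objects (illustrative only; the real code additionally handles lazy candidate construction and duplicate versions):

def iter_with_inserted(installed, index_versions):
    # Index versions come newest-first; the installed version is slotted in
    # exactly once, before the first index version it is >= to, or last.
    emitted_installed = False
    for v in sorted(set(index_versions), reverse=True):
        if not emitted_installed and installed >= v:
            yield ("installed", installed)
            emitted_installed = True
        if v != installed:
            yield ("index", v)
    if not emitted_installed:
        yield ("installed", installed)

print(list(iter_with_inserted(3, [5, 4, 2, 1])))
# [('index', 5), ('index', 4), ('installed', 3), ('index', 2), ('index', 1)]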
- raise NotImplementedError("don't do this") - - def __iter__(self) -> Iterator[Candidate]: - infos = self._get_infos() - if not self._installed: - iterator = _iter_built(infos) - elif self._prefers_installed: - iterator = _iter_built_with_prepended(self._installed, infos) - else: - iterator = _iter_built_with_inserted(self._installed, infos) - return (c for c in iterator if id(c) not in self._incompatible_ids) - - def __len__(self) -> int: - # Implemented to satisfy the ABC check. This is not needed by the - # resolver, and should not be used by the provider either (for - # performance reasons). - raise NotImplementedError("don't do this") - - @functools.lru_cache(maxsize=1) - def __bool__(self) -> bool: - if self._prefers_installed and self._installed: - return True - return any(self) diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py deleted file mode 100644 index e6ec959..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py +++ /dev/null @@ -1,248 +0,0 @@ -import collections -import math -from typing import ( - TYPE_CHECKING, - Dict, - Iterable, - Iterator, - Mapping, - Sequence, - TypeVar, - Union, -) - -from pip._vendor.resolvelib.providers import AbstractProvider - -from .base import Candidate, Constraint, Requirement -from .candidates import REQUIRES_PYTHON_IDENTIFIER -from .factory import Factory - -if TYPE_CHECKING: - from pip._vendor.resolvelib.providers import Preference - from pip._vendor.resolvelib.resolvers import RequirementInformation - - PreferenceInformation = RequirementInformation[Requirement, Candidate] - - _ProviderBase = AbstractProvider[Requirement, Candidate, str] -else: - _ProviderBase = AbstractProvider - -# Notes on the relationship between the provider, the factory, and the -# candidate and requirement classes. -# -# The provider is a direct implementation of the resolvelib class. Its role -# is to deliver the API that resolvelib expects. -# -# Rather than work with completely abstract "requirement" and "candidate" -# concepts as resolvelib does, pip has concrete classes implementing these two -# ideas. The API of Requirement and Candidate objects are defined in the base -# classes, but essentially map fairly directly to the equivalent provider -# methods. In particular, `find_matches` and `is_satisfied_by` are -# requirement methods, and `get_dependencies` is a candidate method. -# -# The factory is the interface to pip's internal mechanisms. It is stateless, -# and is created by the resolver and held as a property of the provider. It is -# responsible for creating Requirement and Candidate objects, and provides -# services to those objects (access to pip's finder and preparer). - - -D = TypeVar("D") -V = TypeVar("V") - - -def _get_with_identifier( - mapping: Mapping[str, V], - identifier: str, - default: D, -) -> Union[D, V]: - """Get item from a package name lookup mapping with a resolver identifier. - - This extra logic is needed when the target mapping is keyed by package - name, which cannot be directly looked up with an identifier (which may - contain requested extras). Additional logic is added to also look up a value - by "cleaning up" the extras from the identifier. - """ - if identifier in mapping: - return mapping[identifier] - # HACK: Theoretically we should check whether this identifier is a valid - # "NAME[EXTRAS]" format, and parse out the name part with packaging or - # some regular expression. 
But since pip's resolver only spits out three - # kinds of identifiers: normalized PEP 503 names, normalized names plus - # extras, and Requires-Python, we can cheat a bit here. - name, open_bracket, _ = identifier.partition("[") - if open_bracket and name in mapping: - return mapping[name] - return default - - -class PipProvider(_ProviderBase): - """Pip's provider implementation for resolvelib. - - :params constraints: A mapping of constraints specified by the user. Keys - are canonicalized project names. - :params ignore_dependencies: Whether the user specified ``--no-deps``. - :params upgrade_strategy: The user-specified upgrade strategy. - :params user_requested: A set of canonicalized package names that the user - supplied for pip to install/upgrade. - """ - - def __init__( - self, - factory: Factory, - constraints: Dict[str, Constraint], - ignore_dependencies: bool, - upgrade_strategy: str, - user_requested: Dict[str, int], - ) -> None: - self._factory = factory - self._constraints = constraints - self._ignore_dependencies = ignore_dependencies - self._upgrade_strategy = upgrade_strategy - self._user_requested = user_requested - self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf) - - def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str: - return requirement_or_candidate.name - - def get_preference( # type: ignore - self, - identifier: str, - resolutions: Mapping[str, Candidate], - candidates: Mapping[str, Iterator[Candidate]], - information: Mapping[str, Iterable["PreferenceInformation"]], - backtrack_causes: Sequence["PreferenceInformation"], - ) -> "Preference": - """Produce a sort key for given requirement based on preference. - - The lower the return value is, the more preferred this group of - arguments is. - - Currently pip considers the followings in order: - - * Prefer if any of the known requirements is "direct", e.g. points to an - explicit URL. - * If equal, prefer if any requirement is "pinned", i.e. contains - operator ``===`` or ``==``. - * If equal, calculate an approximate "depth" and resolve requirements - closer to the user-specified requirements first. - * Order user-specified requirements by the order they are specified. - * If equal, prefers "non-free" requirements, i.e. contains at least one - operator, such as ``>=`` or ``<``. - * If equal, order alphabetically for consistency (helps debuggability). - """ - lookups = (r.get_candidate_lookup() for r, _ in information[identifier]) - candidate, ireqs = zip(*lookups) - operators = [ - specifier.operator - for specifier_set in (ireq.specifier for ireq in ireqs if ireq) - for specifier in specifier_set - ] - - direct = candidate is not None - pinned = any(op[:2] == "==" for op in operators) - unfree = bool(operators) - - try: - requested_order: Union[int, float] = self._user_requested[identifier] - except KeyError: - requested_order = math.inf - parent_depths = ( - self._known_depths[parent.name] if parent is not None else 0.0 - for _, parent in information[identifier] - ) - inferred_depth = min(d for d in parent_depths) + 1.0 - else: - inferred_depth = 1.0 - self._known_depths[identifier] = inferred_depth - - requested_order = self._user_requested.get(identifier, math.inf) - - # Requires-Python has only one candidate and the check is basically - # free, so we always do it first to avoid needless work if it fails. 
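The method continues below by returning an ordinary tuple as its sort key; as a toy example of how Python orders such tuples (the identifiers and flag values here are invented), note that False sorts before True, so keys beginning with (False, ...) come first:

keys = {
    "requires-python": (False, False, True),   # Requires-Python is always checked first
    "pinned-direct-dep": (True, False, False),
    "ordinary-dep": (True, True, True),
}
print(sorted(keys, key=keys.__getitem__))
# ['requires-python', 'pinned-direct-dep', 'ordinary-dep']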
- requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER - - # HACK: Setuptools have a very long and solid backward compatibility - # track record, and extremely few projects would request a narrow, - # non-recent version range of it since that would break a lot things. - # (Most projects specify it only to request for an installer feature, - # which does not work, but that's another topic.) Intentionally - # delaying Setuptools helps reduce branches the resolver has to check. - # This serves as a temporary fix for issues like "apache-airflow[all]" - # while we work on "proper" branch pruning techniques. - delay_this = identifier == "setuptools" - - # Prefer the causes of backtracking on the assumption that the problem - # resolving the dependency tree is related to the failures that caused - # the backtracking - backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes) - - return ( - not requires_python, - delay_this, - not direct, - not pinned, - not backtrack_cause, - inferred_depth, - requested_order, - not unfree, - identifier, - ) - - def find_matches( - self, - identifier: str, - requirements: Mapping[str, Iterator[Requirement]], - incompatibilities: Mapping[str, Iterator[Candidate]], - ) -> Iterable[Candidate]: - def _eligible_for_upgrade(identifier: str) -> bool: - """Are upgrades allowed for this project? - - This checks the upgrade strategy, and whether the project was one - that the user specified in the command line, in order to decide - whether we should upgrade if there's a newer version available. - - (Note that we don't need access to the `--upgrade` flag, because - an upgrade strategy of "to-satisfy-only" means that `--upgrade` - was not specified). - """ - if self._upgrade_strategy == "eager": - return True - elif self._upgrade_strategy == "only-if-needed": - user_order = _get_with_identifier( - self._user_requested, - identifier, - default=None, - ) - return user_order is not None - return False - - constraint = _get_with_identifier( - self._constraints, - identifier, - default=Constraint.empty(), - ) - return self._factory.find_candidates( - identifier=identifier, - requirements=requirements, - constraint=constraint, - prefers_installed=(not _eligible_for_upgrade(identifier)), - incompatibilities=incompatibilities, - ) - - def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: - return requirement.is_satisfied_by(candidate) - - def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]: - with_requires = not self._ignore_dependencies - return [r for r in candidate.iter_dependencies(with_requires) if r is not None] - - @staticmethod - def is_backtrack_cause( - identifier: str, backtrack_causes: Sequence["PreferenceInformation"] - ) -> bool: - for backtrack_cause in backtrack_causes: - if identifier == backtrack_cause.requirement.name: - return True - if backtrack_cause.parent and identifier == backtrack_cause.parent.name: - return True - return False diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/reporter.py deleted file mode 100644 index 6ced532..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/reporter.py +++ /dev/null @@ -1,68 +0,0 @@ -from collections import defaultdict -from logging import getLogger -from typing import Any, DefaultDict - -from pip._vendor.resolvelib.reporters import BaseReporter - -from .base import Candidate, Requirement - -logger = getLogger(__name__) - - -class 
PipReporter(BaseReporter): - def __init__(self) -> None: - self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int) - - self._messages_at_backtrack = { - 1: ( - "pip is looking at multiple versions of {package_name} to " - "determine which version is compatible with other " - "requirements. This could take a while." - ), - 8: ( - "pip is looking at multiple versions of {package_name} to " - "determine which version is compatible with other " - "requirements. This could take a while." - ), - 13: ( - "This is taking longer than usual. You might need to provide " - "the dependency resolver with stricter constraints to reduce " - "runtime. See https://pip.pypa.io/warnings/backtracking for " - "guidance. If you want to abort this run, press Ctrl + C." - ), - } - - def backtracking(self, candidate: Candidate) -> None: - self.backtracks_by_package[candidate.name] += 1 - - count = self.backtracks_by_package[candidate.name] - if count not in self._messages_at_backtrack: - return - - message = self._messages_at_backtrack[count] - logger.info("INFO: %s", message.format(package_name=candidate.name)) - - -class PipDebuggingReporter(BaseReporter): - """A reporter that does an info log for every event it sees.""" - - def starting(self) -> None: - logger.info("Reporter.starting()") - - def starting_round(self, index: int) -> None: - logger.info("Reporter.starting_round(%r)", index) - - def ending_round(self, index: int, state: Any) -> None: - logger.info("Reporter.ending_round(%r, state)", index) - - def ending(self, state: Any) -> None: - logger.info("Reporter.ending(%r)", state) - - def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None: - logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) - - def backtracking(self, candidate: Candidate) -> None: - logger.info("Reporter.backtracking(%r)", candidate) - - def pinning(self, candidate: Candidate) -> None: - logger.info("Reporter.pinning(%r)", candidate) diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py deleted file mode 100644 index f561f1f..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py +++ /dev/null @@ -1,166 +0,0 @@ -from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name - -from pip._internal.req.req_install import InstallRequirement - -from .base import Candidate, CandidateLookup, Requirement, format_name - - -class ExplicitRequirement(Requirement): - def __init__(self, candidate: Candidate) -> None: - self.candidate = candidate - - def __str__(self) -> str: - return str(self.candidate) - - def __repr__(self) -> str: - return "{class_name}({candidate!r})".format( - class_name=self.__class__.__name__, - candidate=self.candidate, - ) - - @property - def project_name(self) -> NormalizedName: - # No need to canonicalize - the candidate did this - return self.candidate.project_name - - @property - def name(self) -> str: - # No need to canonicalize - the candidate did this - return self.candidate.name - - def format_for_error(self) -> str: - return self.candidate.format_for_error() - - def get_candidate_lookup(self) -> CandidateLookup: - return self.candidate, None - - def is_satisfied_by(self, candidate: Candidate) -> bool: - return candidate == self.candidate - - -class SpecifierRequirement(Requirement): - def __init__(self, ireq: InstallRequirement) -> None: - assert 
ireq.link is None, "This is a link, not a specifier" - self._ireq = ireq - self._extras = frozenset(ireq.extras) - - def __str__(self) -> str: - return str(self._ireq.req) - - def __repr__(self) -> str: - return "{class_name}({requirement!r})".format( - class_name=self.__class__.__name__, - requirement=str(self._ireq.req), - ) - - @property - def project_name(self) -> NormalizedName: - assert self._ireq.req, "Specifier-backed ireq is always PEP 508" - return canonicalize_name(self._ireq.req.name) - - @property - def name(self) -> str: - return format_name(self.project_name, self._extras) - - def format_for_error(self) -> str: - - # Convert comma-separated specifiers into "A, B, ..., F and G" - # This makes the specifier a bit more "human readable", without - # risking a change in meaning. (Hopefully! Not all edge cases have - # been checked) - parts = [s.strip() for s in str(self).split(",")] - if len(parts) == 0: - return "" - elif len(parts) == 1: - return parts[0] - - return ", ".join(parts[:-1]) + " and " + parts[-1] - - def get_candidate_lookup(self) -> CandidateLookup: - return None, self._ireq - - def is_satisfied_by(self, candidate: Candidate) -> bool: - assert candidate.name == self.name, ( - f"Internal issue: Candidate is not for this requirement " - f"{candidate.name} vs {self.name}" - ) - # We can safely always allow prereleases here since PackageFinder - # already implements the prerelease logic, and would have filtered out - # prerelease candidates if the user does not expect them. - assert self._ireq.req, "Specifier-backed ireq is always PEP 508" - spec = self._ireq.req.specifier - return spec.contains(candidate.version, prereleases=True) - - -class RequiresPythonRequirement(Requirement): - """A requirement representing Requires-Python metadata.""" - - def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: - self.specifier = specifier - self._candidate = match - - def __str__(self) -> str: - return f"Python {self.specifier}" - - def __repr__(self) -> str: - return "{class_name}({specifier!r})".format( - class_name=self.__class__.__name__, - specifier=str(self.specifier), - ) - - @property - def project_name(self) -> NormalizedName: - return self._candidate.project_name - - @property - def name(self) -> str: - return self._candidate.name - - def format_for_error(self) -> str: - return str(self) - - def get_candidate_lookup(self) -> CandidateLookup: - if self.specifier.contains(self._candidate.version, prereleases=True): - return self._candidate, None - return None, None - - def is_satisfied_by(self, candidate: Candidate) -> bool: - assert candidate.name == self._candidate.name, "Not Python candidate" - # We can safely always allow prereleases here since PackageFinder - # already implements the prerelease logic, and would have filtered out - # prerelease candidates if the user does not expect them. 
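The prerelease behaviour relied on above can be checked with the standalone packaging library (pip uses its own vendored copy; the specifier and versions below are made up):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=1.0,<2.0")
print(spec.contains("1.5"))                       # True
print(spec.contains("1.5rc1"))                    # False: prereleases excluded by default
print(spec.contains("1.5rc1", prereleases=True))  # True: the resolver opts in explicitly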
- return self.specifier.contains(candidate.version, prereleases=True) - - -class UnsatisfiableRequirement(Requirement): - """A requirement that cannot be satisfied.""" - - def __init__(self, name: NormalizedName) -> None: - self._name = name - - def __str__(self) -> str: - return f"{self._name} (unavailable)" - - def __repr__(self) -> str: - return "{class_name}({name!r})".format( - class_name=self.__class__.__name__, - name=str(self._name), - ) - - @property - def project_name(self) -> NormalizedName: - return self._name - - @property - def name(self) -> str: - return self._name - - def format_for_error(self) -> str: - return str(self) - - def get_candidate_lookup(self) -> CandidateLookup: - return None, None - - def is_satisfied_by(self, candidate: Candidate) -> bool: - return False diff --git a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py deleted file mode 100644 index 32ef789..0000000 --- a/venv/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py +++ /dev/null @@ -1,298 +0,0 @@ -import functools -import logging -import os -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast - -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible -from pip._vendor.resolvelib import Resolver as RLResolver -from pip._vendor.resolvelib.structs import DirectedGraph - -from pip._internal.cache import WheelCache -from pip._internal.index.package_finder import PackageFinder -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_set import RequirementSet -from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider -from pip._internal.resolution.resolvelib.provider import PipProvider -from pip._internal.resolution.resolvelib.reporter import ( - PipDebuggingReporter, - PipReporter, -) - -from .base import Candidate, Requirement -from .factory import Factory - -if TYPE_CHECKING: - from pip._vendor.resolvelib.resolvers import Result as RLResult - - Result = RLResult[Requirement, Candidate, str] - - -logger = logging.getLogger(__name__) - - -class Resolver(BaseResolver): - _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} - - def __init__( - self, - preparer: RequirementPreparer, - finder: PackageFinder, - wheel_cache: Optional[WheelCache], - make_install_req: InstallRequirementProvider, - use_user_site: bool, - ignore_dependencies: bool, - ignore_installed: bool, - ignore_requires_python: bool, - force_reinstall: bool, - upgrade_strategy: str, - suppress_build_failures: bool, - py_version_info: Optional[Tuple[int, ...]] = None, - ): - super().__init__() - assert upgrade_strategy in self._allowed_strategies - - self.factory = Factory( - finder=finder, - preparer=preparer, - make_install_req=make_install_req, - wheel_cache=wheel_cache, - use_user_site=use_user_site, - force_reinstall=force_reinstall, - ignore_installed=ignore_installed, - ignore_requires_python=ignore_requires_python, - suppress_build_failures=suppress_build_failures, - py_version_info=py_version_info, - ) - self.ignore_dependencies = ignore_dependencies - self.upgrade_strategy = upgrade_strategy - self._result: Optional[Result] = None - - def resolve( - self, root_reqs: List[InstallRequirement], check_supported_wheels: bool - ) -> RequirementSet: - collected = 
self.factory.collect_root_requirements(root_reqs) - provider = PipProvider( - factory=self.factory, - constraints=collected.constraints, - ignore_dependencies=self.ignore_dependencies, - upgrade_strategy=self.upgrade_strategy, - user_requested=collected.user_requested, - ) - if "PIP_RESOLVER_DEBUG" in os.environ: - reporter: BaseReporter = PipDebuggingReporter() - else: - reporter = PipReporter() - resolver: RLResolver[Requirement, Candidate, str] = RLResolver( - provider, - reporter, - ) - - try: - try_to_avoid_resolution_too_deep = 2000000 - result = self._result = resolver.resolve( - collected.requirements, max_rounds=try_to_avoid_resolution_too_deep - ) - - except ResolutionImpossible as e: - error = self.factory.get_installation_error( - cast("ResolutionImpossible[Requirement, Candidate]", e), - collected.constraints, - ) - raise error from e - - req_set = RequirementSet(check_supported_wheels=check_supported_wheels) - for candidate in result.mapping.values(): - ireq = candidate.get_install_requirement() - if ireq is None: - continue - - # Check if there is already an installation under the same name, - # and set a flag for later stages to uninstall it, if needed. - installed_dist = self.factory.get_dist_to_uninstall(candidate) - if installed_dist is None: - # There is no existing installation -- nothing to uninstall. - ireq.should_reinstall = False - elif self.factory.force_reinstall: - # The --force-reinstall flag is set -- reinstall. - ireq.should_reinstall = True - elif installed_dist.version != candidate.version: - # The installation is different in version -- reinstall. - ireq.should_reinstall = True - elif candidate.is_editable or installed_dist.editable: - # The incoming distribution is editable, or different in - # editable-ness to installation -- reinstall. - ireq.should_reinstall = True - elif candidate.source_link and candidate.source_link.is_file: - # The incoming distribution is under file:// - if candidate.source_link.is_wheel: - # is a local wheel -- do nothing. - logger.info( - "%s is already installed with the same version as the " - "provided wheel. Use --force-reinstall to force an " - "installation of the wheel.", - ireq.name, - ) - continue - - # is a local sdist or path -- reinstall - ireq.should_reinstall = True - else: - continue - - link = candidate.source_link - if link and link.is_yanked: - # The reason can contain non-ASCII characters, Unicode - # is required for Python 2. - msg = ( - "The candidate selected for download or install is a " - "yanked version: {name!r} candidate (version {version} " - "at {link})\nReason for being yanked: {reason}" - ).format( - name=candidate.name, - version=candidate.version, - link=link, - reason=link.yanked_reason or "", - ) - logger.warning(msg) - - req_set.add_named_requirement(ireq) - - reqs = req_set.all_requirements - self.factory.preparer.prepare_linked_requirements_more(reqs) - return req_set - - def get_installation_order( - self, req_set: RequirementSet - ) -> List[InstallRequirement]: - """Get order for installation of requirements in RequirementSet. - - The returned list contains a requirement before another that depends on - it. This helps ensure that the environment is kept consistent as they - get installed one-by-one. - - The current implementation creates a topological ordering of the - dependency graph, giving more weight to packages with less - or no dependencies, while breaking any cycles in the graph at - arbitrary points. 
We make no guarantees about where the cycle - would be broken, other than it *would* be broken. - """ - assert self._result is not None, "must call resolve() first" - - if not req_set.requirements: - # Nothing is left to install, so we do not need an order. - return [] - - graph = self._result.graph - weights = get_topological_weights(graph, set(req_set.requirements.keys())) - - sorted_items = sorted( - req_set.requirements.items(), - key=functools.partial(_req_set_item_sorter, weights=weights), - reverse=True, - ) - return [ireq for _, ireq in sorted_items] - - -def get_topological_weights( - graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str] -) -> Dict[Optional[str], int]: - """Assign weights to each node based on how "deep" they are. - - This implementation may change at any point in the future without prior - notice. - - We first simplify the dependency graph by pruning any leaves and giving them - the highest weight: a package without any dependencies should be installed - first. This is done again and again in the same way, giving ever less weight - to the newly found leaves. The loop stops when no leaves are left: all - remaining packages have at least one dependency left in the graph. - - Then we continue with the remaining graph, by taking the length for the - longest path to any node from root, ignoring any paths that contain a single - node twice (i.e. cycles). This is done through a depth-first search through - the graph, while keeping track of the path to the node. - - Cycles in the graph result would result in node being revisited while also - being on its own path. In this case, take no action. This helps ensure we - don't get stuck in a cycle. - - When assigning weight, the longer path (i.e. larger length) is preferred. - - We are only interested in the weights of packages that are in the - requirement_keys. - """ - path: Set[Optional[str]] = set() - weights: Dict[Optional[str], int] = {} - - def visit(node: Optional[str]) -> None: - if node in path: - # We hit a cycle, so we'll break it here. - return - - # Time to visit the children! - path.add(node) - for child in graph.iter_children(node): - visit(child) - path.remove(node) - - if node not in requirement_keys: - return - - last_known_parent_count = weights.get(node, 0) - weights[node] = max(last_known_parent_count, len(path)) - - # Simplify the graph, pruning leaves that have no dependencies. - # This is needed for large graphs (say over 200 packages) because the - # `visit` function is exponentially slower then, taking minutes. - # See https://github.com/pypa/pip/issues/10557 - # We will loop until we explicitly break the loop. - while True: - leaves = set() - for key in graph: - if key is None: - continue - for _child in graph.iter_children(key): - # This means we have at least one child - break - else: - # No child. - leaves.add(key) - if not leaves: - # We are done simplifying. - break - # Calculate the weight for the leaves. - weight = len(graph) - 1 - for leaf in leaves: - if leaf not in requirement_keys: - continue - weights[leaf] = weight - # Remove the leaves from the graph, making it simpler. - for leaf in leaves: - graph.remove(leaf) - - # Visit the remaining graph. - # `None` is guaranteed to be the root node by resolvelib. - visit(None) - - # Sanity check: all requirement keys should be in the weights, - # and no other keys should be in the weights. 
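A toy, non-pip sketch of the leaf-pruning idea described in the docstring above, on an invented three-package graph (each package maps to the packages it depends on; dependency-free packages get the largest weight and are installed first):

graph = {"app": {"lib"}, "lib": {"base"}, "base": set()}
weights = {}
remaining = dict(graph)
while remaining:
    # A leaf has no dependencies left in the remaining graph.
    leaves = {k for k, deps in remaining.items() if not deps & remaining.keys()}
    for leaf in leaves:
        weights[leaf] = len(remaining) - 1
    for leaf in leaves:
        del remaining[leaf]

order = sorted(weights, key=lambda k: (weights[k], k), reverse=True)
print(weights)  # {'base': 2, 'lib': 1, 'app': 0}
print(order)    # ['base', 'lib', 'app'] -- dependencies before dependents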
- difference = set(weights.keys()).difference(requirement_keys) - assert not difference, difference - - return weights - - -def _req_set_item_sorter( - item: Tuple[str, InstallRequirement], - weights: Dict[Optional[str], int], -) -> Tuple[int, str]: - """Key function used to sort install requirements for installation. - - Based on the "weight" mapping calculated in ``get_installation_order()``. - The canonical package name is returned as the second member as a tie- - breaker to ensure the result is predictable, which is useful in tests. - """ - name = canonicalize_name(item[0]) - return weights[name], name diff --git a/venv/Lib/site-packages/pip/_internal/self_outdated_check.py b/venv/Lib/site-packages/pip/_internal/self_outdated_check.py deleted file mode 100644 index 7300e0e..0000000 --- a/venv/Lib/site-packages/pip/_internal/self_outdated_check.py +++ /dev/null @@ -1,189 +0,0 @@ -import datetime -import hashlib -import json -import logging -import optparse -import os.path -import sys -from typing import Any, Dict - -from pip._vendor.packaging.version import parse as parse_version - -from pip._internal.index.collector import LinkCollector -from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import get_default_environment -from pip._internal.models.selection_prefs import SelectionPreferences -from pip._internal.network.session import PipSession -from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace -from pip._internal.utils.misc import ensure_dir - -SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" - - -logger = logging.getLogger(__name__) - - -def _get_statefile_name(key: str) -> str: - key_bytes = key.encode() - name = hashlib.sha224(key_bytes).hexdigest() - return name - - -class SelfCheckState: - def __init__(self, cache_dir: str) -> None: - self.state: Dict[str, Any] = {} - self.statefile_path = None - - # Try to load the existing state - if cache_dir: - self.statefile_path = os.path.join( - cache_dir, "selfcheck", _get_statefile_name(self.key) - ) - try: - with open(self.statefile_path, encoding="utf-8") as statefile: - self.state = json.load(statefile) - except (OSError, ValueError, KeyError): - # Explicitly suppressing exceptions, since we don't want to - # error out if the cache file is invalid. - pass - - @property - def key(self) -> str: - return sys.prefix - - def save(self, pypi_version: str, current_time: datetime.datetime) -> None: - # If we do not have a path to cache in, don't bother saving. - if not self.statefile_path: - return - - # Check to make sure that we own the directory - if not check_path_owner(os.path.dirname(self.statefile_path)): - return - - # Now that we've ensured the directory is owned by this user, we'll go - # ahead and make sure that all our directories are created. - ensure_dir(os.path.dirname(self.statefile_path)) - - state = { - # Include the key so it's easy to tell which pip wrote the - # file. - "key": self.key, - "last_check": current_time.strftime(SELFCHECK_DATE_FMT), - "pypi_version": pypi_version, - } - - text = json.dumps(state, sort_keys=True, separators=(",", ":")) - - with adjacent_tmp_file(self.statefile_path) as f: - f.write(text.encode()) - - try: - # Since we have a prefix-specific state file, we can just - # overwrite whatever is there, no need to check. - replace(f.name, self.statefile_path) - except OSError: - # Best effort. 
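For reference, the state-file name computed above is simply a SHA-224 digest of sys.prefix:

import hashlib
import sys

print(hashlib.sha224(sys.prefix.encode()).hexdigest())  # 56 hex characters, stable per environment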
- pass - - -def was_installed_by_pip(pkg: str) -> bool: - """Checks whether pkg was installed by pip - - This is used not to display the upgrade message when pip is in fact - installed by system package manager, such as dnf on Fedora. - """ - dist = get_default_environment().get_distribution(pkg) - return dist is not None and "pip" == dist.installer - - -def pip_self_version_check(session: PipSession, options: optparse.Values) -> None: - """Check for an update for pip. - - Limit the frequency of checks to once per week. State is stored either in - the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix - of the pip script path. - """ - installed_dist = get_default_environment().get_distribution("pip") - if not installed_dist: - return - - pip_version = installed_dist.version - pypi_version = None - - try: - state = SelfCheckState(cache_dir=options.cache_dir) - - current_time = datetime.datetime.utcnow() - # Determine if we need to refresh the state - if "last_check" in state.state and "pypi_version" in state.state: - last_check = datetime.datetime.strptime( - state.state["last_check"], SELFCHECK_DATE_FMT - ) - if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: - pypi_version = state.state["pypi_version"] - - # Refresh the version if we need to or just see if we need to warn - if pypi_version is None: - # Lets use PackageFinder to see what the latest pip version is - link_collector = LinkCollector.create( - session, - options=options, - suppress_no_index=True, - ) - - # Pass allow_yanked=False so we don't suggest upgrading to a - # yanked version. - selection_prefs = SelectionPreferences( - allow_yanked=False, - allow_all_prereleases=False, # Explicitly set to False - ) - - finder = PackageFinder.create( - link_collector=link_collector, - selection_prefs=selection_prefs, - use_deprecated_html5lib=( - "html5lib" in options.deprecated_features_enabled - ), - ) - best_candidate = finder.find_best_candidate("pip").best_candidate - if best_candidate is None: - return - pypi_version = str(best_candidate.version) - - # save that we've performed a check - state.save(pypi_version, current_time) - - remote_version = parse_version(pypi_version) - - local_version_is_older = ( - pip_version < remote_version - and pip_version.base_version != remote_version.base_version - and was_installed_by_pip("pip") - ) - - # Determine if our pypi_version is older - if not local_version_is_older: - return - - # We cannot tell how the current pip is available in the current - # command context, so be pragmatic here and suggest the command - # that's always available. This does not accommodate spaces in - # `sys.executable` on purpose as it is not possible to do it - # correctly without knowing the user's shell. Thus, - # it won't be done until possible through the standard library. - # Do not be tempted to use the undocumented subprocess.list2cmdline. - # It is considered an internal implementation detail for a reason. 
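The once-per-week gate used above amounts to a plain timedelta comparison (the timestamps below are invented):

import datetime

last_check = datetime.datetime(2022, 3, 1, 12, 0, 0)
now = datetime.datetime(2022, 3, 5, 12, 0, 0)
print((now - last_check).total_seconds() < 7 * 24 * 60 * 60)  # True: reuse the cached pypi_version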
- pip_cmd = f"{sys.executable} -m pip" - logger.warning( - "You are using pip version %s; however, version %s is " - "available.\nYou should consider upgrading via the " - "'%s install --upgrade pip' command.", - pip_version, - pypi_version, - pip_cmd, - ) - except Exception: - logger.debug( - "There was an error checking the latest version of pip", - exc_info=True, - ) diff --git a/venv/Lib/site-packages/pip/_internal/utils/__init__.py b/venv/Lib/site-packages/pip/_internal/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 32e73e2..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc deleted file mode 100644 index 427de8d..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc deleted file mode 100644 index 356a925..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc deleted file mode 100644 index 418b8d7..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc deleted file mode 100644 index f1c3920..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc deleted file mode 100644 index 60d7b59..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc deleted file mode 100644 index 4576bed..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc deleted file mode 100644 index 2ef1a12..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc deleted file mode 100644 index 88fdffa..0000000 Binary files 
a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc deleted file mode 100644 index 66ca100..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc deleted file mode 100644 index 2e1d0d8..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc deleted file mode 100644 index c105a05..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc deleted file mode 100644 index b4bf69d..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc deleted file mode 100644 index b3c1c73..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc deleted file mode 100644 index 89024d9..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc deleted file mode 100644 index 300d268..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc deleted file mode 100644 index aa550fc..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc deleted file mode 100644 index b363665..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc deleted file mode 100644 index ac6c827..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc deleted file mode 100644 index ff6d910..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc deleted file mode 100644 index 498360c..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc deleted file mode 100644 index 7c9a9dd..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc deleted file mode 100644 index fa53fa9..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc deleted file mode 100644 index 7d1a15c..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc deleted file mode 100644 index 406d643..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc deleted file mode 100644 index b70a82f..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc deleted file mode 100644 index 7f51618..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index 6ea7a14..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/utils/_log.py b/venv/Lib/site-packages/pip/_internal/utils/_log.py deleted file mode 100644 index 92c4c6a..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/_log.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Customize logging - -Defines custom logger class for the `logger.verbose(...)` method. - -init_logging() must be called before any other modules that call logging.getLogger. 
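The _log module below builds on standard-library logging hooks; a minimal standalone sketch of registering a level between DEBUG and INFO (names here are illustrative, not pip's API):

import logging

VERBOSE = 15  # sits between DEBUG (10) and INFO (20), as in the module below
logging.addLevelName(VERBOSE, "VERBOSE")
logging.basicConfig(level=VERBOSE)
logging.getLogger("demo").log(VERBOSE, "shown at verbose level, filtered out at INFO-only")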
-""" - -import logging -from typing import Any, cast - -# custom log level for `--verbose` output -# between DEBUG and INFO -VERBOSE = 15 - - -class VerboseLogger(logging.Logger): - """Custom Logger, defining a verbose log-level - - VERBOSE is between INFO and DEBUG. - """ - - def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None: - return self.log(VERBOSE, msg, *args, **kwargs) - - -def getLogger(name: str) -> VerboseLogger: - """logging.getLogger, but ensures our VerboseLogger class is returned""" - return cast(VerboseLogger, logging.getLogger(name)) - - -def init_logging() -> None: - """Register our VerboseLogger and VERBOSE log level. - - Should be called before any calls to getLogger(), - i.e. in pip._internal.__init__ - """ - logging.setLoggerClass(VerboseLogger) - logging.addLevelName(VERBOSE, "VERBOSE") diff --git a/venv/Lib/site-packages/pip/_internal/utils/appdirs.py b/venv/Lib/site-packages/pip/_internal/utils/appdirs.py deleted file mode 100644 index 16933bf..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/appdirs.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -This code wraps the vendored appdirs module to so the return values are -compatible for the current pip code base. - -The intention is to rewrite current usages gradually, keeping the tests pass, -and eventually drop this after all usages are changed. -""" - -import os -import sys -from typing import List - -from pip._vendor import platformdirs as _appdirs - - -def user_cache_dir(appname: str) -> str: - return _appdirs.user_cache_dir(appname, appauthor=False) - - -def _macos_user_config_dir(appname: str, roaming: bool = True) -> str: - # Use ~/Application Support/pip, if the directory exists. - path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming) - if os.path.isdir(path): - return path - - # Use a Linux-like ~/.config/pip, by default. - linux_like_path = "~/.config/" - if appname: - linux_like_path = os.path.join(linux_like_path, appname) - - return os.path.expanduser(linux_like_path) - - -def user_config_dir(appname: str, roaming: bool = True) -> str: - if sys.platform == "darwin": - return _macos_user_config_dir(appname, roaming) - - return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) - - -# for the discussion regarding site_config_dir locations -# see -def site_config_dirs(appname: str) -> List[str]: - if sys.platform == "darwin": - return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)] - - dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) - if sys.platform == "win32": - return [dirval] - - # Unix-y system. Look in /etc as well. - return dirval.split(os.pathsep) + ["/etc"] diff --git a/venv/Lib/site-packages/pip/_internal/utils/compat.py b/venv/Lib/site-packages/pip/_internal/utils/compat.py deleted file mode 100644 index 3f4d300..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/compat.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Stuff that differs in different Python versions and platform -distributions.""" - -import logging -import os -import sys - -__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"] - - -logger = logging.getLogger(__name__) - - -def has_tls() -> bool: - try: - import _ssl # noqa: F401 # ignore unused - - return True - except ImportError: - pass - - from pip._vendor.urllib3.util import IS_PYOPENSSL - - return IS_PYOPENSSL - - -def get_path_uid(path: str) -> int: - """ - Return path's uid. 
- - Does not follow symlinks: - https://github.com/pypa/pip/pull/935#discussion_r5307003 - - Placed this function in compat due to differences on AIX and - Jython, that should eventually go away. - - :raises OSError: When path is a symlink or can't be read. - """ - if hasattr(os, "O_NOFOLLOW"): - fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) - file_uid = os.fstat(fd).st_uid - os.close(fd) - else: # AIX and Jython - # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW - if not os.path.islink(path): - # older versions of Jython don't have `os.fstat` - file_uid = os.stat(path).st_uid - else: - # raise OSError for parity with os.O_NOFOLLOW above - raise OSError(f"{path} is a symlink; Will not return uid for symlinks") - return file_uid - - -# packages in the stdlib that may have installation metadata, but should not be -# considered 'installed'. this theoretically could be determined based on -# dist.location (py27:`sysconfig.get_paths()['stdlib']`, -# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may -# make this ineffective, so hard-coding -stdlib_pkgs = {"python", "wsgiref", "argparse"} - - -# windows detection, covers cpython and ironpython -WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") diff --git a/venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py b/venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py deleted file mode 100644 index b6ed9a7..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py +++ /dev/null @@ -1,165 +0,0 @@ -"""Generate and work with PEP 425 Compatibility Tags. -""" - -import re -from typing import List, Optional, Tuple - -from pip._vendor.packaging.tags import ( - PythonVersion, - Tag, - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) - -_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") - - -def version_info_to_nodot(version_info: Tuple[int, ...]) -> str: - # Only use up to the first two numbers. - return "".join(map(str, version_info[:2])) - - -def _mac_platforms(arch: str) -> List[str]: - match = _osx_arch_pat.match(arch) - if match: - name, major, minor, actual_arch = match.groups() - mac_version = (int(major), int(minor)) - arches = [ - # Since we have always only checked that the platform starts - # with "macosx", for backwards-compatibility we extract the - # actual prefix provided by the user in case they provided - # something like "macosxcustom_". It may be good to remove - # this as undocumented or deprecate it in the future. - "{}_{}".format(name, arch[len("macosx_") :]) - for arch in mac_platforms(mac_version, actual_arch) - ] - else: - # arch pattern didn't match (?!) - arches = [arch] - return arches - - -def _custom_manylinux_platforms(arch: str) -> List[str]: - arches = [arch] - arch_prefix, arch_sep, arch_suffix = arch.partition("_") - if arch_prefix == "manylinux2014": - # manylinux1/manylinux2010 wheels run on most manylinux2014 systems - # with the exception of wheels depending on ncurses. 
PEP 599 states - # manylinux1/manylinux2010 wheels should be considered - # manylinux2014 wheels: - # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels - if arch_suffix in {"i686", "x86_64"}: - arches.append("manylinux2010" + arch_sep + arch_suffix) - arches.append("manylinux1" + arch_sep + arch_suffix) - elif arch_prefix == "manylinux2010": - # manylinux1 wheels run on most manylinux2010 systems with the - # exception of wheels depending on ncurses. PEP 571 states - # manylinux1 wheels should be considered manylinux2010 wheels: - # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches.append("manylinux1" + arch_sep + arch_suffix) - return arches - - -def _get_custom_platforms(arch: str) -> List[str]: - arch_prefix, arch_sep, arch_suffix = arch.partition("_") - if arch.startswith("macosx"): - arches = _mac_platforms(arch) - elif arch_prefix in ["manylinux2014", "manylinux2010"]: - arches = _custom_manylinux_platforms(arch) - else: - arches = [arch] - return arches - - -def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]: - if not platforms: - return None - - seen = set() - result = [] - - for p in platforms: - if p in seen: - continue - additions = [c for c in _get_custom_platforms(p) if c not in seen] - seen.update(additions) - result.extend(additions) - - return result - - -def _get_python_version(version: str) -> PythonVersion: - if len(version) > 1: - return int(version[0]), int(version[1:]) - else: - return (int(version[0]),) - - -def _get_custom_interpreter( - implementation: Optional[str] = None, version: Optional[str] = None -) -> str: - if implementation is None: - implementation = interpreter_name() - if version is None: - version = interpreter_version() - return f"{implementation}{version}" - - -def get_supported( - version: Optional[str] = None, - platforms: Optional[List[str]] = None, - impl: Optional[str] = None, - abis: Optional[List[str]] = None, -) -> List[Tag]: - """Return a list of supported tags for each version specified in - `versions`. - - :param version: a string version, of the form "33" or "32", - or None. The version will be assumed to support our ABI. - :param platform: specify a list of platforms you want valid - tags for, or None. If None, use the local system platform. - :param impl: specify the exact implementation you want valid - tags for, or None. If None, use the local interpreter impl. - :param abis: specify a list of abis you want valid - tags for, or None. If None, use the local interpreter abi. 
- """ - supported: List[Tag] = [] - - python_version: Optional[PythonVersion] = None - if version is not None: - python_version = _get_python_version(version) - - interpreter = _get_custom_interpreter(impl, version) - - platforms = _expand_allowed_platforms(platforms) - - is_cpython = (impl or interpreter_name()) == "cp" - if is_cpython: - supported.extend( - cpython_tags( - python_version=python_version, - abis=abis, - platforms=platforms, - ) - ) - else: - supported.extend( - generic_tags( - interpreter=interpreter, - abis=abis, - platforms=platforms, - ) - ) - supported.extend( - compatible_tags( - python_version=python_version, - interpreter=interpreter, - platforms=platforms, - ) - ) - - return supported diff --git a/venv/Lib/site-packages/pip/_internal/utils/datetime.py b/venv/Lib/site-packages/pip/_internal/utils/datetime.py deleted file mode 100644 index 8668b3b..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/datetime.py +++ /dev/null @@ -1,11 +0,0 @@ -"""For when pip wants to check the date or time. -""" - -import datetime - - -def today_is_later_than(year: int, month: int, day: int) -> bool: - today = datetime.date.today() - given = datetime.date(year, month, day) - - return today > given diff --git a/venv/Lib/site-packages/pip/_internal/utils/deprecation.py b/venv/Lib/site-packages/pip/_internal/utils/deprecation.py deleted file mode 100644 index 72bd6f2..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/deprecation.py +++ /dev/null @@ -1,120 +0,0 @@ -""" -A module that implements tooling to enable easy warnings about deprecations. -""" - -import logging -import warnings -from typing import Any, Optional, TextIO, Type, Union - -from pip._vendor.packaging.version import parse - -from pip import __version__ as current_version # NOTE: tests patch this name. - -DEPRECATION_MSG_PREFIX = "DEPRECATION: " - - -class PipDeprecationWarning(Warning): - pass - - -_original_showwarning: Any = None - - -# Warnings <-> Logging Integration -def _showwarning( - message: Union[Warning, str], - category: Type[Warning], - filename: str, - lineno: int, - file: Optional[TextIO] = None, - line: Optional[str] = None, -) -> None: - if file is not None: - if _original_showwarning is not None: - _original_showwarning(message, category, filename, lineno, file, line) - elif issubclass(category, PipDeprecationWarning): - # We use a specially named logger which will handle all of the - # deprecation messages for pip. - logger = logging.getLogger("pip._internal.deprecations") - logger.warning(message) - else: - _original_showwarning(message, category, filename, lineno, file, line) - - -def install_warning_logger() -> None: - # Enable our Deprecation Warnings - warnings.simplefilter("default", PipDeprecationWarning, append=True) - - global _original_showwarning - - if _original_showwarning is None: - _original_showwarning = warnings.showwarning - warnings.showwarning = _showwarning - - -def deprecated( - *, - reason: str, - replacement: Optional[str], - gone_in: Optional[str], - feature_flag: Optional[str] = None, - issue: Optional[int] = None, -) -> None: - """Helper to deprecate existing functionality. - - reason: - Textual reason shown to the user about why this functionality has - been deprecated. Should be a complete sentence. - replacement: - Textual suggestion shown to the user about what alternative - functionality they can use. - gone_in: - The version of pip does this functionality should get removed in. - Raises an error if pip's current version is greater than or equal to - this. 
- feature_flag: - Command-line flag of the form --use-feature={feature_flag} for testing - upcoming functionality. - issue: - Issue number on the tracker that would serve as a useful place for - users to find related discussion and provide feedback. - """ - - # Determine whether or not the feature is already gone in this version. - is_gone = gone_in is not None and parse(current_version) >= parse(gone_in) - - message_parts = [ - (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"), - ( - gone_in, - "pip {} will enforce this behaviour change." - if not is_gone - else "Since pip {}, this is no longer supported.", - ), - ( - replacement, - "A possible replacement is {}.", - ), - ( - feature_flag, - "You can use the flag --use-feature={} to test the upcoming behaviour." - if not is_gone - else None, - ), - ( - issue, - "Discussion can be found at https://github.com/pypa/pip/issues/{}", - ), - ] - - message = " ".join( - format_str.format(value) - for value, format_str in message_parts - if format_str is not None and value is not None - ) - - # Raise as an error if this behaviour is deprecated. - if is_gone: - raise PipDeprecationWarning(message) - - warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/venv/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py b/venv/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py deleted file mode 100644 index 0e8e5e1..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py +++ /dev/null @@ -1,87 +0,0 @@ -from typing import Optional - -from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo -from pip._internal.models.link import Link -from pip._internal.utils.urls import path_to_url -from pip._internal.vcs import vcs - - -def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str: - """Convert a DirectUrl to a pip requirement string.""" - direct_url.validate() # if invalid, this is a pip bug - requirement = name + " @ " - fragments = [] - if isinstance(direct_url.info, VcsInfo): - requirement += "{}+{}@{}".format( - direct_url.info.vcs, direct_url.url, direct_url.info.commit_id - ) - elif isinstance(direct_url.info, ArchiveInfo): - requirement += direct_url.url - if direct_url.info.hash: - fragments.append(direct_url.info.hash) - else: - assert isinstance(direct_url.info, DirInfo) - requirement += direct_url.url - if direct_url.subdirectory: - fragments.append("subdirectory=" + direct_url.subdirectory) - if fragments: - requirement += "#" + "&".join(fragments) - return requirement - - -def direct_url_for_editable(source_dir: str) -> DirectUrl: - return DirectUrl( - url=path_to_url(source_dir), - info=DirInfo(editable=True), - ) - - -def direct_url_from_link( - link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False -) -> DirectUrl: - if link.is_vcs: - vcs_backend = vcs.get_backend_for_scheme(link.scheme) - assert vcs_backend - url, requested_revision, _ = vcs_backend.get_url_rev_and_auth( - link.url_without_fragment - ) - # For VCS links, we need to find out and add commit_id. - if link_is_in_wheel_cache: - # If the requested VCS link corresponds to a cached - # wheel, it means the requested revision was an - # immutable commit hash, otherwise it would not have - # been cached. In that case we don't have a source_dir - # with the VCS checkout. 
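The direct_url_as_pep440_direct_reference helper deleted above formats a DirectUrl into a PEP 440 direct reference string. A minimal standalone sketch of the same formatting rules for the VCS case follows; the package name, URL and commit are hypothetical and no pip imports are used.

from typing import Optional


def vcs_direct_reference(
    name: str,
    vcs: str,
    url: str,
    commit_id: str,
    subdirectory: Optional[str] = None,
) -> str:
    # Mirrors the formatting above: name @ vcs+url@commit[#subdirectory=...]
    requirement = f"{name} @ {vcs}+{url}@{commit_id}"
    if subdirectory:
        requirement += f"#subdirectory={subdirectory}"
    return requirement


print(vcs_direct_reference("example-pkg", "git", "https://example.com/repo.git", "0123abc"))
# -> example-pkg @ git+https://example.com/repo.git@0123abc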
- assert requested_revision - commit_id = requested_revision - else: - # If the wheel was not in cache, it means we have - # had to checkout from VCS to build and we have a source_dir - # which we can inspect to find out the commit id. - assert source_dir - commit_id = vcs_backend.get_revision(source_dir) - return DirectUrl( - url=url, - info=VcsInfo( - vcs=vcs_backend.name, - commit_id=commit_id, - requested_revision=requested_revision, - ), - subdirectory=link.subdirectory_fragment, - ) - elif link.is_existing_dir(): - return DirectUrl( - url=link.url_without_fragment, - info=DirInfo(), - subdirectory=link.subdirectory_fragment, - ) - else: - hash = None - hash_name = link.hash_name - if hash_name: - hash = f"{hash_name}={link.hash}" - return DirectUrl( - url=link.url_without_fragment, - info=ArchiveInfo(hash=hash), - subdirectory=link.subdirectory_fragment, - ) diff --git a/venv/Lib/site-packages/pip/_internal/utils/distutils_args.py b/venv/Lib/site-packages/pip/_internal/utils/distutils_args.py deleted file mode 100644 index e4aa5b8..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/distutils_args.py +++ /dev/null @@ -1,42 +0,0 @@ -from distutils.errors import DistutilsArgError -from distutils.fancy_getopt import FancyGetopt -from typing import Dict, List - -_options = [ - ("exec-prefix=", None, ""), - ("home=", None, ""), - ("install-base=", None, ""), - ("install-data=", None, ""), - ("install-headers=", None, ""), - ("install-lib=", None, ""), - ("install-platlib=", None, ""), - ("install-purelib=", None, ""), - ("install-scripts=", None, ""), - ("prefix=", None, ""), - ("root=", None, ""), - ("user", None, ""), -] - - -# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. -_distutils_getopt = FancyGetopt(_options) # type: ignore - - -def parse_distutils_args(args: List[str]) -> Dict[str, str]: - """Parse provided arguments, returning an object that has the - matched arguments. - - Any unknown arguments are ignored. - """ - result = {} - for arg in args: - try: - _, match = _distutils_getopt.getopt(args=[arg]) - except DistutilsArgError: - # We don't care about any other options, which here may be - # considered unrecognized since our option list is not - # exhaustive. - pass - else: - result.update(match.__dict__) - return result diff --git a/venv/Lib/site-packages/pip/_internal/utils/egg_link.py b/venv/Lib/site-packages/pip/_internal/utils/egg_link.py deleted file mode 100644 index 9e0da8d..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/egg_link.py +++ /dev/null @@ -1,75 +0,0 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import os -import re -import sys -from typing import Optional - -from pip._internal.locations import site_packages, user_site -from pip._internal.utils.virtualenv import ( - running_under_virtualenv, - virtualenv_no_global, -) - -__all__ = [ - "egg_link_path_from_sys_path", - "egg_link_path_from_location", -] - - -def _egg_link_name(raw_name: str) -> str: - """ - Convert a Name metadata value to a .egg-link name, by applying - the same substitution as pkg_resources's safe_name function. - Note: we cannot use canonicalize_name because it has a different logic. - """ - return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link" - - -def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]: - """ - Look for a .egg-link file for project name, by walking sys.path. 
- """ - egg_link_name = _egg_link_name(raw_name) - for path_item in sys.path: - egg_link = os.path.join(path_item, egg_link_name) - if os.path.isfile(egg_link): - return egg_link - return None - - -def egg_link_path_from_location(raw_name: str) -> Optional[str]: - """ - Return the path for the .egg-link file if it exists, otherwise, None. - - There's 3 scenarios: - 1) not in a virtualenv - try to find in site.USER_SITE, then site_packages - 2) in a no-global virtualenv - try to find in site_packages - 3) in a yes-global virtualenv - try to find in site_packages, then site.USER_SITE - (don't look in global location) - - For #1 and #3, there could be odd cases, where there's an egg-link in 2 - locations. - - This method will just return the first one found. - """ - sites = [] - if running_under_virtualenv(): - sites.append(site_packages) - if not virtualenv_no_global() and user_site: - sites.append(user_site) - else: - if user_site: - sites.append(user_site) - sites.append(site_packages) - - egg_link_name = _egg_link_name(raw_name) - for site in sites: - egglink = os.path.join(site, egg_link_name) - if os.path.isfile(egglink): - return egglink - return None diff --git a/venv/Lib/site-packages/pip/_internal/utils/encoding.py b/venv/Lib/site-packages/pip/_internal/utils/encoding.py deleted file mode 100644 index 1c73f6c..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/encoding.py +++ /dev/null @@ -1,36 +0,0 @@ -import codecs -import locale -import re -import sys -from typing import List, Tuple - -BOMS: List[Tuple[bytes, str]] = [ - (codecs.BOM_UTF8, "utf-8"), - (codecs.BOM_UTF16, "utf-16"), - (codecs.BOM_UTF16_BE, "utf-16-be"), - (codecs.BOM_UTF16_LE, "utf-16-le"), - (codecs.BOM_UTF32, "utf-32"), - (codecs.BOM_UTF32_BE, "utf-32-be"), - (codecs.BOM_UTF32_LE, "utf-32-le"), -] - -ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)") - - -def auto_decode(data: bytes) -> str: - """Check a bytes string for a BOM to correctly detect the encoding - - Fallback to locale.getpreferredencoding(False) like open() on Python3""" - for bom, encoding in BOMS: - if data.startswith(bom): - return data[len(bom) :].decode(encoding) - # Lets check the first two lines as in PEP263 - for line in data.split(b"\n")[:2]: - if line[0:1] == b"#" and ENCODING_RE.search(line): - result = ENCODING_RE.search(line) - assert result is not None - encoding = result.groups()[0].decode("ascii") - return data.decode(encoding) - return data.decode( - locale.getpreferredencoding(False) or sys.getdefaultencoding(), - ) diff --git a/venv/Lib/site-packages/pip/_internal/utils/entrypoints.py b/venv/Lib/site-packages/pip/_internal/utils/entrypoints.py deleted file mode 100644 index 1504a12..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/entrypoints.py +++ /dev/null @@ -1,27 +0,0 @@ -import sys -from typing import List, Optional - -from pip._internal.cli.main import main - - -def _wrapper(args: Optional[List[str]] = None) -> int: - """Central wrapper for all old entrypoints. - - Historically pip has had several entrypoints defined. Because of issues - arising from PATH, sys.path, multiple Pythons, their interactions, and most - of them having a pip installed, users suffer every time an entrypoint gets - moved. - - To alleviate this pain, and provide a mechanism for warning users and - directing them to an appropriate place for help, we now define all of - our old entrypoints as wrappers for the current one. - """ - sys.stderr.write( - "WARNING: pip is being invoked by an old script wrapper. 
This will " - "fail in a future version of pip.\n" - "Please see https://github.com/pypa/pip/issues/5599 for advice on " - "fixing the underlying issue.\n" - "To avoid this problem you can invoke Python with '-m pip' instead of " - "running pip directly.\n" - ) - return main(args) diff --git a/venv/Lib/site-packages/pip/_internal/utils/filesystem.py b/venv/Lib/site-packages/pip/_internal/utils/filesystem.py deleted file mode 100644 index b7e6191..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/filesystem.py +++ /dev/null @@ -1,182 +0,0 @@ -import fnmatch -import os -import os.path -import random -import shutil -import stat -import sys -from contextlib import contextmanager -from tempfile import NamedTemporaryFile -from typing import Any, BinaryIO, Iterator, List, Union, cast - -from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed - -from pip._internal.utils.compat import get_path_uid -from pip._internal.utils.misc import format_size - - -def check_path_owner(path: str) -> bool: - # If we don't have a way to check the effective uid of this process, then - # we'll just assume that we own the directory. - if sys.platform == "win32" or not hasattr(os, "geteuid"): - return True - - assert os.path.isabs(path) - - previous = None - while path != previous: - if os.path.lexists(path): - # Check if path is writable by current user. - if os.geteuid() == 0: - # Special handling for root user in order to handle properly - # cases where users use sudo without -H flag. - try: - path_uid = get_path_uid(path) - except OSError: - return False - return path_uid == 0 - else: - return os.access(path, os.W_OK) - else: - previous, path = path, os.path.dirname(path) - return False # assume we don't own the path - - -def copy2_fixed(src: str, dest: str) -> None: - """Wrap shutil.copy2() but map errors copying socket files to - SpecialFileError as expected. - - See also https://bugs.python.org/issue37700. - """ - try: - shutil.copy2(src, dest) - except OSError: - for f in [src, dest]: - try: - is_socket_file = is_socket(f) - except OSError: - # An error has already occurred. Another error here is not - # a problem and we can ignore it. - pass - else: - if is_socket_file: - raise shutil.SpecialFileError(f"`{f}` is a socket") - - raise - - -def is_socket(path: str) -> bool: - return stat.S_ISSOCK(os.lstat(path).st_mode) - - -@contextmanager -def adjacent_tmp_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: - """Return a file-like object pointing to a tmp file next to path. - - The file is created securely and is ensured to be written to disk - after the context reaches its end. - - kwargs will be passed to tempfile.NamedTemporaryFile to control - the way the temporary file will be opened. - """ - with NamedTemporaryFile( - delete=False, - dir=os.path.dirname(path), - prefix=os.path.basename(path), - suffix=".tmp", - **kwargs, - ) as f: - result = cast(BinaryIO, f) - try: - yield result - finally: - result.flush() - os.fsync(result.fileno()) - - -# Tenacity raises RetryError by default, explicitly raise the original exception -_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25)) - -replace = _replace_retry(os.replace) - - -# test_writable_dir and _test_writable_dir_win are copied from Flit, -# with the author's agreement to also place them under pip's license. -def test_writable_dir(path: str) -> bool: - """Check if a directory is writable. - - Uses os.access() on POSIX, tries creating files on Windows. 
- """ - # If the directory doesn't exist, find the closest parent that does. - while not os.path.isdir(path): - parent = os.path.dirname(path) - if parent == path: - break # Should never get here, but infinite loops are bad - path = parent - - if os.name == "posix": - return os.access(path, os.W_OK) - - return _test_writable_dir_win(path) - - -def _test_writable_dir_win(path: str) -> bool: - # os.access doesn't work on Windows: http://bugs.python.org/issue2528 - # and we can't use tempfile: http://bugs.python.org/issue22107 - basename = "accesstest_deleteme_fishfingers_custard_" - alphabet = "abcdefghijklmnopqrstuvwxyz0123456789" - for _ in range(10): - name = basename + "".join(random.choice(alphabet) for _ in range(6)) - file = os.path.join(path, name) - try: - fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) - except FileExistsError: - pass - except PermissionError: - # This could be because there's a directory with the same name. - # But it's highly unlikely there's a directory called that, - # so we'll assume it's because the parent dir is not writable. - # This could as well be because the parent dir is not readable, - # due to non-privileged user access. - return False - else: - os.close(fd) - os.unlink(file) - return True - - # This should never be reached - raise OSError("Unexpected condition testing for writable directory") - - -def find_files(path: str, pattern: str) -> List[str]: - """Returns a list of absolute paths of files beneath path, recursively, - with filenames which match the UNIX-style shell glob pattern.""" - result: List[str] = [] - for root, _, files in os.walk(path): - matches = fnmatch.filter(files, pattern) - result.extend(os.path.join(root, f) for f in matches) - return result - - -def file_size(path: str) -> Union[int, float]: - # If it's a symlink, return 0. - if os.path.islink(path): - return 0 - return os.path.getsize(path) - - -def format_file_size(path: str) -> str: - return format_size(file_size(path)) - - -def directory_size(path: str) -> Union[int, float]: - size = 0.0 - for root, _dirs, files in os.walk(path): - for filename in files: - file_path = os.path.join(root, filename) - size += file_size(file_path) - return size - - -def format_directory_size(path: str) -> str: - return format_size(directory_size(path)) diff --git a/venv/Lib/site-packages/pip/_internal/utils/filetypes.py b/venv/Lib/site-packages/pip/_internal/utils/filetypes.py deleted file mode 100644 index 5948570..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/filetypes.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Filetype information. -""" - -from typing import Tuple - -from pip._internal.utils.misc import splitext - -WHEEL_EXTENSION = ".whl" -BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz") -XZ_EXTENSIONS: Tuple[str, ...] = ( - ".tar.xz", - ".txz", - ".tlz", - ".tar.lz", - ".tar.lzma", -) -ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION) -TAR_EXTENSIONS: Tuple[str, ...] 
= (".tar.gz", ".tgz", ".tar") -ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS - - -def is_archive_file(name: str) -> bool: - """Return True if `name` is a considered as an archive file.""" - ext = splitext(name)[1].lower() - if ext in ARCHIVE_EXTENSIONS: - return True - return False diff --git a/venv/Lib/site-packages/pip/_internal/utils/glibc.py b/venv/Lib/site-packages/pip/_internal/utils/glibc.py deleted file mode 100644 index 7bd3c20..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/glibc.py +++ /dev/null @@ -1,88 +0,0 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import os -import sys -from typing import Optional, Tuple - - -def glibc_version_string() -> Optional[str]: - "Returns glibc version string, or None if not using glibc." - return glibc_version_string_confstr() or glibc_version_string_ctypes() - - -def glibc_version_string_confstr() -> Optional[str]: - "Primary implementation of glibc_version_string using os.confstr." - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module: - # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 - if sys.platform == "win32": - return None - try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = os.confstr("CS_GNU_LIBC_VERSION").split() - except (AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def glibc_version_string_ctypes() -> Optional[str]: - "Fallback implementation of glibc_version_string using ctypes." - - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - process_namespace = ctypes.CDLL(None) - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -# platform.libc_ver regularly returns completely nonsensical glibc -# versions. E.g. on my computer, platform says: -# -# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' -# ('glibc', '2.7') -# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' -# ('glibc', '2.9') -# -# But the truth is: -# -# ~$ ldd --version -# ldd (Debian GLIBC 2.22-11) 2.22 -# -# This is unfortunate, because it means that the linehaul data on libc -# versions that was generated by pip 8.1.2 and earlier is useless and -# misleading. Solution: instead of using platform, use our code that actually -# works. -def libc_ver() -> Tuple[str, str]: - """Try to determine the glibc version - - Returns a tuple of strings (lib, version) which default to empty strings - in case the lookup fails. 
- """ - glibc_version = glibc_version_string() - if glibc_version is None: - return ("", "") - else: - return ("glibc", glibc_version) diff --git a/venv/Lib/site-packages/pip/_internal/utils/hashes.py b/venv/Lib/site-packages/pip/_internal/utils/hashes.py deleted file mode 100644 index 82eb035..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/hashes.py +++ /dev/null @@ -1,144 +0,0 @@ -import hashlib -from typing import TYPE_CHECKING, BinaryIO, Dict, Iterator, List - -from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError -from pip._internal.utils.misc import read_chunks - -if TYPE_CHECKING: - from hashlib import _Hash - - # NoReturn introduced in 3.6.2; imported only for type checking to maintain - # pip compatibility with older patch versions of Python 3.6 - from typing import NoReturn - - -# The recommended hash algo of the moment. Change this whenever the state of -# the art changes; it won't hurt backward compatibility. -FAVORITE_HASH = "sha256" - - -# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` -# Currently, those are the ones at least as collision-resistant as sha256. -STRONG_HASHES = ["sha256", "sha384", "sha512"] - - -class Hashes: - """A wrapper that builds multiple hashes at once and checks them against - known-good values - - """ - - def __init__(self, hashes: Dict[str, List[str]] = None) -> None: - """ - :param hashes: A dict of algorithm names pointing to lists of allowed - hex digests - """ - allowed = {} - if hashes is not None: - for alg, keys in hashes.items(): - # Make sure values are always sorted (to ease equality checks) - allowed[alg] = sorted(keys) - self._allowed = allowed - - def __and__(self, other: "Hashes") -> "Hashes": - if not isinstance(other, Hashes): - return NotImplemented - - # If either of the Hashes object is entirely empty (i.e. no hash - # specified at all), all hashes from the other object are allowed. - if not other: - return self - if not self: - return other - - # Otherwise only hashes that present in both objects are allowed. - new = {} - for alg, values in other._allowed.items(): - if alg not in self._allowed: - continue - new[alg] = [v for v in values if v in self._allowed[alg]] - return Hashes(new) - - @property - def digest_count(self) -> int: - return sum(len(digests) for digests in self._allowed.values()) - - def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool: - """Return whether the given hex digest is allowed.""" - return hex_digest in self._allowed.get(hash_name, []) - - def check_against_chunks(self, chunks: Iterator[bytes]) -> None: - """Check good hashes against ones built from iterable of chunks of - data. - - Raise HashMismatch if none match. - - """ - gots = {} - for hash_name in self._allowed.keys(): - try: - gots[hash_name] = hashlib.new(hash_name) - except (ValueError, TypeError): - raise InstallationError(f"Unknown hash name: {hash_name}") - - for chunk in chunks: - for hash in gots.values(): - hash.update(chunk) - - for hash_name, got in gots.items(): - if got.hexdigest() in self._allowed[hash_name]: - return - self._raise(gots) - - def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": - raise HashMismatch(self._allowed, gots) - - def check_against_file(self, file: BinaryIO) -> None: - """Check good hashes against a file-like object - - Raise HashMismatch if none match. 
- - """ - return self.check_against_chunks(read_chunks(file)) - - def check_against_path(self, path: str) -> None: - with open(path, "rb") as file: - return self.check_against_file(file) - - def __bool__(self) -> bool: - """Return whether I know any known-good hashes.""" - return bool(self._allowed) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Hashes): - return NotImplemented - return self._allowed == other._allowed - - def __hash__(self) -> int: - return hash( - ",".join( - sorted( - ":".join((alg, digest)) - for alg, digest_list in self._allowed.items() - for digest in digest_list - ) - ) - ) - - -class MissingHashes(Hashes): - """A workalike for Hashes used when we're missing a hash for a requirement - - It computes the actual hash of the requirement and raises a HashMissing - exception showing it to the user. - - """ - - def __init__(self) -> None: - """Don't offer the ``hashes`` kwarg.""" - # Pass our favorite hash in to generate a "gotten hash". With the - # empty list, it will never match, so an error will always raise. - super().__init__(hashes={FAVORITE_HASH: []}) - - def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": - raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/venv/Lib/site-packages/pip/_internal/utils/inject_securetransport.py b/venv/Lib/site-packages/pip/_internal/utils/inject_securetransport.py deleted file mode 100644 index 276aa79..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/inject_securetransport.py +++ /dev/null @@ -1,35 +0,0 @@ -"""A helper module that injects SecureTransport, on import. - -The import should be done as early as possible, to ensure all requests and -sessions (or whatever) are created after injecting SecureTransport. - -Note that we only do the injection on macOS, when the linked OpenSSL is too -old to handle TLSv1.2. 
-""" - -import sys - - -def inject_securetransport() -> None: - # Only relevant on macOS - if sys.platform != "darwin": - return - - try: - import ssl - except ImportError: - return - - # Checks for OpenSSL 1.0.1 - if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: - return - - try: - from pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - return - - securetransport.inject_into_urllib3() - - -inject_securetransport() diff --git a/venv/Lib/site-packages/pip/_internal/utils/logging.py b/venv/Lib/site-packages/pip/_internal/utils/logging.py deleted file mode 100644 index 6e001c5..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/logging.py +++ /dev/null @@ -1,343 +0,0 @@ -import contextlib -import errno -import logging -import logging.handlers -import os -import sys -import threading -from dataclasses import dataclass -from logging import Filter -from typing import IO, Any, ClassVar, Iterator, List, Optional, TextIO, Type - -from pip._vendor.rich.console import ( - Console, - ConsoleOptions, - ConsoleRenderable, - RenderResult, -) -from pip._vendor.rich.highlighter import NullHighlighter -from pip._vendor.rich.logging import RichHandler -from pip._vendor.rich.segment import Segment -from pip._vendor.rich.style import Style - -from pip._internal.exceptions import DiagnosticPipError -from pip._internal.utils._log import VERBOSE, getLogger -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX -from pip._internal.utils.misc import ensure_dir - -_log_state = threading.local() -subprocess_logger = getLogger("pip.subprocessor") - - -class BrokenStdoutLoggingError(Exception): - """ - Raised if BrokenPipeError occurs for the stdout stream while logging. - """ - - -def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool: - if exc_class is BrokenPipeError: - return True - - # On Windows, a broken pipe can show up as EINVAL rather than EPIPE: - # https://bugs.python.org/issue19612 - # https://bugs.python.org/issue30418 - if not WINDOWS: - return False - - return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE) - - -@contextlib.contextmanager -def indent_log(num: int = 2) -> Iterator[None]: - """ - A context manager which will cause the log output to be indented for any - log messages emitted inside it. - """ - # For thread-safety - _log_state.indentation = get_indentation() - _log_state.indentation += num - try: - yield - finally: - _log_state.indentation -= num - - -def get_indentation() -> int: - return getattr(_log_state, "indentation", 0) - - -class IndentingFormatter(logging.Formatter): - default_time_format = "%Y-%m-%dT%H:%M:%S" - - def __init__( - self, - *args: Any, - add_timestamp: bool = False, - **kwargs: Any, - ) -> None: - """ - A logging.Formatter that obeys the indent_log() context manager. - - :param add_timestamp: A bool indicating output lines should be prefixed - with their record's timestamp. - """ - self.add_timestamp = add_timestamp - super().__init__(*args, **kwargs) - - def get_message_start(self, formatted: str, levelno: int) -> str: - """ - Return the start of the formatted log message (not counting the - prefix to add to each line). - """ - if levelno < logging.WARNING: - return "" - if formatted.startswith(DEPRECATION_MSG_PREFIX): - # Then the message already has a prefix. We don't want it to - # look like "WARNING: DEPRECATION: ...." 
- return "" - if levelno < logging.ERROR: - return "WARNING: " - - return "ERROR: " - - def format(self, record: logging.LogRecord) -> str: - """ - Calls the standard formatter, but will indent all of the log message - lines by our current indentation level. - """ - formatted = super().format(record) - message_start = self.get_message_start(formatted, record.levelno) - formatted = message_start + formatted - - prefix = "" - if self.add_timestamp: - prefix = f"{self.formatTime(record)} " - prefix += " " * get_indentation() - formatted = "".join([prefix + line for line in formatted.splitlines(True)]) - return formatted - - -@dataclass -class IndentedRenderable: - renderable: ConsoleRenderable - indent: int - - def __rich_console__( - self, console: Console, options: ConsoleOptions - ) -> RenderResult: - segments = console.render(self.renderable, options) - lines = Segment.split_lines(segments) - for line in lines: - yield Segment(" " * self.indent) - yield from line - yield Segment("\n") - - -class RichPipStreamHandler(RichHandler): - KEYWORDS: ClassVar[Optional[List[str]]] = [] - - def __init__(self, stream: Optional[TextIO], no_color: bool) -> None: - super().__init__( - console=Console(file=stream, no_color=no_color, soft_wrap=True), - show_time=False, - show_level=False, - show_path=False, - highlighter=NullHighlighter(), - ) - - # Our custom override on Rich's logger, to make things work as we need them to. - def emit(self, record: logging.LogRecord) -> None: - style: Optional[Style] = None - - # If we are given a diagnostic error to present, present it with indentation. - if record.msg == "[present-diagnostic] %s" and len(record.args) == 1: - diagnostic_error: DiagnosticPipError = record.args[0] # type: ignore[index] - assert isinstance(diagnostic_error, DiagnosticPipError) - - renderable: ConsoleRenderable = IndentedRenderable( - diagnostic_error, indent=get_indentation() - ) - else: - message = self.format(record) - renderable = self.render_message(record, message) - if record.levelno is not None: - if record.levelno >= logging.ERROR: - style = Style(color="red") - elif record.levelno >= logging.WARNING: - style = Style(color="yellow") - - try: - self.console.print(renderable, overflow="ignore", crop=False, style=style) - except Exception: - self.handleError(record) - - def handleError(self, record: logging.LogRecord) -> None: - """Called when logging is unable to log some output.""" - - exc_class, exc = sys.exc_info()[:2] - # If a broken pipe occurred while calling write() or flush() on the - # stdout stream in logging's Handler.emit(), then raise our special - # exception so we can handle it in main() instead of logging the - # broken pipe error and continuing. - if ( - exc_class - and exc - and self.console.file is sys.stdout - and _is_broken_pipe_error(exc_class, exc) - ): - raise BrokenStdoutLoggingError() - - return super().handleError(record) - - -class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): - def _open(self) -> IO[Any]: - ensure_dir(os.path.dirname(self.baseFilename)) - return super()._open() - - -class MaxLevelFilter(Filter): - def __init__(self, level: int) -> None: - self.level = level - - def filter(self, record: logging.LogRecord) -> bool: - return record.levelno < self.level - - -class ExcludeLoggerFilter(Filter): - - """ - A logging Filter that excludes records from a logger (or its children). - """ - - def filter(self, record: logging.LogRecord) -> bool: - # The base Filter class allows only records from a logger (or its - # children). 
- return not super().filter(record) - - -def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int: - """Configures and sets up all of the logging - - Returns the requested logging level, as its integer value. - """ - - # Determine the level to be logging at. - if verbosity >= 2: - level_number = logging.DEBUG - elif verbosity == 1: - level_number = VERBOSE - elif verbosity == -1: - level_number = logging.WARNING - elif verbosity == -2: - level_number = logging.ERROR - elif verbosity <= -3: - level_number = logging.CRITICAL - else: - level_number = logging.INFO - - level = logging.getLevelName(level_number) - - # The "root" logger should match the "console" level *unless* we also need - # to log to a user log file. - include_user_log = user_log_file is not None - if include_user_log: - additional_log_file = user_log_file - root_level = "DEBUG" - else: - additional_log_file = "/dev/null" - root_level = level - - # Disable any logging besides WARNING unless we have DEBUG level logging - # enabled for vendored libraries. - vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" - - # Shorthands for clarity - log_streams = { - "stdout": "ext://sys.stdout", - "stderr": "ext://sys.stderr", - } - handler_classes = { - "stream": "pip._internal.utils.logging.RichPipStreamHandler", - "file": "pip._internal.utils.logging.BetterRotatingFileHandler", - } - handlers = ["console", "console_errors", "console_subprocess"] + ( - ["user_log"] if include_user_log else [] - ) - - logging.config.dictConfig( - { - "version": 1, - "disable_existing_loggers": False, - "filters": { - "exclude_warnings": { - "()": "pip._internal.utils.logging.MaxLevelFilter", - "level": logging.WARNING, - }, - "restrict_to_subprocess": { - "()": "logging.Filter", - "name": subprocess_logger.name, - }, - "exclude_subprocess": { - "()": "pip._internal.utils.logging.ExcludeLoggerFilter", - "name": subprocess_logger.name, - }, - }, - "formatters": { - "indent": { - "()": IndentingFormatter, - "format": "%(message)s", - }, - "indent_with_timestamp": { - "()": IndentingFormatter, - "format": "%(message)s", - "add_timestamp": True, - }, - }, - "handlers": { - "console": { - "level": level, - "class": handler_classes["stream"], - "no_color": no_color, - "stream": log_streams["stdout"], - "filters": ["exclude_subprocess", "exclude_warnings"], - "formatter": "indent", - }, - "console_errors": { - "level": "WARNING", - "class": handler_classes["stream"], - "no_color": no_color, - "stream": log_streams["stderr"], - "filters": ["exclude_subprocess"], - "formatter": "indent", - }, - # A handler responsible for logging to the console messages - # from the "subprocessor" logger. 
- "console_subprocess": { - "level": level, - "class": handler_classes["stream"], - "stream": log_streams["stderr"], - "no_color": no_color, - "filters": ["restrict_to_subprocess"], - "formatter": "indent", - }, - "user_log": { - "level": "DEBUG", - "class": handler_classes["file"], - "filename": additional_log_file, - "encoding": "utf-8", - "delay": True, - "formatter": "indent_with_timestamp", - }, - }, - "root": { - "level": root_level, - "handlers": handlers, - }, - "loggers": {"pip._vendor": {"level": vendored_log_level}}, - } - ) - - return level_number diff --git a/venv/Lib/site-packages/pip/_internal/utils/misc.py b/venv/Lib/site-packages/pip/_internal/utils/misc.py deleted file mode 100644 index b07e56f..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/misc.py +++ /dev/null @@ -1,629 +0,0 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - -import contextlib -import errno -import getpass -import hashlib -import io -import logging -import os -import posixpath -import shutil -import stat -import sys -import urllib.parse -from io import StringIO -from itertools import filterfalse, tee, zip_longest -from types import TracebackType -from typing import ( - Any, - BinaryIO, - Callable, - ContextManager, - Iterable, - Iterator, - List, - Optional, - TextIO, - Tuple, - Type, - TypeVar, - cast, -) - -from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed - -from pip import __version__ -from pip._internal.exceptions import CommandError -from pip._internal.locations import get_major_minor_version -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.virtualenv import running_under_virtualenv - -__all__ = [ - "rmtree", - "display_path", - "backup_dir", - "ask", - "splitext", - "format_size", - "is_installable_dir", - "normalize_path", - "renames", - "get_prog", - "captured_stdout", - "ensure_dir", - "remove_auth_from_url", -] - - -logger = logging.getLogger(__name__) - -T = TypeVar("T") -ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] -VersionInfo = Tuple[int, int, int] -NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]] - - -def get_pip_version() -> str: - pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") - pip_pkg_dir = os.path.abspath(pip_pkg_dir) - - return "pip {} from {} (python {})".format( - __version__, - pip_pkg_dir, - get_major_minor_version(), - ) - - -def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]: - """ - Convert a tuple of ints representing a Python version to one of length - three. - - :param py_version_info: a tuple of ints representing a Python version, - or None to specify no version. The tuple can have any length. - - :return: a tuple of length three if `py_version_info` is non-None. - Otherwise, return `py_version_info` unchanged (i.e. None). - """ - if len(py_version_info) < 3: - py_version_info += (3 - len(py_version_info)) * (0,) - elif len(py_version_info) > 3: - py_version_info = py_version_info[:3] - - return cast("VersionInfo", py_version_info) - - -def ensure_dir(path: str) -> None: - """os.path.makedirs without EEXIST.""" - try: - os.makedirs(path) - except OSError as e: - # Windows can raise spurious ENOTEMPTY errors. See #6426. 
- if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: - raise - - -def get_prog() -> str: - try: - prog = os.path.basename(sys.argv[0]) - if prog in ("__main__.py", "-c"): - return f"{sys.executable} -m pip" - else: - return prog - except (AttributeError, TypeError, IndexError): - pass - return "pip" - - -# Retry every half second for up to 3 seconds -# Tenacity raises RetryError by default, explicitly raise the original exception -@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) -def rmtree(dir: str, ignore_errors: bool = False) -> None: - shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) - - -def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None: - """On Windows, the files in .svn are read-only, so when rmtree() tries to - remove them, an exception is thrown. We catch that here, remove the - read-only attribute, and hopefully continue without problems.""" - try: - has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) - except OSError: - # it's equivalent to os.path.exists - return - - if has_attr_readonly: - # convert to read/write - os.chmod(path, stat.S_IWRITE) - # use the original function to repeat the operation - func(path) - return - else: - raise - - -def display_path(path: str) -> str: - """Gives the display value for a given path, making it relative to cwd - if possible.""" - path = os.path.normcase(os.path.abspath(path)) - if path.startswith(os.getcwd() + os.path.sep): - path = "." + path[len(os.getcwd()) :] - return path - - -def backup_dir(dir: str, ext: str = ".bak") -> str: - """Figure out the name of a directory to back up the given dir to - (adding .bak, .bak2, etc)""" - n = 1 - extension = ext - while os.path.exists(dir + extension): - n += 1 - extension = ext + str(n) - return dir + extension - - -def ask_path_exists(message: str, options: Iterable[str]) -> str: - for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): - if action in options: - return action - return ask(message, options) - - -def _check_no_input(message: str) -> None: - """Raise an error if no input is allowed.""" - if os.environ.get("PIP_NO_INPUT"): - raise Exception( - f"No input was expected ($PIP_NO_INPUT set); question: {message}" - ) - - -def ask(message: str, options: Iterable[str]) -> str: - """Ask the message interactively, with the given possible responses""" - while 1: - _check_no_input(message) - response = input(message) - response = response.strip().lower() - if response not in options: - print( - "Your response ({!r}) was not one of the expected responses: " - "{}".format(response, ", ".join(options)) - ) - else: - return response - - -def ask_input(message: str) -> str: - """Ask for input interactively.""" - _check_no_input(message) - return input(message) - - -def ask_password(message: str) -> str: - """Ask for a password interactively.""" - _check_no_input(message) - return getpass.getpass(message) - - -def strtobool(val: str) -> int: - """Convert a string representation of truth to true (1) or false (0). - - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. 
- """ - val = val.lower() - if val in ("y", "yes", "t", "true", "on", "1"): - return 1 - elif val in ("n", "no", "f", "false", "off", "0"): - return 0 - else: - raise ValueError(f"invalid truth value {val!r}") - - -def format_size(bytes: float) -> str: - if bytes > 1000 * 1000: - return "{:.1f} MB".format(bytes / 1000.0 / 1000) - elif bytes > 10 * 1000: - return "{} kB".format(int(bytes / 1000)) - elif bytes > 1000: - return "{:.1f} kB".format(bytes / 1000.0) - else: - return "{} bytes".format(int(bytes)) - - -def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]: - """Return a list of formatted rows and a list of column sizes. - - For example:: - - >>> tabulate([['foobar', 2000], [0xdeadbeef]]) - (['foobar 2000', '3735928559'], [10, 4]) - """ - rows = [tuple(map(str, row)) for row in rows] - sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")] - table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] - return table, sizes - - -def is_installable_dir(path: str) -> bool: - """Is path is a directory containing pyproject.toml or setup.py? - - If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for - a legacy setuptools layout by identifying setup.py. We don't check for the - setup.cfg because using it without setup.py is only available for PEP 517 - projects, which are already covered by the pyproject.toml check. - """ - if not os.path.isdir(path): - return False - if os.path.isfile(os.path.join(path, "pyproject.toml")): - return True - if os.path.isfile(os.path.join(path, "setup.py")): - return True - return False - - -def read_chunks(file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE) -> Iterator[bytes]: - """Yield pieces of data from a file-like object until EOF.""" - while True: - chunk = file.read(size) - if not chunk: - break - yield chunk - - -def normalize_path(path: str, resolve_symlinks: bool = True) -> str: - """ - Convert a path to its canonical, case-normalized, absolute version. - - """ - path = os.path.expanduser(path) - if resolve_symlinks: - path = os.path.realpath(path) - else: - path = os.path.abspath(path) - return os.path.normcase(path) - - -def splitext(path: str) -> Tuple[str, str]: - """Like os.path.splitext, but take off .tar too""" - base, ext = posixpath.splitext(path) - if base.lower().endswith(".tar"): - ext = base[-4:] + ext - base = base[:-4] - return base, ext - - -def renames(old: str, new: str) -> None: - """Like os.renames(), but handles renaming across devices.""" - # Implementation borrowed from os.renames(). - head, tail = os.path.split(new) - if head and tail and not os.path.exists(head): - os.makedirs(head) - - shutil.move(old, new) - - head, tail = os.path.split(old) - if head and tail: - try: - os.removedirs(head) - except OSError: - pass - - -def is_local(path: str) -> bool: - """ - Return True if path is within sys.prefix, if we're running in a virtualenv. - - If we're not in a virtualenv, all paths are considered "local." - - Caution: this function assumes the head of path has been normalized - with normalize_path. 
- """ - if not running_under_virtualenv(): - return True - return path.startswith(normalize_path(sys.prefix)) - - -def write_output(msg: Any, *args: Any) -> None: - logger.info(msg, *args) - - -class StreamWrapper(StringIO): - orig_stream: TextIO = None - - @classmethod - def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper": - cls.orig_stream = orig_stream - return cls() - - # compileall.compile_dir() needs stdout.encoding to print to stdout - # https://github.com/python/mypy/issues/4125 - @property - def encoding(self): # type: ignore - return self.orig_stream.encoding - - -@contextlib.contextmanager -def captured_output(stream_name: str) -> Iterator[StreamWrapper]: - """Return a context manager used by captured_stdout/stdin/stderr - that temporarily replaces the sys stream *stream_name* with a StringIO. - - Taken from Lib/support/__init__.py in the CPython repo. - """ - orig_stdout = getattr(sys, stream_name) - setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) - try: - yield getattr(sys, stream_name) - finally: - setattr(sys, stream_name, orig_stdout) - - -def captured_stdout() -> ContextManager[StreamWrapper]: - """Capture the output of sys.stdout: - - with captured_stdout() as stdout: - print('hello') - self.assertEqual(stdout.getvalue(), 'hello\n') - - Taken from Lib/support/__init__.py in the CPython repo. - """ - return captured_output("stdout") - - -def captured_stderr() -> ContextManager[StreamWrapper]: - """ - See captured_stdout(). - """ - return captured_output("stderr") - - -# Simulates an enum -def enum(*sequential: Any, **named: Any) -> Type[Any]: - enums = dict(zip(sequential, range(len(sequential))), **named) - reverse = {value: key for key, value in enums.items()} - enums["reverse_mapping"] = reverse - return type("Enum", (), enums) - - -def build_netloc(host: str, port: Optional[int]) -> str: - """ - Build a netloc from a host-port pair - """ - if port is None: - return host - if ":" in host: - # Only wrap host with square brackets when it is IPv6 - host = f"[{host}]" - return f"{host}:{port}" - - -def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: - """ - Build a full URL from a netloc. - """ - if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc: - # It must be a bare IPv6 address, so wrap it with brackets. - netloc = f"[{netloc}]" - return f"{scheme}://{netloc}" - - -def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]: - """ - Return the host-port pair from a netloc. - """ - url = build_url_from_netloc(netloc) - parsed = urllib.parse.urlparse(url) - return parsed.hostname, parsed.port - - -def split_auth_from_netloc(netloc: str) -> NetlocTuple: - """ - Parse out and remove the auth information from a netloc. - - Returns: (netloc, (username, password)). - """ - if "@" not in netloc: - return netloc, (None, None) - - # Split from the right because that's how urllib.parse.urlsplit() - # behaves if more than one @ is present (which can be checked using - # the password attribute of urlsplit()'s return value). 
- auth, netloc = netloc.rsplit("@", 1) - pw: Optional[str] = None - if ":" in auth: - # Split from the left because that's how urllib.parse.urlsplit() - # behaves if more than one : is present (which again can be checked - # using the password attribute of the return value) - user, pw = auth.split(":", 1) - else: - user, pw = auth, None - - user = urllib.parse.unquote(user) - if pw is not None: - pw = urllib.parse.unquote(pw) - - return netloc, (user, pw) - - -def redact_netloc(netloc: str) -> str: - """ - Replace the sensitive data in a netloc with "****", if it exists. - - For example: - - "user:pass@example.com" returns "user:****@example.com" - - "accesstoken@example.com" returns "****@example.com" - """ - netloc, (user, password) = split_auth_from_netloc(netloc) - if user is None: - return netloc - if password is None: - user = "****" - password = "" - else: - user = urllib.parse.quote(user) - password = ":****" - return "{user}{password}@{netloc}".format( - user=user, password=password, netloc=netloc - ) - - -def _transform_url( - url: str, transform_netloc: Callable[[str], Tuple[Any, ...]] -) -> Tuple[str, NetlocTuple]: - """Transform and replace netloc in a url. - - transform_netloc is a function taking the netloc and returning a - tuple. The first element of this tuple is the new netloc. The - entire tuple is returned. - - Returns a tuple containing the transformed url as item 0 and the - original tuple returned by transform_netloc as item 1. - """ - purl = urllib.parse.urlsplit(url) - netloc_tuple = transform_netloc(purl.netloc) - # stripped url - url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment) - surl = urllib.parse.urlunsplit(url_pieces) - return surl, cast("NetlocTuple", netloc_tuple) - - -def _get_netloc(netloc: str) -> NetlocTuple: - return split_auth_from_netloc(netloc) - - -def _redact_netloc(netloc: str) -> Tuple[str]: - return (redact_netloc(netloc),) - - -def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]: - """ - Parse a url into separate netloc, auth, and url with no auth. - - Returns: (url_without_auth, netloc, (username, password)) - """ - url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) - return url_without_auth, netloc, auth - - -def remove_auth_from_url(url: str) -> str: - """Return a copy of url with 'username:password@' removed.""" - # username/pass params are passed to subversion through flags - # and are not recognized in the url. - return _transform_url(url, _get_netloc)[0] - - -def redact_auth_from_url(url: str) -> str: - """Replace the password in a given url with ****.""" - return _transform_url(url, _redact_netloc)[0] - - -class HiddenText: - def __init__(self, secret: str, redacted: str) -> None: - self.secret = secret - self.redacted = redacted - - def __repr__(self) -> str: - return "".format(str(self)) - - def __str__(self) -> str: - return self.redacted - - # This is useful for testing. - def __eq__(self, other: Any) -> bool: - if type(self) != type(other): - return False - - # The string being used for redaction doesn't also have to match, - # just the raw, original string. 
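HiddenText above exists so that URLs and command arguments carrying secrets render redacted whenever they are turned into text, while equality still compares the real value. A minimal stand-in showing the idea; the class below is an illustrative re-implementation, not pip's:

class Secret:
    """Wraps a sensitive string so str()/repr() never leak it."""

    def __init__(self, secret: str, redacted: str = "****") -> None:
        self.secret = secret
        self.redacted = redacted

    def __str__(self) -> str:
        return self.redacted

    def __repr__(self) -> str:
        return f"<Secret {self.redacted!r}>"

    def __eq__(self, other: object) -> bool:
        # Equality compares the real value, not the redacted display form.
        return isinstance(other, Secret) and self.secret == other.secret


token = Secret("hunter2")
print(f"using token {token}")   # using token ****
print([token])                  # [<Secret '****'>]
assert token == Secret("hunter2")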
- return self.secret == other.secret - - -def hide_value(value: str) -> HiddenText: - return HiddenText(value, redacted="****") - - -def hide_url(url: str) -> HiddenText: - redacted = redact_auth_from_url(url) - return HiddenText(url, redacted=redacted) - - -def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: - """Protection of pip.exe from modification on Windows - - On Windows, any operation modifying pip should be run as: - python -m pip ... - """ - pip_names = [ - "pip.exe", - "pip{}.exe".format(sys.version_info[0]), - "pip{}.{}.exe".format(*sys.version_info[:2]), - ] - - # See https://github.com/pypa/pip/issues/1299 for more discussion - should_show_use_python_msg = ( - modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names - ) - - if should_show_use_python_msg: - new_command = [sys.executable, "-m", "pip"] + sys.argv[1:] - raise CommandError( - "To modify pip, please run the following command:\n{}".format( - " ".join(new_command) - ) - ) - - -def is_console_interactive() -> bool: - """Is this console interactive?""" - return sys.stdin is not None and sys.stdin.isatty() - - -def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]: - """Return (hash, length) for path using hashlib.sha256()""" - - h = hashlib.sha256() - length = 0 - with open(path, "rb") as f: - for block in read_chunks(f, size=blocksize): - length += len(block) - h.update(block) - return h, length - - -def is_wheel_installed() -> bool: - """ - Return whether the wheel package is installed. - """ - try: - import wheel # noqa: F401 - except ImportError: - return False - - return True - - -def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]: - """ - Return paired elements. - - For example: - s -> (s0, s1), (s2, s3), (s4, s5), ... 
- """ - iterable = iter(iterable) - return zip_longest(iterable, iterable) - - -def partition( - pred: Callable[[T], bool], - iterable: Iterable[T], -) -> Tuple[Iterable[T], Iterable[T]]: - """ - Use a predicate to partition entries into false entries and true entries, - like - - partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 - """ - t1, t2 = tee(iterable) - return filterfalse(pred, t1), filter(pred, t2) diff --git a/venv/Lib/site-packages/pip/_internal/utils/models.py b/venv/Lib/site-packages/pip/_internal/utils/models.py deleted file mode 100644 index b6bb21a..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/models.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Utilities for defining models -""" - -import operator -from typing import Any, Callable, Type - - -class KeyBasedCompareMixin: - """Provides comparison capabilities that is based on a key""" - - __slots__ = ["_compare_key", "_defining_class"] - - def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None: - self._compare_key = key - self._defining_class = defining_class - - def __hash__(self) -> int: - return hash(self._compare_key) - - def __lt__(self, other: Any) -> bool: - return self._compare(other, operator.__lt__) - - def __le__(self, other: Any) -> bool: - return self._compare(other, operator.__le__) - - def __gt__(self, other: Any) -> bool: - return self._compare(other, operator.__gt__) - - def __ge__(self, other: Any) -> bool: - return self._compare(other, operator.__ge__) - - def __eq__(self, other: Any) -> bool: - return self._compare(other, operator.__eq__) - - def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool: - if not isinstance(other, self._defining_class): - return NotImplemented - - return method(self._compare_key, other._compare_key) diff --git a/venv/Lib/site-packages/pip/_internal/utils/packaging.py b/venv/Lib/site-packages/pip/_internal/utils/packaging.py deleted file mode 100644 index b9f6af4..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/packaging.py +++ /dev/null @@ -1,57 +0,0 @@ -import functools -import logging -import re -from typing import NewType, Optional, Tuple, cast - -from pip._vendor.packaging import specifiers, version -from pip._vendor.packaging.requirements import Requirement - -NormalizedExtra = NewType("NormalizedExtra", str) - -logger = logging.getLogger(__name__) - - -def check_requires_python( - requires_python: Optional[str], version_info: Tuple[int, ...] -) -> bool: - """ - Check if the given Python version matches a "Requires-Python" specifier. - - :param version_info: A 3-tuple of ints representing a Python - major-minor-micro version to check (e.g. `sys.version_info[:3]`). - - :return: `True` if the given Python version satisfies the requirement. - Otherwise, return `False`. - - :raises InvalidSpecifier: If `requires_python` has an invalid format. - """ - if requires_python is None: - # The package provides no information - return True - requires_python_specifier = specifiers.SpecifierSet(requires_python) - - python_version = version.parse(".".join(map(str, version_info))) - return python_version in requires_python_specifier - - -@functools.lru_cache(maxsize=512) -def get_requirement(req_string: str) -> Requirement: - """Construct a packaging.Requirement object with caching""" - # Parsing requirement strings is expensive, and is also expected to happen - # with a low diversity of different arguments (at least relative the number - # constructed). 
This method adds a cache to requirement object creation to - # minimize repeated parsing of the same string to construct equivalent - # Requirement objects. - return Requirement(req_string) - - -def safe_extra(extra: str) -> NormalizedExtra: - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - - This function is duplicated from ``pkg_resources``. Note that this is not - the same to either ``canonicalize_name`` or ``_egg_link_name``. - """ - return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()) diff --git a/venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py b/venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py deleted file mode 100644 index f460c40..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py +++ /dev/null @@ -1,195 +0,0 @@ -import sys -import textwrap -from typing import List, Optional, Sequence - -# Shim to wrap setup.py invocation with setuptools -# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on -# Windows are correctly handled (it should be "C:\\Users" not "C:\Users"). -_SETUPTOOLS_SHIM = textwrap.dedent( - """ - exec(compile(''' - # This is -- a caller that pip uses to run setup.py - # - # - It imports setuptools before invoking setup.py, to enable projects that directly - # import from `distutils.core` to work with newer packaging standards. - # - It provides a clear error message when setuptools is not installed. - # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so - # setuptools doesn't think the script is `-c`. This avoids the following warning: - # manifest_maker: standard file '-c' not found". - # - It generates a shim setup.py, for handling setup.cfg-only projects. - import os, sys, tokenize - - try: - import setuptools - except ImportError as error: - print( - "ERROR: Can not execute `setup.py` since setuptools is not available in " - "the build environment.", - file=sys.stderr, - ) - sys.exit(1) - - __file__ = %r - sys.argv[0] = __file__ - - if os.path.exists(__file__): - filename = __file__ - with tokenize.open(__file__) as f: - setup_py_code = f.read() - else: - filename = "" - setup_py_code = "from setuptools import setup; setup()" - - exec(compile(setup_py_code, filename, "exec")) - ''' % ({!r},), "", "exec")) - """ -).rstrip() - - -def make_setuptools_shim_args( - setup_py_path: str, - global_options: Sequence[str] = None, - no_user_config: bool = False, - unbuffered_output: bool = False, -) -> List[str]: - """ - Get setuptools command arguments with shim wrapped setup file invocation. - - :param setup_py_path: The path to setup.py to be wrapped. - :param global_options: Additional global options. - :param no_user_config: If True, disables personal user configuration. - :param unbuffered_output: If True, adds the unbuffered switch to the - argument list. - """ - args = [sys.executable] - if unbuffered_output: - args += ["-u"] - args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] - if global_options: - args += global_options - if no_user_config: - args += ["--no-user-cfg"] - return args - - -def make_setuptools_bdist_wheel_args( - setup_py_path: str, - global_options: Sequence[str], - build_options: Sequence[str], - destination_dir: str, -) -> List[str]: - # NOTE: Eventually, we'd want to also -S to the flags here, when we're - # isolating. 
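The _SETUPTOOLS_SHIM template above repr-quotes the setup.py path twice: once via str.format's {!r} when pip renders the "-c" payload, and once via %r when that payload builds the source it compiles, which is what keeps Windows backslashes intact. A small sketch of the effect, with a hypothetical path:

path = "C:\\Users\\demo\\setup.py"                                   # hypothetical Windows path
payload_template = "source = '__file__ = %r' % ({!r},); print(source)"
payload = payload_template.format(path)   # first round of repr-quoting, done when building "-c"
print(payload)                             # what would follow "python -c"
exec(payload, {})                          # second round happens at run time:
                                           # prints __file__ = 'C:\\Users\\demo\\setup.py'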
Currently, it breaks Python in virtualenvs, because it - # relies on site.py to find parts of the standard library outside the - # virtualenv. - args = make_setuptools_shim_args( - setup_py_path, global_options=global_options, unbuffered_output=True - ) - args += ["bdist_wheel", "-d", destination_dir] - args += build_options - return args - - -def make_setuptools_clean_args( - setup_py_path: str, - global_options: Sequence[str], -) -> List[str]: - args = make_setuptools_shim_args( - setup_py_path, global_options=global_options, unbuffered_output=True - ) - args += ["clean", "--all"] - return args - - -def make_setuptools_develop_args( - setup_py_path: str, - global_options: Sequence[str], - install_options: Sequence[str], - no_user_config: bool, - prefix: Optional[str], - home: Optional[str], - use_user_site: bool, -) -> List[str]: - assert not (use_user_site and prefix) - - args = make_setuptools_shim_args( - setup_py_path, - global_options=global_options, - no_user_config=no_user_config, - ) - - args += ["develop", "--no-deps"] - - args += install_options - - if prefix: - args += ["--prefix", prefix] - if home is not None: - args += ["--install-dir", home] - - if use_user_site: - args += ["--user", "--prefix="] - - return args - - -def make_setuptools_egg_info_args( - setup_py_path: str, - egg_info_dir: Optional[str], - no_user_config: bool, -) -> List[str]: - args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config) - - args += ["egg_info"] - - if egg_info_dir: - args += ["--egg-base", egg_info_dir] - - return args - - -def make_setuptools_install_args( - setup_py_path: str, - global_options: Sequence[str], - install_options: Sequence[str], - record_filename: str, - root: Optional[str], - prefix: Optional[str], - header_dir: Optional[str], - home: Optional[str], - use_user_site: bool, - no_user_config: bool, - pycompile: bool, -) -> List[str]: - assert not (use_user_site and prefix) - assert not (use_user_site and root) - - args = make_setuptools_shim_args( - setup_py_path, - global_options=global_options, - no_user_config=no_user_config, - unbuffered_output=True, - ) - args += ["install", "--record", record_filename] - args += ["--single-version-externally-managed"] - - if root is not None: - args += ["--root", root] - if prefix is not None: - args += ["--prefix", prefix] - if home is not None: - args += ["--home", home] - if use_user_site: - args += ["--user", "--prefix="] - - if pycompile: - args += ["--compile"] - else: - args += ["--no-compile"] - - if header_dir: - args += ["--install-headers", header_dir] - - args += install_options - - return args diff --git a/venv/Lib/site-packages/pip/_internal/utils/subprocess.py b/venv/Lib/site-packages/pip/_internal/utils/subprocess.py deleted file mode 100644 index b5b7624..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/subprocess.py +++ /dev/null @@ -1,260 +0,0 @@ -import logging -import os -import shlex -import subprocess -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Iterable, - List, - Mapping, - Optional, - Union, -) - -from pip._vendor.rich.markup import escape - -from pip._internal.cli.spinners import SpinnerInterface, open_spinner -from pip._internal.exceptions import InstallationSubprocessError -from pip._internal.utils.logging import VERBOSE, subprocess_logger -from pip._internal.utils.misc import HiddenText - -if TYPE_CHECKING: - # Literal was introduced in Python 3.8. - # - # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. 
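A minimal usage sketch for the setuptools shim helpers deleted above, assuming make_setuptools_bdist_wheel_args from that hunk is importable; the paths are illustrative only:

    import sys

    # Roughly: [sys.executable, "-u", "-c", <the formatted _SETUPTOOLS_SHIM text>,
    #           "bdist_wheel", "-d", "/tmp/wheels"]
    args = make_setuptools_bdist_wheel_args(
        "/tmp/pkg/setup.py",        # hypothetical setup.py path
        global_options=[],
        build_options=[],
        destination_dir="/tmp/wheels",
    )
    assert args[:3] == [sys.executable, "-u", "-c"]           # unbuffered `python -c <shim>`
    assert args[-3:] == ["bdist_wheel", "-d", "/tmp/wheels"]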
- from typing import Literal - -CommandArgs = List[Union[str, HiddenText]] - - -def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs: - """ - Create a CommandArgs object. - """ - command_args: CommandArgs = [] - for arg in args: - # Check for list instead of CommandArgs since CommandArgs is - # only known during type-checking. - if isinstance(arg, list): - command_args.extend(arg) - else: - # Otherwise, arg is str or HiddenText. - command_args.append(arg) - - return command_args - - -def format_command_args(args: Union[List[str], CommandArgs]) -> str: - """ - Format command arguments for display. - """ - # For HiddenText arguments, display the redacted form by calling str(). - # Also, we don't apply str() to arguments that aren't HiddenText since - # this can trigger a UnicodeDecodeError in Python 2 if the argument - # has type unicode and includes a non-ascii character. (The type - # checker doesn't ensure the annotations are correct in all cases.) - return " ".join( - shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg) - for arg in args - ) - - -def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]: - """ - Return the arguments in their raw, unredacted form. - """ - return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args] - - -def call_subprocess( - cmd: Union[List[str], CommandArgs], - show_stdout: bool = False, - cwd: Optional[str] = None, - on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", - extra_ok_returncodes: Optional[Iterable[int]] = None, - extra_environ: Optional[Mapping[str, Any]] = None, - unset_environ: Optional[Iterable[str]] = None, - spinner: Optional[SpinnerInterface] = None, - log_failed_cmd: Optional[bool] = True, - stdout_only: Optional[bool] = False, - *, - command_desc: str, -) -> str: - """ - Args: - show_stdout: if true, use INFO to log the subprocess's stderr and - stdout streams. Otherwise, use DEBUG. Defaults to False. - extra_ok_returncodes: an iterable of integer return codes that are - acceptable, in addition to 0. Defaults to None, which means []. - unset_environ: an iterable of environment variable names to unset - prior to calling subprocess.Popen(). - log_failed_cmd: if false, failed commands are not logged, only raised. - stdout_only: if true, return only stdout, else return both. When true, - logging of both stdout and stderr occurs when the subprocess has - terminated, else logging occurs as subprocess output is produced. - """ - if extra_ok_returncodes is None: - extra_ok_returncodes = [] - if unset_environ is None: - unset_environ = [] - # Most places in pip use show_stdout=False. What this means is-- - # - # - We connect the child's output (combined stderr and stdout) to a - # single pipe, which we read. - # - We log this output to stderr at DEBUG level as it is received. - # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't - # requested), then we show a spinner so the user can still see the - # subprocess is in progress. - # - If the subprocess exits with an error, we log the output to stderr - # at ERROR level if it hasn't already been displayed to the console - # (e.g. if --verbose logging wasn't enabled). This way we don't log - # the output to the console twice. - # - # If show_stdout=True, then the above is still done, but with DEBUG - # replaced by INFO. - if show_stdout: - # Then log the subprocess output at INFO level. 
- log_subprocess = subprocess_logger.info - used_level = logging.INFO - else: - # Then log the subprocess output using VERBOSE. This also ensures - # it will be logged to the log file (aka user_log), if enabled. - log_subprocess = subprocess_logger.verbose - used_level = VERBOSE - - # Whether the subprocess will be visible in the console. - showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level - - # Only use the spinner if we're not showing the subprocess output - # and we have a spinner. - use_spinner = not showing_subprocess and spinner is not None - - log_subprocess("Running command %s", command_desc) - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - for name in unset_environ: - env.pop(name, None) - try: - proc = subprocess.Popen( - # Convert HiddenText objects to the underlying str. - reveal_command_args(cmd), - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, - cwd=cwd, - env=env, - errors="backslashreplace", - ) - except Exception as exc: - if log_failed_cmd: - subprocess_logger.critical( - "Error %s while executing command %s", - exc, - command_desc, - ) - raise - all_output = [] - if not stdout_only: - assert proc.stdout - assert proc.stdin - proc.stdin.close() - # In this mode, stdout and stderr are in the same pipe. - while True: - line: str = proc.stdout.readline() - if not line: - break - line = line.rstrip() - all_output.append(line + "\n") - - # Show the line immediately. - log_subprocess(line) - # Update the spinner. - if use_spinner: - assert spinner - spinner.spin() - try: - proc.wait() - finally: - if proc.stdout: - proc.stdout.close() - output = "".join(all_output) - else: - # In this mode, stdout and stderr are in different pipes. - # We must use communicate() which is the only safe way to read both. - out, err = proc.communicate() - # log line by line to preserve pip log indenting - for out_line in out.splitlines(): - log_subprocess(out_line) - all_output.append(out) - for err_line in err.splitlines(): - log_subprocess(err_line) - all_output.append(err) - output = out - - proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes - if use_spinner: - assert spinner - if proc_had_error: - spinner.finish("error") - else: - spinner.finish("done") - if proc_had_error: - if on_returncode == "raise": - error = InstallationSubprocessError( - command_description=command_desc, - exit_code=proc.returncode, - output_lines=all_output if not showing_subprocess else None, - ) - if log_failed_cmd: - subprocess_logger.error("[present-diagnostic] %s", error) - subprocess_logger.verbose( - "[bold magenta]full command[/]: [blue]%s[/]", - escape(format_command_args(cmd)), - extra={"markup": True}, - ) - subprocess_logger.verbose( - "[bold magenta]cwd[/]: %s", - escape(cwd or "[inherit]"), - extra={"markup": True}, - ) - - raise error - elif on_returncode == "warn": - subprocess_logger.warning( - 'Command "%s" had error code %s in %s', - command_desc, - proc.returncode, - cwd, - ) - elif on_returncode == "ignore": - pass - else: - raise ValueError(f"Invalid value: on_returncode={on_returncode!r}") - return output - - -def runner_with_spinner_message(message: str) -> Callable[..., None]: - """Provide a subprocess_runner that shows a spinner message. - - Intended for use with for pep517's Pep517HookCaller. Thus, the runner has - an API that matches what's expected by Pep517HookCaller.subprocess_runner. 
- """ - - def runner( - cmd: List[str], - cwd: Optional[str] = None, - extra_environ: Optional[Mapping[str, Any]] = None, - ) -> None: - with open_spinner(message) as spinner: - call_subprocess( - cmd, - command_desc=message, - cwd=cwd, - extra_environ=extra_environ, - spinner=spinner, - ) - - return runner diff --git a/venv/Lib/site-packages/pip/_internal/utils/temp_dir.py b/venv/Lib/site-packages/pip/_internal/utils/temp_dir.py deleted file mode 100644 index 442679a..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/temp_dir.py +++ /dev/null @@ -1,246 +0,0 @@ -import errno -import itertools -import logging -import os.path -import tempfile -from contextlib import ExitStack, contextmanager -from typing import Any, Dict, Iterator, Optional, TypeVar, Union - -from pip._internal.utils.misc import enum, rmtree - -logger = logging.getLogger(__name__) - -_T = TypeVar("_T", bound="TempDirectory") - - -# Kinds of temporary directories. Only needed for ones that are -# globally-managed. -tempdir_kinds = enum( - BUILD_ENV="build-env", - EPHEM_WHEEL_CACHE="ephem-wheel-cache", - REQ_BUILD="req-build", -) - - -_tempdir_manager: Optional[ExitStack] = None - - -@contextmanager -def global_tempdir_manager() -> Iterator[None]: - global _tempdir_manager - with ExitStack() as stack: - old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack - try: - yield - finally: - _tempdir_manager = old_tempdir_manager - - -class TempDirectoryTypeRegistry: - """Manages temp directory behavior""" - - def __init__(self) -> None: - self._should_delete: Dict[str, bool] = {} - - def set_delete(self, kind: str, value: bool) -> None: - """Indicate whether a TempDirectory of the given kind should be - auto-deleted. - """ - self._should_delete[kind] = value - - def get_delete(self, kind: str) -> bool: - """Get configured auto-delete flag for a given TempDirectory type, - default True. - """ - return self._should_delete.get(kind, True) - - -_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None - - -@contextmanager -def tempdir_registry() -> Iterator[TempDirectoryTypeRegistry]: - """Provides a scoped global tempdir registry that can be used to dictate - whether directories should be deleted. - """ - global _tempdir_registry - old_tempdir_registry = _tempdir_registry - _tempdir_registry = TempDirectoryTypeRegistry() - try: - yield _tempdir_registry - finally: - _tempdir_registry = old_tempdir_registry - - -class _Default: - pass - - -_default = _Default() - - -class TempDirectory: - """Helper class that owns and cleans up a temporary directory. - - This class can be used as a context manager or as an OO representation of a - temporary directory. - - Attributes: - path - Location to the created temporary directory - delete - Whether the directory should be deleted when exiting - (when used as a contextmanager) - - Methods: - cleanup() - Deletes the temporary directory - - When used as a context manager, if the delete attribute is True, on - exiting the context the temporary directory is deleted. - """ - - def __init__( - self, - path: Optional[str] = None, - delete: Union[bool, None, _Default] = _default, - kind: str = "temp", - globally_managed: bool = False, - ): - super().__init__() - - if delete is _default: - if path is not None: - # If we were given an explicit directory, resolve delete option - # now. - delete = False - else: - # Otherwise, we wait until cleanup and see what - # tempdir_registry says. 
- delete = None - - # The only time we specify path is in for editables where it - # is the value of the --src option. - if path is None: - path = self._create(kind) - - self._path = path - self._deleted = False - self.delete = delete - self.kind = kind - - if globally_managed: - assert _tempdir_manager is not None - _tempdir_manager.enter_context(self) - - @property - def path(self) -> str: - assert not self._deleted, f"Attempted to access deleted path: {self._path}" - return self._path - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.path!r}>" - - def __enter__(self: _T) -> _T: - return self - - def __exit__(self, exc: Any, value: Any, tb: Any) -> None: - if self.delete is not None: - delete = self.delete - elif _tempdir_registry: - delete = _tempdir_registry.get_delete(self.kind) - else: - delete = True - - if delete: - self.cleanup() - - def _create(self, kind: str) -> str: - """Create a temporary directory and store its path in self.path""" - # We realpath here because some systems have their default tmpdir - # symlinked to another directory. This tends to confuse build - # scripts, so we canonicalize the path by traversing potential - # symlinks here. - path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) - logger.debug("Created temporary directory: %s", path) - return path - - def cleanup(self) -> None: - """Remove the temporary directory created and reset state""" - self._deleted = True - if not os.path.exists(self._path): - return - rmtree(self._path) - - -class AdjacentTempDirectory(TempDirectory): - """Helper class that creates a temporary directory adjacent to a real one. - - Attributes: - original - The original directory to create a temp directory for. - path - After calling create() or entering, contains the full - path to the temporary directory. - delete - Whether the directory should be deleted when exiting - (when used as a contextmanager) - - """ - - # The characters that may be used to name the temp directory - # We always prepend a ~ and then rotate through these until - # a usable name is found. - # pkg_resources raises a different error for .dist-info folder - # with leading '-' and invalid metadata - LEADING_CHARS = "-~.=%0123456789" - - def __init__(self, original: str, delete: Optional[bool] = None) -> None: - self.original = original.rstrip("/\\") - super().__init__(delete=delete) - - @classmethod - def _generate_names(cls, name: str) -> Iterator[str]: - """Generates a series of temporary names. - - The algorithm replaces the leading characters in the name - with ones that are valid filesystem characters, but are not - valid package names (for both Python and pip definitions of - package). 
- """ - for i in range(1, len(name)): - for candidate in itertools.combinations_with_replacement( - cls.LEADING_CHARS, i - 1 - ): - new_name = "~" + "".join(candidate) + name[i:] - if new_name != name: - yield new_name - - # If we make it this far, we will have to make a longer name - for i in range(len(cls.LEADING_CHARS)): - for candidate in itertools.combinations_with_replacement( - cls.LEADING_CHARS, i - ): - new_name = "~" + "".join(candidate) + name - if new_name != name: - yield new_name - - def _create(self, kind: str) -> str: - root, name = os.path.split(self.original) - for candidate in self._generate_names(name): - path = os.path.join(root, candidate) - try: - os.mkdir(path) - except OSError as ex: - # Continue if the name exists already - if ex.errno != errno.EEXIST: - raise - else: - path = os.path.realpath(path) - break - else: - # Final fallback on the default behavior. - path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) - - logger.debug("Created temporary directory: %s", path) - return path diff --git a/venv/Lib/site-packages/pip/_internal/utils/unpacking.py b/venv/Lib/site-packages/pip/_internal/utils/unpacking.py deleted file mode 100644 index 5f63f97..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/unpacking.py +++ /dev/null @@ -1,258 +0,0 @@ -"""Utilities related archives. -""" - -import logging -import os -import shutil -import stat -import tarfile -import zipfile -from typing import Iterable, List, Optional -from zipfile import ZipInfo - -from pip._internal.exceptions import InstallationError -from pip._internal.utils.filetypes import ( - BZ2_EXTENSIONS, - TAR_EXTENSIONS, - XZ_EXTENSIONS, - ZIP_EXTENSIONS, -) -from pip._internal.utils.misc import ensure_dir - -logger = logging.getLogger(__name__) - - -SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS - -try: - import bz2 # noqa - - SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS -except ImportError: - logger.debug("bz2 module is not available") - -try: - # Only for Python 3.3+ - import lzma # noqa - - SUPPORTED_EXTENSIONS += XZ_EXTENSIONS -except ImportError: - logger.debug("lzma module is not available") - - -def current_umask() -> int: - """Get the current umask which involves having to set it temporarily.""" - mask = os.umask(0) - os.umask(mask) - return mask - - -def split_leading_dir(path: str) -> List[str]: - path = path.lstrip("/").lstrip("\\") - if "/" in path and ( - ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path - ): - return path.split("/", 1) - elif "\\" in path: - return path.split("\\", 1) - else: - return [path, ""] - - -def has_leading_dir(paths: Iterable[str]) -> bool: - """Returns true if all the paths have the same leading path name - (i.e., everything is in one subdirectory in an archive)""" - common_prefix = None - for path in paths: - prefix, rest = split_leading_dir(path) - if not prefix: - return False - elif common_prefix is None: - common_prefix = prefix - elif prefix != common_prefix: - return False - return True - - -def is_within_directory(directory: str, target: str) -> bool: - """ - Return true if the absolute path of target is within the directory - """ - abs_directory = os.path.abspath(directory) - abs_target = os.path.abspath(target) - - prefix = os.path.commonprefix([abs_directory, abs_target]) - return prefix == abs_directory - - -def set_extracted_file_to_default_mode_plus_executable(path: str) -> None: - """ - Make file present at path have execute for user/group/world - (chmod +x) is no-op on windows per python docs - """ - 
os.chmod(path, (0o777 & ~current_umask() | 0o111)) - - -def zip_item_is_executable(info: ZipInfo) -> bool: - mode = info.external_attr >> 16 - # if mode and regular file and any execute permissions for - # user/group/world? - return bool(mode and stat.S_ISREG(mode) and mode & 0o111) - - -def unzip_file(filename: str, location: str, flatten: bool = True) -> None: - """ - Unzip the file (with path `filename`) to the destination `location`. All - files are written based on system defaults and umask (i.e. permissions are - not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. - """ - ensure_dir(location) - zipfp = open(filename, "rb") - try: - zip = zipfile.ZipFile(zipfp, allowZip64=True) - leading = has_leading_dir(zip.namelist()) and flatten - for info in zip.infolist(): - name = info.filename - fn = name - if leading: - fn = split_leading_dir(name)[1] - fn = os.path.join(location, fn) - dir = os.path.dirname(fn) - if not is_within_directory(location, fn): - message = ( - "The zip file ({}) has a file ({}) trying to install " - "outside target directory ({})" - ) - raise InstallationError(message.format(filename, fn, location)) - if fn.endswith("/") or fn.endswith("\\"): - # A directory - ensure_dir(fn) - else: - ensure_dir(dir) - # Don't use read() to avoid allocating an arbitrarily large - # chunk of memory for the file's content - fp = zip.open(name) - try: - with open(fn, "wb") as destfp: - shutil.copyfileobj(fp, destfp) - finally: - fp.close() - if zip_item_is_executable(info): - set_extracted_file_to_default_mode_plus_executable(fn) - finally: - zipfp.close() - - -def untar_file(filename: str, location: str) -> None: - """ - Untar the file (with path `filename`) to the destination `location`. - All files are written based on system defaults and umask (i.e. permissions - are not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. 
- """ - ensure_dir(location) - if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): - mode = "r:gz" - elif filename.lower().endswith(BZ2_EXTENSIONS): - mode = "r:bz2" - elif filename.lower().endswith(XZ_EXTENSIONS): - mode = "r:xz" - elif filename.lower().endswith(".tar"): - mode = "r" - else: - logger.warning( - "Cannot determine compression type for file %s", - filename, - ) - mode = "r:*" - tar = tarfile.open(filename, mode, encoding="utf-8") - try: - leading = has_leading_dir([member.name for member in tar.getmembers()]) - for member in tar.getmembers(): - fn = member.name - if leading: - fn = split_leading_dir(fn)[1] - path = os.path.join(location, fn) - if not is_within_directory(location, path): - message = ( - "The tar file ({}) has a file ({}) trying to install " - "outside target directory ({})" - ) - raise InstallationError(message.format(filename, path, location)) - if member.isdir(): - ensure_dir(path) - elif member.issym(): - try: - # https://github.com/python/typeshed/issues/2673 - tar._extract_member(member, path) # type: ignore - except Exception as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - "In the tar file %s the member %s is invalid: %s", - filename, - member.name, - exc, - ) - continue - else: - try: - fp = tar.extractfile(member) - except (KeyError, AttributeError) as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - "In the tar file %s the member %s is invalid: %s", - filename, - member.name, - exc, - ) - continue - ensure_dir(os.path.dirname(path)) - assert fp is not None - with open(path, "wb") as destfp: - shutil.copyfileobj(fp, destfp) - fp.close() - # Update the timestamp (useful for cython compiled files) - tar.utime(member, path) - # member have any execute permissions for user/group/world? - if member.mode & 0o111: - set_extracted_file_to_default_mode_plus_executable(path) - finally: - tar.close() - - -def unpack_file( - filename: str, - location: str, - content_type: Optional[str] = None, -) -> None: - filename = os.path.realpath(filename) - if ( - content_type == "application/zip" - or filename.lower().endswith(ZIP_EXTENSIONS) - or zipfile.is_zipfile(filename) - ): - unzip_file(filename, location, flatten=not filename.endswith(".whl")) - elif ( - content_type == "application/x-gzip" - or tarfile.is_tarfile(filename) - or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) - ): - untar_file(filename, location) - else: - # FIXME: handle? - # FIXME: magic signatures? - logger.critical( - "Cannot unpack file %s (downloaded from %s, content-type: %s); " - "cannot detect archive format", - filename, - location, - content_type, - ) - raise InstallationError(f"Cannot determine archive format of {location}") diff --git a/venv/Lib/site-packages/pip/_internal/utils/urls.py b/venv/Lib/site-packages/pip/_internal/utils/urls.py deleted file mode 100644 index 6ba2e04..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/urls.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import string -import urllib.parse -import urllib.request -from typing import Optional - -from .compat import WINDOWS - - -def get_url_scheme(url: str) -> Optional[str]: - if ":" not in url: - return None - return url.split(":", 1)[0].lower() - - -def path_to_url(path: str) -> str: - """ - Convert a path to a file: URL. The path will be made absolute and have - quoted path parts. 
- """ - path = os.path.normpath(os.path.abspath(path)) - url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path)) - return url - - -def url_to_path(url: str) -> str: - """ - Convert a file: URL to a path. - """ - assert url.startswith( - "file:" - ), f"You can only turn file: urls into filenames (not {url!r})" - - _, netloc, path, _, _ = urllib.parse.urlsplit(url) - - if not netloc or netloc == "localhost": - # According to RFC 8089, same as empty authority. - netloc = "" - elif WINDOWS: - # If we have a UNC path, prepend UNC share notation. - netloc = "\\\\" + netloc - else: - raise ValueError( - f"non-local file URIs are not supported on this platform: {url!r}" - ) - - path = urllib.request.url2pathname(netloc + path) - - # On Windows, urlsplit parses the path as something like "/C:/Users/foo". - # This creates issues for path-related functions like io.open(), so we try - # to detect and strip the leading slash. - if ( - WINDOWS - and not netloc # Not UNC. - and len(path) >= 3 - and path[0] == "/" # Leading slash to strip. - and path[1] in string.ascii_letters # Drive letter. - and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path. - ): - path = path[1:] - - return path diff --git a/venv/Lib/site-packages/pip/_internal/utils/virtualenv.py b/venv/Lib/site-packages/pip/_internal/utils/virtualenv.py deleted file mode 100644 index c926db4..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/virtualenv.py +++ /dev/null @@ -1,104 +0,0 @@ -import logging -import os -import re -import site -import sys -from typing import List, Optional - -logger = logging.getLogger(__name__) -_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( - r"include-system-site-packages\s*=\s*(?Ptrue|false)" -) - - -def _running_under_venv() -> bool: - """Checks if sys.base_prefix and sys.prefix match. - - This handles PEP 405 compliant virtual environments. - """ - return sys.prefix != getattr(sys, "base_prefix", sys.prefix) - - -def _running_under_regular_virtualenv() -> bool: - """Checks if sys.real_prefix is set. - - This handles virtual environments created with pypa's virtualenv. - """ - # pypa/virtualenv case - return hasattr(sys, "real_prefix") - - -def running_under_virtualenv() -> bool: - """Return True if we're running inside a virtualenv, False otherwise.""" - return _running_under_venv() or _running_under_regular_virtualenv() - - -def _get_pyvenv_cfg_lines() -> Optional[List[str]]: - """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines - - Returns None, if it could not read/access the file. - """ - pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg") - try: - # Although PEP 405 does not specify, the built-in venv module always - # writes with UTF-8. (pypa/pip#8717) - with open(pyvenv_cfg_file, encoding="utf-8") as f: - return f.read().splitlines() # avoids trailing newlines - except OSError: - return None - - -def _no_global_under_venv() -> bool: - """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion - - PEP 405 specifies that when system site-packages are not supposed to be - visible from a virtual environment, `pyvenv.cfg` must contain the following - line: - - include-system-site-packages = false - - Additionally, log a warning if accessing the file fails. - """ - cfg_lines = _get_pyvenv_cfg_lines() - if cfg_lines is None: - # We're not in a "sane" venv, so assume there is no system - # site-packages access (since that's PEP 405's default state). 
- logger.warning( - "Could not access 'pyvenv.cfg' despite a virtual environment " - "being active. Assuming global site-packages is not accessible " - "in this environment." - ) - return True - - for line in cfg_lines: - match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) - if match is not None and match.group("value") == "false": - return True - return False - - -def _no_global_under_regular_virtualenv() -> bool: - """Check if "no-global-site-packages.txt" exists beside site.py - - This mirrors logic in pypa/virtualenv for determining whether system - site-packages are visible in the virtual environment. - """ - site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_site_packages_file = os.path.join( - site_mod_dir, - "no-global-site-packages.txt", - ) - return os.path.exists(no_global_site_packages_file) - - -def virtualenv_no_global() -> bool: - """Returns a boolean, whether running in venv with no system site-packages.""" - # PEP 405 compliance needs to be checked first since virtualenv >=20 would - # return True for both checks, but is only able to use the PEP 405 config. - if _running_under_venv(): - return _no_global_under_venv() - - if _running_under_regular_virtualenv(): - return _no_global_under_regular_virtualenv() - - return False diff --git a/venv/Lib/site-packages/pip/_internal/utils/wheel.py b/venv/Lib/site-packages/pip/_internal/utils/wheel.py deleted file mode 100644 index e5e3f34..0000000 --- a/venv/Lib/site-packages/pip/_internal/utils/wheel.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Support functions for working with wheel files. -""" - -import logging -from email.message import Message -from email.parser import Parser -from typing import Tuple -from zipfile import BadZipFile, ZipFile - -from pip._vendor.packaging.utils import canonicalize_name - -from pip._internal.exceptions import UnsupportedWheel - -VERSION_COMPATIBLE = (1, 0) - - -logger = logging.getLogger(__name__) - - -def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]: - """Extract information from the provided wheel, ensuring it meets basic - standards. - - Returns the name of the .dist-info directory and the parsed WHEEL metadata. - """ - try: - info_dir = wheel_dist_info_dir(wheel_zip, name) - metadata = wheel_metadata(wheel_zip, info_dir) - version = wheel_version(metadata) - except UnsupportedWheel as e: - raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) - - check_compatibility(version, name) - - return info_dir, metadata - - -def wheel_dist_info_dir(source: ZipFile, name: str) -> str: - """Returns the name of the contained .dist-info directory. - - Raises AssertionError or UnsupportedWheel if not found, >1 found, or - it doesn't match the provided name. 
- """ - # Zip file path separators must be / - subdirs = {p.split("/", 1)[0] for p in source.namelist()} - - info_dirs = [s for s in subdirs if s.endswith(".dist-info")] - - if not info_dirs: - raise UnsupportedWheel(".dist-info directory not found") - - if len(info_dirs) > 1: - raise UnsupportedWheel( - "multiple .dist-info directories found: {}".format(", ".join(info_dirs)) - ) - - info_dir = info_dirs[0] - - info_dir_name = canonicalize_name(info_dir) - canonical_name = canonicalize_name(name) - if not info_dir_name.startswith(canonical_name): - raise UnsupportedWheel( - ".dist-info directory {!r} does not start with {!r}".format( - info_dir, canonical_name - ) - ) - - return info_dir - - -def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes: - try: - return source.read(path) - # BadZipFile for general corruption, KeyError for missing entry, - # and RuntimeError for password-protected files - except (BadZipFile, KeyError, RuntimeError) as e: - raise UnsupportedWheel(f"could not read {path!r} file: {e!r}") - - -def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message: - """Return the WHEEL metadata of an extracted wheel, if possible. - Otherwise, raise UnsupportedWheel. - """ - path = f"{dist_info_dir}/WHEEL" - # Zip file path separators must be / - wheel_contents = read_wheel_metadata_file(source, path) - - try: - wheel_text = wheel_contents.decode() - except UnicodeDecodeError as e: - raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") - - # FeedParser (used by Parser) does not raise any exceptions. The returned - # message may have .defects populated, but for backwards-compatibility we - # currently ignore them. - return Parser().parsestr(wheel_text) - - -def wheel_version(wheel_data: Message) -> Tuple[int, ...]: - """Given WHEEL metadata, return the parsed Wheel-Version. - Otherwise, raise UnsupportedWheel. - """ - version_text = wheel_data["Wheel-Version"] - if version_text is None: - raise UnsupportedWheel("WHEEL is missing Wheel-Version") - - version = version_text.strip() - - try: - return tuple(map(int, version.split("."))) - except ValueError: - raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") - - -def check_compatibility(version: Tuple[int, ...], name: str) -> None: - """Raises errors or warns if called with an incompatible Wheel-Version. - - pip should refuse to install a Wheel-Version that's a major series - ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when - installing a version only minor version ahead (e.g 1.2 > 1.1). - - version: a 2-tuple representing a Wheel-Version (Major, Minor) - name: name of wheel or package to raise exception about - - :raises UnsupportedWheel: when an incompatible Wheel-Version is given - """ - if version[0] > VERSION_COMPATIBLE[0]: - raise UnsupportedWheel( - "{}'s Wheel-Version ({}) is not compatible with this version " - "of pip".format(name, ".".join(map(str, version))) - ) - elif version > VERSION_COMPATIBLE: - logger.warning( - "Installing from a newer Wheel-Version (%s)", - ".".join(map(str, version)), - ) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__init__.py b/venv/Lib/site-packages/pip/_internal/vcs/__init__.py deleted file mode 100644 index b6beddb..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Expose a limited set of classes and functions so callers outside of -# the vcs package don't need to import deeper than `pip._internal.vcs`. -# (The test directory may still need to import from a vcs sub-package.) 
-# Import all vcs modules to register each VCS in the VcsSupport object. -import pip._internal.vcs.bazaar -import pip._internal.vcs.git -import pip._internal.vcs.mercurial -import pip._internal.vcs.subversion # noqa: F401 -from pip._internal.vcs.versioncontrol import ( # noqa: F401 - RemoteNotFoundError, - RemoteNotValidError, - is_url, - make_vcs_requirement_url, - vcs, -) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 633dd37..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc deleted file mode 100644 index 7cbc774..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc deleted file mode 100644 index 808c250..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc deleted file mode 100644 index 28773af..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc deleted file mode 100644 index 8a745d6..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc b/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc deleted file mode 100644 index 9f96656..0000000 Binary files a/venv/Lib/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_internal/vcs/bazaar.py b/venv/Lib/site-packages/pip/_internal/vcs/bazaar.py deleted file mode 100644 index a7b16e2..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/bazaar.py +++ /dev/null @@ -1,101 +0,0 @@ -import logging -from typing import List, Optional, Tuple - -from pip._internal.utils.misc import HiddenText, display_path -from pip._internal.utils.subprocess import make_command -from pip._internal.utils.urls import path_to_url -from pip._internal.vcs.versioncontrol import ( - AuthInfo, - RemoteNotFoundError, - RevOptions, - VersionControl, - vcs, -) - -logger = logging.getLogger(__name__) - - -class Bazaar(VersionControl): - name = "bzr" - dirname = ".bzr" - repo_name = "branch" - schemes = ( - "bzr+http", - "bzr+https", - "bzr+ssh", - "bzr+sftp", - "bzr+ftp", - "bzr+lp", - "bzr+file", - ) - - @staticmethod - def get_base_rev_args(rev: str) -> List[str]: - return ["-r", rev] - - def fetch_new( - self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int - ) -> None: - rev_display = rev_options.to_display() - logger.info( - "Checking out %s%s to %s", - url, - rev_display, - display_path(dest), - ) - if 
verbosity <= 0: - flag = "--quiet" - elif verbosity == 1: - flag = "" - else: - flag = f"-{'v'*verbosity}" - cmd_args = make_command("branch", flag, rev_options.to_args(), url, dest) - self.run_command(cmd_args) - - def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - self.run_command(make_command("switch", url), cwd=dest) - - def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - cmd_args = make_command("pull", "-q", rev_options.to_args()) - self.run_command(cmd_args, cwd=dest) - - @classmethod - def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: - # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it - url, rev, user_pass = super().get_url_rev_and_auth(url) - if url.startswith("ssh://"): - url = "bzr+" + url - return url, rev, user_pass - - @classmethod - def get_remote_url(cls, location: str) -> str: - urls = cls.run_command( - ["info"], show_stdout=False, stdout_only=True, cwd=location - ) - for line in urls.splitlines(): - line = line.strip() - for x in ("checkout of branch: ", "parent branch: "): - if line.startswith(x): - repo = line.split(x)[1] - if cls._is_local_repository(repo): - return path_to_url(repo) - return repo - raise RemoteNotFoundError - - @classmethod - def get_revision(cls, location: str) -> str: - revision = cls.run_command( - ["revno"], - show_stdout=False, - stdout_only=True, - cwd=location, - ) - return revision.splitlines()[-1] - - @classmethod - def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: - """Always assume the versions don't match""" - return False - - -vcs.register(Bazaar) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/git.py b/venv/Lib/site-packages/pip/_internal/vcs/git.py deleted file mode 100644 index 8d1d499..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/git.py +++ /dev/null @@ -1,526 +0,0 @@ -import logging -import os.path -import pathlib -import re -import urllib.parse -import urllib.request -from typing import List, Optional, Tuple - -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.utils.misc import HiddenText, display_path, hide_url -from pip._internal.utils.subprocess import make_command -from pip._internal.vcs.versioncontrol import ( - AuthInfo, - RemoteNotFoundError, - RemoteNotValidError, - RevOptions, - VersionControl, - find_path_to_project_root_from_repo_root, - vcs, -) - -urlsplit = urllib.parse.urlsplit -urlunsplit = urllib.parse.urlunsplit - - -logger = logging.getLogger(__name__) - - -GIT_VERSION_REGEX = re.compile( - r"^git version " # Prefix. - r"(\d+)" # Major. - r"\.(\d+)" # Dot, minor. - r"(?:\.(\d+))?" # Optional dot, patch. - r".*$" # Suffix, including any pre- and post-release segments we don't care about. -) - -HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$") - -# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git' -SCP_REGEX = re.compile( - r"""^ - # Optional user, e.g. 'git@' - (\w+@)? - # Server, e.g. 'github.com'. - ([^/:]+): - # The server-side path. e.g. 'user/project.git'. Must start with an - # alphanumeric character so as not to be confusable with a Windows paths - # like 'C:/foo/bar' or 'C:\foo\bar'. 
- (\w[^:]*) - $""", - re.VERBOSE, -) - - -def looks_like_hash(sha: str) -> bool: - return bool(HASH_REGEX.match(sha)) - - -class Git(VersionControl): - name = "git" - dirname = ".git" - repo_name = "clone" - schemes = ( - "git+http", - "git+https", - "git+ssh", - "git+git", - "git+file", - ) - # Prevent the user's environment variables from interfering with pip: - # https://github.com/pypa/pip/issues/1130 - unset_environ = ("GIT_DIR", "GIT_WORK_TREE") - default_arg_rev = "HEAD" - - @staticmethod - def get_base_rev_args(rev: str) -> List[str]: - return [rev] - - def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: - _, rev_options = self.get_url_rev_options(hide_url(url)) - if not rev_options.rev: - return False - if not self.is_commit_id_equal(dest, rev_options.rev): - # the current commit is different from rev, - # which means rev was something else than a commit hash - return False - # return False in the rare case rev is both a commit hash - # and a tag or a branch; we don't want to cache in that case - # because that branch/tag could point to something else in the future - is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0]) - return not is_tag_or_branch - - def get_git_version(self) -> Tuple[int, ...]: - version = self.run_command( - ["version"], - command_desc="git version", - show_stdout=False, - stdout_only=True, - ) - match = GIT_VERSION_REGEX.match(version) - if not match: - logger.warning("Can't parse git version: %s", version) - return () - return tuple(int(c) for c in match.groups()) - - @classmethod - def get_current_branch(cls, location: str) -> Optional[str]: - """ - Return the current branch, or None if HEAD isn't at a branch - (e.g. detached HEAD). - """ - # git-symbolic-ref exits with empty stdout if "HEAD" is a detached - # HEAD rather than a symbolic ref. In addition, the -q causes the - # command to exit with status code 1 instead of 128 in this case - # and to suppress the message to stderr. - args = ["symbolic-ref", "-q", "HEAD"] - output = cls.run_command( - args, - extra_ok_returncodes=(1,), - show_stdout=False, - stdout_only=True, - cwd=location, - ) - ref = output.strip() - - if ref.startswith("refs/heads/"): - return ref[len("refs/heads/") :] - - return None - - @classmethod - def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]: - """ - Return (sha_or_none, is_branch), where sha_or_none is a commit hash - if the revision names a remote branch or tag, otherwise None. - - Args: - dest: the repository directory. - rev: the revision name. - """ - # Pass rev to pre-filter the list. - output = cls.run_command( - ["show-ref", rev], - cwd=dest, - show_stdout=False, - stdout_only=True, - on_returncode="ignore", - ) - refs = {} - # NOTE: We do not use splitlines here since that would split on other - # unicode separators, which can be maliciously used to install a - # different revision. - for line in output.strip().split("\n"): - line = line.rstrip("\r") - if not line: - continue - try: - ref_sha, ref_name = line.split(" ", maxsplit=2) - except ValueError: - # Include the offending line to simplify troubleshooting if - # this error ever occurs. 
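A small sketch of the module-level regexes defined above, via looks_like_hash() and GIT_VERSION_REGEX as used by get_git_version(); the version string is illustrative:

    assert looks_like_hash("0123456789abcdef0123456789abcdef01234567") is True   # full 40-char SHA-1
    assert looks_like_hash("abc1234") is False                                   # abbreviated hashes don't match
    match = GIT_VERSION_REGEX.match("git version 2.39.2.windows.1")
    assert match is not None and match.group(1, 2) == ("2", "39")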
- raise ValueError(f"unexpected show-ref line: {line!r}") - - refs[ref_name] = ref_sha - - branch_ref = f"refs/remotes/origin/{rev}" - tag_ref = f"refs/tags/{rev}" - - sha = refs.get(branch_ref) - if sha is not None: - return (sha, True) - - sha = refs.get(tag_ref) - - return (sha, False) - - @classmethod - def _should_fetch(cls, dest: str, rev: str) -> bool: - """ - Return true if rev is a ref or is a commit that we don't have locally. - - Branches and tags are not considered in this method because they are - assumed to be always available locally (which is a normal outcome of - ``git clone`` and ``git fetch --tags``). - """ - if rev.startswith("refs/"): - # Always fetch remote refs. - return True - - if not looks_like_hash(rev): - # Git fetch would fail with abbreviated commits. - return False - - if cls.has_commit(dest, rev): - # Don't fetch if we have the commit locally. - return False - - return True - - @classmethod - def resolve_revision( - cls, dest: str, url: HiddenText, rev_options: RevOptions - ) -> RevOptions: - """ - Resolve a revision to a new RevOptions object with the SHA1 of the - branch, tag, or ref if found. - - Args: - rev_options: a RevOptions object. - """ - rev = rev_options.arg_rev - # The arg_rev property's implementation for Git ensures that the - # rev return value is always non-None. - assert rev is not None - - sha, is_branch = cls.get_revision_sha(dest, rev) - - if sha is not None: - rev_options = rev_options.make_new(sha) - rev_options.branch_name = rev if is_branch else None - - return rev_options - - # Do not show a warning for the common case of something that has - # the form of a Git commit hash. - if not looks_like_hash(rev): - logger.warning( - "Did not find branch or tag '%s', assuming revision or ref.", - rev, - ) - - if not cls._should_fetch(dest, rev): - return rev_options - - # fetch the requested revision - cls.run_command( - make_command("fetch", "-q", url, rev_options.to_args()), - cwd=dest, - ) - # Change the revision to the SHA of the ref we fetched - sha = cls.get_revision(dest, rev="FETCH_HEAD") - rev_options = rev_options.make_new(sha) - - return rev_options - - @classmethod - def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: - """ - Return whether the current commit hash equals the given name. - - Args: - dest: the repository directory. - name: a string name. - """ - if not name: - # Then avoid an unnecessary subprocess call. - return False - - return cls.get_revision(dest) == name - - def fetch_new( - self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int - ) -> None: - rev_display = rev_options.to_display() - logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest)) - if verbosity <= 0: - flags: Tuple[str, ...] = ("--quiet",) - elif verbosity == 1: - flags = () - else: - flags = ("--verbose", "--progress") - if self.get_git_version() >= (2, 17): - # Git added support for partial clone in 2.17 - # https://git-scm.com/docs/partial-clone - # Speeds up cloning by functioning without a complete copy of repository - self.run_command( - make_command( - "clone", - "--filter=blob:none", - *flags, - url, - dest, - ) - ) - else: - self.run_command(make_command("clone", *flags, url, dest)) - - if rev_options.rev: - # Then a specific revision was requested. 
- rev_options = self.resolve_revision(dest, url, rev_options) - branch_name = getattr(rev_options, "branch_name", None) - logger.debug("Rev options %s, branch_name %s", rev_options, branch_name) - if branch_name is None: - # Only do a checkout if the current commit id doesn't match - # the requested revision. - if not self.is_commit_id_equal(dest, rev_options.rev): - cmd_args = make_command( - "checkout", - "-q", - rev_options.to_args(), - ) - self.run_command(cmd_args, cwd=dest) - elif self.get_current_branch(dest) != branch_name: - # Then a specific branch was requested, and that branch - # is not yet checked out. - track_branch = f"origin/{branch_name}" - cmd_args = [ - "checkout", - "-b", - branch_name, - "--track", - track_branch, - ] - self.run_command(cmd_args, cwd=dest) - else: - sha = self.get_revision(dest) - rev_options = rev_options.make_new(sha) - - logger.info("Resolved %s to commit %s", url, rev_options.rev) - - #: repo may contain submodules - self.update_submodules(dest) - - def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - self.run_command( - make_command("config", "remote.origin.url", url), - cwd=dest, - ) - cmd_args = make_command("checkout", "-q", rev_options.to_args()) - self.run_command(cmd_args, cwd=dest) - - self.update_submodules(dest) - - def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - # First fetch changes from the default remote - if self.get_git_version() >= (1, 9): - # fetch tags in addition to everything else - self.run_command(["fetch", "-q", "--tags"], cwd=dest) - else: - self.run_command(["fetch", "-q"], cwd=dest) - # Then reset to wanted revision (maybe even origin/master) - rev_options = self.resolve_revision(dest, url, rev_options) - cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args()) - self.run_command(cmd_args, cwd=dest) - #: update submodules - self.update_submodules(dest) - - @classmethod - def get_remote_url(cls, location: str) -> str: - """ - Return URL of the first remote encountered. - - Raises RemoteNotFoundError if the repository does not have a remote - url configured. - """ - # We need to pass 1 for extra_ok_returncodes since the command - # exits with return code 1 if there are no matching lines. - stdout = cls.run_command( - ["config", "--get-regexp", r"remote\..*\.url"], - extra_ok_returncodes=(1,), - show_stdout=False, - stdout_only=True, - cwd=location, - ) - remotes = stdout.splitlines() - try: - found_remote = remotes[0] - except IndexError: - raise RemoteNotFoundError - - for remote in remotes: - if remote.startswith("remote.origin.url "): - found_remote = remote - break - url = found_remote.split(" ")[1] - return cls._git_remote_to_pip_url(url.strip()) - - @staticmethod - def _git_remote_to_pip_url(url: str) -> str: - """ - Convert a remote url from what git uses to what pip accepts. - - There are 3 legal forms **url** may take: - - 1. A fully qualified url: ssh://git@example.com/foo/bar.git - 2. A local project.git folder: /path/to/bare/repository.git - 3. SCP shorthand for form 1: git@example.com:foo/bar.git - - Form 1 is output as-is. Form 2 must be converted to URI and form 3 must - be converted to form 1. - - See the corresponding test test_git_remote_url_to_pip() for examples of - sample inputs/outputs. - """ - if re.match(r"\w+://", url): - # This is already valid. Pass it though as-is. - return url - if os.path.exists(url): - # A local bare remote (git clone --mirror). - # Needs a file:// prefix. 
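The docstring above lists the three remote-URL forms accepted by _git_remote_to_pip_url(); a hedged sketch of the expected normalisation, with illustrative URLs and the Git class from this hunk assumed to be importable:

    # Form 1: already a fully qualified URL -- passed through unchanged.
    assert Git._git_remote_to_pip_url("ssh://git@example.com/foo/bar.git") == "ssh://git@example.com/foo/bar.git"
    # Form 3: SCP shorthand -- gains an ssh:// prefix and the first ':' becomes '/'.
    assert Git._git_remote_to_pip_url("git@example.com:foo/bar.git") == "ssh://git@example.com/foo/bar.git"
    # Form 2: a local bare repository path that exists on disk becomes a file:// URI,
    # e.g. "/srv/repos/bar.git" -> "file:///srv/repos/bar.git".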
- return pathlib.PurePath(url).as_uri() - scp_match = SCP_REGEX.match(url) - if scp_match: - # Add an ssh:// prefix and replace the ':' with a '/'. - return scp_match.expand(r"ssh://\1\2/\3") - # Otherwise, bail out. - raise RemoteNotValidError(url) - - @classmethod - def has_commit(cls, location: str, rev: str) -> bool: - """ - Check if rev is a commit that is available in the local repository. - """ - try: - cls.run_command( - ["rev-parse", "-q", "--verify", "sha^" + rev], - cwd=location, - log_failed_cmd=False, - ) - except InstallationError: - return False - else: - return True - - @classmethod - def get_revision(cls, location: str, rev: Optional[str] = None) -> str: - if rev is None: - rev = "HEAD" - current_rev = cls.run_command( - ["rev-parse", rev], - show_stdout=False, - stdout_only=True, - cwd=location, - ) - return current_rev.strip() - - @classmethod - def get_subdirectory(cls, location: str) -> Optional[str]: - """ - Return the path to Python project root, relative to the repo root. - Return None if the project root is in the repo root. - """ - # find the repo root - git_dir = cls.run_command( - ["rev-parse", "--git-dir"], - show_stdout=False, - stdout_only=True, - cwd=location, - ).strip() - if not os.path.isabs(git_dir): - git_dir = os.path.join(location, git_dir) - repo_root = os.path.abspath(os.path.join(git_dir, "..")) - return find_path_to_project_root_from_repo_root(location, repo_root) - - @classmethod - def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: - """ - Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. - That's required because although they use SSH they sometimes don't - work with a ssh:// scheme (e.g. GitHub). But we need a scheme for - parsing. Hence we remove it again afterwards and return it as a stub. 
- """ - # Works around an apparent Git bug - # (see https://article.gmane.org/gmane.comp.version-control.git/146500) - scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith("file"): - initial_slashes = path[: -len(path.lstrip("/"))] - newpath = initial_slashes + urllib.request.url2pathname(path).replace( - "\\", "/" - ).lstrip("/") - after_plus = scheme.find("+") + 1 - url = scheme[:after_plus] + urlunsplit( - (scheme[after_plus:], netloc, newpath, query, fragment), - ) - - if "://" not in url: - assert "file:" not in url - url = url.replace("git+", "git+ssh://") - url, rev, user_pass = super().get_url_rev_and_auth(url) - url = url.replace("ssh://", "") - else: - url, rev, user_pass = super().get_url_rev_and_auth(url) - - return url, rev, user_pass - - @classmethod - def update_submodules(cls, location: str) -> None: - if not os.path.exists(os.path.join(location, ".gitmodules")): - return - cls.run_command( - ["submodule", "update", "--init", "--recursive", "-q"], - cwd=location, - ) - - @classmethod - def get_repository_root(cls, location: str) -> Optional[str]: - loc = super().get_repository_root(location) - if loc: - return loc - try: - r = cls.run_command( - ["rev-parse", "--show-toplevel"], - cwd=location, - show_stdout=False, - stdout_only=True, - on_returncode="raise", - log_failed_cmd=False, - ) - except BadCommand: - logger.debug( - "could not determine if %s is under git control " - "because git is not available", - location, - ) - return None - except InstallationError: - return None - return os.path.normpath(r.rstrip("\r\n")) - - @staticmethod - def should_add_vcs_url_prefix(repo_url: str) -> bool: - """In either https or ssh form, requirements must be prefixed with git+.""" - return True - - -vcs.register(Git) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py b/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py deleted file mode 100644 index 2a005e0..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py +++ /dev/null @@ -1,163 +0,0 @@ -import configparser -import logging -import os -from typing import List, Optional, Tuple - -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.utils.misc import HiddenText, display_path -from pip._internal.utils.subprocess import make_command -from pip._internal.utils.urls import path_to_url -from pip._internal.vcs.versioncontrol import ( - RevOptions, - VersionControl, - find_path_to_project_root_from_repo_root, - vcs, -) - -logger = logging.getLogger(__name__) - - -class Mercurial(VersionControl): - name = "hg" - dirname = ".hg" - repo_name = "clone" - schemes = ( - "hg+file", - "hg+http", - "hg+https", - "hg+ssh", - "hg+static-http", - ) - - @staticmethod - def get_base_rev_args(rev: str) -> List[str]: - return [rev] - - def fetch_new( - self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int - ) -> None: - rev_display = rev_options.to_display() - logger.info( - "Cloning hg %s%s to %s", - url, - rev_display, - display_path(dest), - ) - if verbosity <= 0: - flags: Tuple[str, ...] 
= ("--quiet",) - elif verbosity == 1: - flags = () - elif verbosity == 2: - flags = ("--verbose",) - else: - flags = ("--verbose", "--debug") - self.run_command(make_command("clone", "--noupdate", *flags, url, dest)) - self.run_command( - make_command("update", *flags, rev_options.to_args()), - cwd=dest, - ) - - def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - repo_config = os.path.join(dest, self.dirname, "hgrc") - config = configparser.RawConfigParser() - try: - config.read(repo_config) - config.set("paths", "default", url.secret) - with open(repo_config, "w") as config_file: - config.write(config_file) - except (OSError, configparser.NoSectionError) as exc: - logger.warning("Could not switch Mercurial repository to %s: %s", url, exc) - else: - cmd_args = make_command("update", "-q", rev_options.to_args()) - self.run_command(cmd_args, cwd=dest) - - def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - self.run_command(["pull", "-q"], cwd=dest) - cmd_args = make_command("update", "-q", rev_options.to_args()) - self.run_command(cmd_args, cwd=dest) - - @classmethod - def get_remote_url(cls, location: str) -> str: - url = cls.run_command( - ["showconfig", "paths.default"], - show_stdout=False, - stdout_only=True, - cwd=location, - ).strip() - if cls._is_local_repository(url): - url = path_to_url(url) - return url.strip() - - @classmethod - def get_revision(cls, location: str) -> str: - """ - Return the repository-local changeset revision number, as an integer. - """ - current_revision = cls.run_command( - ["parents", "--template={rev}"], - show_stdout=False, - stdout_only=True, - cwd=location, - ).strip() - return current_revision - - @classmethod - def get_requirement_revision(cls, location: str) -> str: - """ - Return the changeset identification hash, as a 40-character - hexadecimal string - """ - current_rev_hash = cls.run_command( - ["parents", "--template={node}"], - show_stdout=False, - stdout_only=True, - cwd=location, - ).strip() - return current_rev_hash - - @classmethod - def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: - """Always assume the versions don't match""" - return False - - @classmethod - def get_subdirectory(cls, location: str) -> Optional[str]: - """ - Return the path to Python project root, relative to the repo root. - Return None if the project root is in the repo root. 
- """ - # find the repo root - repo_root = cls.run_command( - ["root"], show_stdout=False, stdout_only=True, cwd=location - ).strip() - if not os.path.isabs(repo_root): - repo_root = os.path.abspath(os.path.join(location, repo_root)) - return find_path_to_project_root_from_repo_root(location, repo_root) - - @classmethod - def get_repository_root(cls, location: str) -> Optional[str]: - loc = super().get_repository_root(location) - if loc: - return loc - try: - r = cls.run_command( - ["root"], - cwd=location, - show_stdout=False, - stdout_only=True, - on_returncode="raise", - log_failed_cmd=False, - ) - except BadCommand: - logger.debug( - "could not determine if %s is under hg control " - "because hg is not available", - location, - ) - return None - except InstallationError: - return None - return os.path.normpath(r.rstrip("\r\n")) - - -vcs.register(Mercurial) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/subversion.py b/venv/Lib/site-packages/pip/_internal/vcs/subversion.py deleted file mode 100644 index 89c8754..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/subversion.py +++ /dev/null @@ -1,324 +0,0 @@ -import logging -import os -import re -from typing import List, Optional, Tuple - -from pip._internal.utils.misc import ( - HiddenText, - display_path, - is_console_interactive, - is_installable_dir, - split_auth_from_netloc, -) -from pip._internal.utils.subprocess import CommandArgs, make_command -from pip._internal.vcs.versioncontrol import ( - AuthInfo, - RemoteNotFoundError, - RevOptions, - VersionControl, - vcs, -) - -logger = logging.getLogger(__name__) - -_svn_xml_url_re = re.compile('url="([^"]+)"') -_svn_rev_re = re.compile(r'committed-rev="(\d+)"') -_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') -_svn_info_xml_url_re = re.compile(r"(.*)") - - -class Subversion(VersionControl): - name = "svn" - dirname = ".svn" - repo_name = "checkout" - schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file") - - @classmethod - def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: - return True - - @staticmethod - def get_base_rev_args(rev: str) -> List[str]: - return ["-r", rev] - - @classmethod - def get_revision(cls, location: str) -> str: - """ - Return the maximum revision for all files under a given location - """ - # Note: taken from setuptools.command.egg_info - revision = 0 - - for base, dirs, _ in os.walk(location): - if cls.dirname not in dirs: - dirs[:] = [] - continue # no sense walking uncontrolled subdirs - dirs.remove(cls.dirname) - entries_fn = os.path.join(base, cls.dirname, "entries") - if not os.path.exists(entries_fn): - # FIXME: should we warn? - continue - - dirurl, localrev = cls._get_svn_url_rev(base) - - if base == location: - assert dirurl is not None - base = dirurl + "/" # save the root url - elif not dirurl or not dirurl.startswith(base): - dirs[:] = [] - continue # not part of the same svn tree, skip it - revision = max(revision, localrev) - return str(revision) - - @classmethod - def get_netloc_and_auth( - cls, netloc: str, scheme: str - ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: - """ - This override allows the auth information to be passed to svn via the - --username and --password options instead of via the URL. - """ - if scheme == "ssh": - # The --username and --password options can't be used for - # svn+ssh URLs, so keep the auth information in the URL. 
- """ - # Works around an apparent Git bug - # (see https://article.gmane.org/gmane.comp.version-control.git/146500) - scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith("file"): - initial_slashes = path[: -len(path.lstrip("/"))] - newpath = initial_slashes + urllib.request.url2pathname(path).replace( - "\\", "/" - ).lstrip("/") - after_plus = scheme.find("+") + 1 - url = scheme[:after_plus] + urlunsplit( - (scheme[after_plus:], netloc, newpath, query, fragment), - ) - - if "://" not in url: - assert "file:" not in url - url = url.replace("git+", "git+ssh://") - url, rev, user_pass = super().get_url_rev_and_auth(url) - url = url.replace("ssh://", "") - else: - url, rev, user_pass = super().get_url_rev_and_auth(url) - - return url, rev, user_pass - - @classmethod - def update_submodules(cls, location: str) -> None: - if not os.path.exists(os.path.join(location, ".gitmodules")): - return - cls.run_command( - ["submodule", "update", "--init", "--recursive", "-q"], - cwd=location, - ) - - @classmethod - def get_repository_root(cls, location: str) -> Optional[str]: - loc = super().get_repository_root(location) - if loc: - return loc - try: - r = cls.run_command( - ["rev-parse", "--show-toplevel"], - cwd=location, - show_stdout=False, - stdout_only=True, - on_returncode="raise", - log_failed_cmd=False, - ) - except BadCommand: - logger.debug( - "could not determine if %s is under git control " - "because git is not available", - location, - ) - return None - except InstallationError: - return None - return os.path.normpath(r.rstrip("\r\n")) - - @staticmethod - def should_add_vcs_url_prefix(repo_url: str) -> bool: - """In either https or ssh form, requirements must be prefixed with git+.""" - return True - - -vcs.register(Git) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py b/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py deleted file mode 100644 index 2a005e0..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/mercurial.py +++ /dev/null @@ -1,163 +0,0 @@ -import configparser -import logging -import os -from typing import List, Optional, Tuple - -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.utils.misc import HiddenText, display_path -from pip._internal.utils.subprocess import make_command -from pip._internal.utils.urls import path_to_url -from pip._internal.vcs.versioncontrol import ( - RevOptions, - VersionControl, - find_path_to_project_root_from_repo_root, - vcs, -) - -logger = logging.getLogger(__name__) - - -class Mercurial(VersionControl): - name = "hg" - dirname = ".hg" - repo_name = "clone" - schemes = ( - "hg+file", - "hg+http", - "hg+https", - "hg+ssh", - "hg+static-http", - ) - - @staticmethod - def get_base_rev_args(rev: str) -> List[str]: - return [rev] - - def fetch_new( - self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int - ) -> None: - rev_display = rev_options.to_display() - logger.info( - "Cloning hg %s%s to %s", - url, - rev_display, - display_path(dest), - ) - if verbosity <= 0: - flags: Tuple[str, ...]
- - :return: A tuple containing the parts of the version information or - ``()`` if the version returned from ``svn`` could not be parsed. - :raises: BadCommand: If ``svn`` is not installed. - """ - # Example versions: - # svn, version 1.10.3 (r1842928) - # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 - # svn, version 1.7.14 (r1542130) - # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu - # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0) - # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2 - version_prefix = "svn, version " - version = self.run_command(["--version"], show_stdout=False, stdout_only=True) - if not version.startswith(version_prefix): - return () - - version = version[len(version_prefix) :].split()[0] - version_list = version.partition("-")[0].split(".") - try: - parsed_version = tuple(map(int, version_list)) - except ValueError: - return () - - return parsed_version - - def get_vcs_version(self) -> Tuple[int, ...]: - """Return the version of the currently installed Subversion client. - - If the version of the Subversion client has already been queried, - a cached value will be used. - - :return: A tuple containing the parts of the version information or - ``()`` if the version returned from ``svn`` could not be parsed. - :raises: BadCommand: If ``svn`` is not installed. - """ - if self._vcs_version is not None: - # Use cached version, if available. - # If parsing the version failed previously (empty tuple), - # do not attempt to parse it again. - return self._vcs_version - - vcs_version = self.call_vcs_version() - self._vcs_version = vcs_version - return vcs_version - - def get_remote_call_options(self) -> CommandArgs: - """Return options to be used on calls to Subversion that contact the server. - - These options are applicable for the following ``svn`` subcommands used - in this class. - - - checkout - - switch - - update - - :return: A list of command line arguments to pass to ``svn``. - """ - if not self.use_interactive: - # --non-interactive switch is available since Subversion 0.14.4. - # Subversion < 1.8 runs in interactive mode by default. - return ["--non-interactive"] - - svn_version = self.get_vcs_version() - # By default, Subversion >= 1.8 runs in non-interactive mode if - # stdin is not a TTY. Since that is how pip invokes SVN, in - # call_subprocess(), pip must pass --force-interactive to ensure - # the user can be prompted for a password, if required. - # SVN added the --force-interactive option in SVN 1.8. Since - # e.g. RHEL/CentOS 7, which is supported until 2024, ships with - # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip - # can't safely add the option if the SVN version is < 1.8 (or unknown). 
- if svn_version >= (1, 8): - return ["--force-interactive"] - - return [] - - def fetch_new( - self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int - ) -> None: - rev_display = rev_options.to_display() - logger.info( - "Checking out %s%s to %s", - url, - rev_display, - display_path(dest), - ) - if verbosity <= 0: - flag = "--quiet" - else: - flag = "" - cmd_args = make_command( - "checkout", - flag, - self.get_remote_call_options(), - rev_options.to_args(), - url, - dest, - ) - self.run_command(cmd_args) - - def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - cmd_args = make_command( - "switch", - self.get_remote_call_options(), - rev_options.to_args(), - url, - dest, - ) - self.run_command(cmd_args) - - def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - cmd_args = make_command( - "update", - self.get_remote_call_options(), - rev_options.to_args(), - dest, - ) - self.run_command(cmd_args) - - -vcs.register(Subversion) diff --git a/venv/Lib/site-packages/pip/_internal/vcs/versioncontrol.py b/venv/Lib/site-packages/pip/_internal/vcs/versioncontrol.py deleted file mode 100644 index 02bbf68..0000000 --- a/venv/Lib/site-packages/pip/_internal/vcs/versioncontrol.py +++ /dev/null @@ -1,705 +0,0 @@ -"""Handles all VCS (version control) support""" - -import logging -import os -import shutil -import sys -import urllib.parse -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterable, - Iterator, - List, - Mapping, - Optional, - Tuple, - Type, - Union, -) - -from pip._internal.cli.spinners import SpinnerInterface -from pip._internal.exceptions import BadCommand, InstallationError -from pip._internal.utils.misc import ( - HiddenText, - ask_path_exists, - backup_dir, - display_path, - hide_url, - hide_value, - is_installable_dir, - rmtree, -) -from pip._internal.utils.subprocess import ( - CommandArgs, - call_subprocess, - format_command_args, - make_command, -) -from pip._internal.utils.urls import get_url_scheme - -if TYPE_CHECKING: - # Literal was introduced in Python 3.8. - # - # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. - from typing import Literal - - -__all__ = ["vcs"] - - -logger = logging.getLogger(__name__) - -AuthInfo = Tuple[Optional[str], Optional[str]] - - -def is_url(name: str) -> bool: - """ - Return true if the name looks like a URL. - """ - scheme = get_url_scheme(name) - if scheme is None: - return False - return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes - - -def make_vcs_requirement_url( - repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None -) -> str: - """ - Return the URL for a VCS requirement. - - Args: - repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). - project_name: the (unescaped) project name. - """ - egg_project_name = project_name.replace("-", "_") - req = f"{repo_url}@{rev}#egg={egg_project_name}" - if subdir: - req += f"&subdirectory={subdir}" - - return req - - -def find_path_to_project_root_from_repo_root( - location: str, repo_root: str -) -> Optional[str]: - """ - Find the the Python project's root by searching up the filesystem from - `location`. Return the path to project root relative to `repo_root`. - Return None if the project root is `repo_root`, or cannot be found. - """ - # find project root. 
- orig_location = location - while not is_installable_dir(location): - last_location = location - location = os.path.dirname(location) - if location == last_location: - # We've traversed up to the root of the filesystem without - # finding a Python project. - logger.warning( - "Could not find a Python project for directory %s (tried all " - "parent directories)", - orig_location, - ) - return None - - if os.path.samefile(repo_root, location): - return None - - return os.path.relpath(location, repo_root) - - -class RemoteNotFoundError(Exception): - pass - - -class RemoteNotValidError(Exception): - def __init__(self, url: str): - super().__init__(url) - self.url = url - - -class RevOptions: - - """ - Encapsulates a VCS-specific revision to install, along with any VCS - install options. - - Instances of this class should be treated as if immutable. - """ - - def __init__( - self, - vc_class: Type["VersionControl"], - rev: Optional[str] = None, - extra_args: Optional[CommandArgs] = None, - ) -> None: - """ - Args: - vc_class: a VersionControl subclass. - rev: the name of the revision to install. - extra_args: a list of extra options. - """ - if extra_args is None: - extra_args = [] - - self.extra_args = extra_args - self.rev = rev - self.vc_class = vc_class - self.branch_name: Optional[str] = None - - def __repr__(self) -> str: - return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>" - - @property - def arg_rev(self) -> Optional[str]: - if self.rev is None: - return self.vc_class.default_arg_rev - - return self.rev - - def to_args(self) -> CommandArgs: - """ - Return the VCS-specific command arguments. - """ - args: CommandArgs = [] - rev = self.arg_rev - if rev is not None: - args += self.vc_class.get_base_rev_args(rev) - args += self.extra_args - - return args - - def to_display(self) -> str: - if not self.rev: - return "" - - return f" (to revision {self.rev})" - - def make_new(self, rev: str) -> "RevOptions": - """ - Make a copy of the current instance, but with a new rev. - - Args: - rev: the name of the revision for the new object. - """ - return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) - - -class VcsSupport: - _registry: Dict[str, "VersionControl"] = {} - schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"] - - def __init__(self) -> None: - # Register more schemes with urlparse for various version control - # systems - urllib.parse.uses_netloc.extend(self.schemes) - super().__init__() - - def __iter__(self) -> Iterator[str]: - return self._registry.__iter__() - - @property - def backends(self) -> List["VersionControl"]: - return list(self._registry.values()) - - @property - def dirnames(self) -> List[str]: - return [backend.dirname for backend in self.backends] - - @property - def all_schemes(self) -> List[str]: - schemes: List[str] = [] - for backend in self.backends: - schemes.extend(backend.schemes) - return schemes - - def register(self, cls: Type["VersionControl"]) -> None: - if not hasattr(cls, "name"): - logger.warning("Cannot register VCS %s", cls.__name__) - return - if cls.name not in self._registry: - self._registry[cls.name] = cls() - logger.debug("Registered VCS backend: %s", cls.name) - - def unregister(self, name: str) -> None: - if name in self._registry: - del self._registry[name] - - def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]: - """ - Return a VersionControl object if a repository of that type is found - at the given directory.
- """ - vcs_backends = {} - for vcs_backend in self._registry.values(): - repo_path = vcs_backend.get_repository_root(location) - if not repo_path: - continue - logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name) - vcs_backends[repo_path] = vcs_backend - - if not vcs_backends: - return None - - # Choose the VCS in the inner-most directory. Since all repository - # roots found here would be either `location` or one of its - # parents, the longest path should have the most path components, - # i.e. the backend representing the inner-most repository. - inner_most_repo_path = max(vcs_backends, key=len) - return vcs_backends[inner_most_repo_path] - - def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]: - """ - Return a VersionControl object or None. - """ - for vcs_backend in self._registry.values(): - if scheme in vcs_backend.schemes: - return vcs_backend - return None - - def get_backend(self, name: str) -> Optional["VersionControl"]: - """ - Return a VersionControl object or None. - """ - name = name.lower() - return self._registry.get(name) - - -vcs = VcsSupport() - - -class VersionControl: - name = "" - dirname = "" - repo_name = "" - # List of supported schemes for this Version Control - schemes: Tuple[str, ...] = () - # Iterable of environment variable names to pass to call_subprocess(). - unset_environ: Tuple[str, ...] = () - default_arg_rev: Optional[str] = None - - @classmethod - def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: - """ - Return whether the vcs prefix (e.g. "git+") should be added to a - repository's remote url when used in a requirement. - """ - return not remote_url.lower().startswith(f"{cls.name}:") - - @classmethod - def get_subdirectory(cls, location: str) -> Optional[str]: - """ - Return the path to Python project root, relative to the repo root. - Return None if the project root is in the repo root. - """ - return None - - @classmethod - def get_requirement_revision(cls, repo_dir: str) -> str: - """ - Return the revision string that should be used in a requirement. - """ - return cls.get_revision(repo_dir) - - @classmethod - def get_src_requirement(cls, repo_dir: str, project_name: str) -> str: - """ - Return the requirement string to use to redownload the files - currently at the given repository directory. - - Args: - project_name: the (unescaped) project name. - - The return value has a form similar to the following: - - {repository_url}@{revision}#egg={project_name} - """ - repo_url = cls.get_remote_url(repo_dir) - - if cls.should_add_vcs_url_prefix(repo_url): - repo_url = f"{cls.name}+{repo_url}" - - revision = cls.get_requirement_revision(repo_dir) - subdir = cls.get_subdirectory(repo_dir) - req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir) - - return req - - @staticmethod - def get_base_rev_args(rev: str) -> List[str]: - """ - Return the base revision arguments for a vcs command. - - Args: - rev: the name of a revision to install. Cannot be None. - """ - raise NotImplementedError - - def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: - """ - Return true if the commit hash checked out at dest matches - the revision in url. - - Always return False, if the VCS does not support immutable commit - hashes. - - This method does not check if there are local uncommitted changes - in dest after checkout, as pip currently has no use case for that. 
- """ - return False - - @classmethod - def make_rev_options( - cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None - ) -> RevOptions: - """ - Return a RevOptions object. - - Args: - rev: the name of a revision to install. - extra_args: a list of extra options. - """ - return RevOptions(cls, rev, extra_args=extra_args) - - @classmethod - def _is_local_repository(cls, repo: str) -> bool: - """ - posix absolute paths start with os.path.sep, - win32 ones start with drive (like c:\\folder) - """ - drive, tail = os.path.splitdrive(repo) - return repo.startswith(os.path.sep) or bool(drive) - - @classmethod - def get_netloc_and_auth( - cls, netloc: str, scheme: str - ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: - """ - Parse the repository URL's netloc, and return the new netloc to use - along with auth information. - - Args: - netloc: the original repository URL netloc. - scheme: the repository URL's scheme without the vcs prefix. - - This is mainly for the Subversion class to override, so that auth - information can be provided via the --username and --password options - instead of through the URL. For other subclasses like Git without - such an option, auth information must stay in the URL. - - Returns: (netloc, (username, password)). - """ - return netloc, (None, None) - - @classmethod - def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: - """ - Parse the repository URL to use, and return the URL, revision, - and auth info to use. - - Returns: (url, rev, (username, password)). - """ - scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) - if "+" not in scheme: - raise ValueError( - "Sorry, {!r} is a malformed VCS url. " - "The format is +://, " - "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) - ) - # Remove the vcs prefix. - scheme = scheme.split("+", 1)[1] - netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) - rev = None - if "@" in path: - path, rev = path.rsplit("@", 1) - if not rev: - raise InstallationError( - "The URL {!r} has an empty revision (after @) " - "which is not supported. Include a revision after @ " - "or remove @ from the URL.".format(url) - ) - url = urllib.parse.urlunsplit((scheme, netloc, path, query, "")) - return url, rev, user_pass - - @staticmethod - def make_rev_args( - username: Optional[str], password: Optional[HiddenText] - ) -> CommandArgs: - """ - Return the RevOptions "extra arguments" to use in obtain(). - """ - return [] - - def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]: - """ - Return the URL and RevOptions object to use in obtain(), - as a tuple (url, rev_options). - """ - secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) - username, secret_password = user_pass - password: Optional[HiddenText] = None - if secret_password is not None: - password = hide_value(secret_password) - extra_args = self.make_rev_args(username, password) - rev_options = self.make_rev_options(rev, extra_args=extra_args) - - return hide_url(secret_url), rev_options - - @staticmethod - def normalize_url(url: str) -> str: - """ - Normalize a URL for comparison by unquoting it and removing any - trailing slash. - """ - return urllib.parse.unquote(url).rstrip("/") - - @classmethod - def compare_urls(cls, url1: str, url2: str) -> bool: - """ - Compare two repo URLs for identity, ignoring incidental differences. 
- """ - return cls.normalize_url(url1) == cls.normalize_url(url2) - - def fetch_new( - self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int - ) -> None: - """ - Fetch a revision from a repository, in the case that this is the - first fetch from the repository. - - Args: - dest: the directory to fetch the repository to. - rev_options: a RevOptions object. - verbosity: verbosity level. - """ - raise NotImplementedError - - def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - """ - Switch the repo at ``dest`` to point to ``URL``. - - Args: - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - """ - Update an already-existing repo to the given ``rev_options``. - - Args: - rev_options: a RevOptions object. - """ - raise NotImplementedError - - @classmethod - def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: - """ - Return whether the id of the current commit equals the given name. - - Args: - dest: the repository directory. - name: a string name. - """ - raise NotImplementedError - - def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None: - """ - Install or update in editable mode the package represented by this - VersionControl object. - - :param dest: the repository directory in which to install or update. - :param url: the repository URL starting with a vcs prefix. - :param verbosity: verbosity level. - """ - url, rev_options = self.get_url_rev_options(url) - - if not os.path.exists(dest): - self.fetch_new(dest, url, rev_options, verbosity=verbosity) - return - - rev_display = rev_options.to_display() - if self.is_repository_directory(dest): - existing_url = self.get_remote_url(dest) - if self.compare_urls(existing_url, url.secret): - logger.debug( - "%s in %s exists, and has correct URL (%s)", - self.repo_name.title(), - display_path(dest), - url, - ) - if not self.is_commit_id_equal(dest, rev_options.rev): - logger.info( - "Updating %s %s%s", - display_path(dest), - self.repo_name, - rev_display, - ) - self.update(dest, url, rev_options) - else: - logger.info("Skipping because already up-to-date.") - return - - logger.warning( - "%s %s in %s exists with URL %s", - self.name, - self.repo_name, - display_path(dest), - existing_url, - ) - prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b")) - else: - logger.warning( - "Directory %s already exists, and is not a %s %s.", - dest, - self.name, - self.repo_name, - ) - # https://github.com/python/mypy/issues/1174 - prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore - - logger.warning( - "The plan is to install the %s repository %s", - self.name, - url, - ) - response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1]) - - if response == "a": - sys.exit(-1) - - if response == "w": - logger.warning("Deleting %s", display_path(dest)) - rmtree(dest) - self.fetch_new(dest, url, rev_options, verbosity=verbosity) - return - - if response == "b": - dest_dir = backup_dir(dest) - logger.warning("Backing up %s to %s", display_path(dest), dest_dir) - shutil.move(dest, dest_dir) - self.fetch_new(dest, url, rev_options, verbosity=verbosity) - return - - # Do nothing if the response is "i". 
- if response == "s": - logger.info( - "Switching %s %s to %s%s", - self.repo_name, - display_path(dest), - url, - rev_display, - ) - self.switch(dest, url, rev_options) - - def unpack(self, location: str, url: HiddenText, verbosity: int) -> None: - """ - Clean up current location and download the url repository - (and vcs infos) into location - - :param url: the repository URL starting with a vcs prefix. - :param verbosity: verbosity level. - """ - if os.path.exists(location): - rmtree(location) - self.obtain(location, url=url, verbosity=verbosity) - - @classmethod - def get_remote_url(cls, location: str) -> str: - """ - Return the url used at location - - Raises RemoteNotFoundError if the repository does not have a remote - url configured. - """ - raise NotImplementedError - - @classmethod - def get_revision(cls, location: str) -> str: - """ - Return the current commit id of the files at the given location. - """ - raise NotImplementedError - - @classmethod - def run_command( - cls, - cmd: Union[List[str], CommandArgs], - show_stdout: bool = True, - cwd: Optional[str] = None, - on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", - extra_ok_returncodes: Optional[Iterable[int]] = None, - command_desc: Optional[str] = None, - extra_environ: Optional[Mapping[str, Any]] = None, - spinner: Optional[SpinnerInterface] = None, - log_failed_cmd: bool = True, - stdout_only: bool = False, - ) -> str: - """ - Run a VCS subcommand - This is simply a wrapper around call_subprocess that adds the VCS - command name, and checks that the VCS is available - """ - cmd = make_command(cls.name, *cmd) - if command_desc is None: - command_desc = format_command_args(cmd) - try: - return call_subprocess( - cmd, - show_stdout, - cwd, - on_returncode=on_returncode, - extra_ok_returncodes=extra_ok_returncodes, - command_desc=command_desc, - extra_environ=extra_environ, - unset_environ=cls.unset_environ, - spinner=spinner, - log_failed_cmd=log_failed_cmd, - stdout_only=stdout_only, - ) - except FileNotFoundError: - # errno.ENOENT = no such file or directory - # In other words, the VCS executable isn't available - raise BadCommand( - f"Cannot find command {cls.name!r} - do you have " - f"{cls.name!r} installed and in your PATH?" - ) - except PermissionError: - # errno.EACCES = Permission denied - # This error occurs, for instance, when the command is installed - # only for another user. So, the current user don't have - # permission to call the other user command. - raise BadCommand( - f"No permission to execute {cls.name!r} - install it " - f"locally, globally (ask admin), or check your PATH. " - f"See possible solutions at " - f"https://pip.pypa.io/en/latest/reference/pip_freeze/" - f"#fixing-permission-denied." - ) - - @classmethod - def is_repository_directory(cls, path: str) -> bool: - """ - Return whether a directory path is a repository directory. - """ - logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name) - return os.path.exists(os.path.join(path, cls.dirname)) - - @classmethod - def get_repository_root(cls, location: str) -> Optional[str]: - """ - Return the "root" (top-level) directory controlled by the vcs, - or `None` if the directory is not in any. - - It is meant to be overridden to implement smarter detection - mechanisms for specific vcs. - - This can do more than is_repository_directory() alone. For - example, the Git override checks that Git is actually available. 
- """ - if cls.is_repository_directory(location): - return location - return None diff --git a/venv/Lib/site-packages/pip/_internal/wheel_builder.py b/venv/Lib/site-packages/pip/_internal/wheel_builder.py deleted file mode 100644 index d066344..0000000 --- a/venv/Lib/site-packages/pip/_internal/wheel_builder.py +++ /dev/null @@ -1,377 +0,0 @@ -"""Orchestrator for building wheels from InstallRequirements. -""" - -import logging -import os.path -import re -import shutil -from typing import Any, Callable, Iterable, List, Optional, Tuple - -from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version -from pip._vendor.packaging.version import InvalidVersion, Version - -from pip._internal.cache import WheelCache -from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel -from pip._internal.metadata import FilesystemWheel, get_wheel_distribution -from pip._internal.models.link import Link -from pip._internal.models.wheel import Wheel -from pip._internal.operations.build.wheel import build_wheel_pep517 -from pip._internal.operations.build.wheel_editable import build_wheel_editable -from pip._internal.operations.build.wheel_legacy import build_wheel_legacy -from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed -from pip._internal.utils.setuptools_build import make_setuptools_clean_args -from pip._internal.utils.subprocess import call_subprocess -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.urls import path_to_url -from pip._internal.vcs import vcs - -logger = logging.getLogger(__name__) - -_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) - -BinaryAllowedPredicate = Callable[[InstallRequirement], bool] -BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] - - -def _contains_egg_info(s: str) -> bool: - """Determine whether the string looks like an egg_info. - - :param s: The string to parse. E.g. foo-2.1 - """ - return bool(_egg_info_re.search(s)) - - -def _should_build( - req: InstallRequirement, - need_wheel: bool, - check_binary_allowed: BinaryAllowedPredicate, -) -> bool: - """Return whether an InstallRequirement should be built into a wheel.""" - if req.constraint: - # never build requirements that are merely constraints - return False - if req.is_wheel: - if need_wheel: - logger.info( - "Skipping %s, due to already being wheel.", - req.name, - ) - return False - - if need_wheel: - # i.e. pip wheel, not pip install - return True - - # From this point, this concerns the pip install command only - # (need_wheel=False). 
- - if not req.source_dir: - return False - - if req.editable: - # we only build PEP 660 editable requirements - return req.supports_pyproject_editable() - - if req.use_pep517: - return True - - if not check_binary_allowed(req): - logger.info( - "Skipping wheel build for %s, due to binaries being disabled for it.", - req.name, - ) - return False - - if not is_wheel_installed(): - # we don't build legacy requirements if wheel is not installed - logger.info( - "Using legacy 'setup.py install' for %s, " - "since package 'wheel' is not installed.", - req.name, - ) - return False - - return True - - -def should_build_for_wheel_command( - req: InstallRequirement, -) -> bool: - return _should_build(req, need_wheel=True, check_binary_allowed=_always_true) - - -def should_build_for_install_command( - req: InstallRequirement, - check_binary_allowed: BinaryAllowedPredicate, -) -> bool: - return _should_build( - req, need_wheel=False, check_binary_allowed=check_binary_allowed - ) - - -def _should_cache( - req: InstallRequirement, -) -> Optional[bool]: - """ - Return whether a built InstallRequirement can be stored in the persistent - wheel cache, assuming the wheel cache is available, and _should_build() - has determined a wheel needs to be built. - """ - if req.editable or not req.source_dir: - # never cache editable requirements - return False - - if req.link and req.link.is_vcs: - # VCS checkout. Do not cache - # unless it points to an immutable commit hash. - assert not req.editable - assert req.source_dir - vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) - assert vcs_backend - if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): - return True - return False - - assert req.link - base, ext = req.link.splitext() - if _contains_egg_info(base): - return True - - # Otherwise, do not cache. - return False - - -def _get_cache_dir( - req: InstallRequirement, - wheel_cache: WheelCache, -) -> str: - """Return the persistent or temporary cache directory where the built - wheel need to be stored. 
- """ - cache_available = bool(wheel_cache.cache_dir) - assert req.link - if cache_available and _should_cache(req): - cache_dir = wheel_cache.get_path_for_link(req.link) - else: - cache_dir = wheel_cache.get_ephem_path_for_link(req.link) - return cache_dir - - -def _always_true(_: Any) -> bool: - return True - - -def _verify_one(req: InstallRequirement, wheel_path: str) -> None: - canonical_name = canonicalize_name(req.name or "") - w = Wheel(os.path.basename(wheel_path)) - if canonicalize_name(w.name) != canonical_name: - raise InvalidWheelFilename( - "Wheel has unexpected file name: expected {!r}, " - "got {!r}".format(canonical_name, w.name), - ) - dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name) - dist_verstr = str(dist.version) - if canonicalize_version(dist_verstr) != canonicalize_version(w.version): - raise InvalidWheelFilename( - "Wheel has unexpected file name: expected {!r}, " - "got {!r}".format(dist_verstr, w.version), - ) - metadata_version_value = dist.metadata_version - if metadata_version_value is None: - raise UnsupportedWheel("Missing Metadata-Version") - try: - metadata_version = Version(metadata_version_value) - except InvalidVersion: - msg = f"Invalid Metadata-Version: {metadata_version_value}" - raise UnsupportedWheel(msg) - if metadata_version >= Version("1.2") and not isinstance(dist.version, Version): - raise UnsupportedWheel( - "Metadata 1.2 mandates PEP 440 version, " - "but {!r} is not".format(dist_verstr) - ) - - -def _build_one( - req: InstallRequirement, - output_dir: str, - verify: bool, - build_options: List[str], - global_options: List[str], - editable: bool, -) -> Optional[str]: - """Build one wheel. - - :return: The filename of the built wheel, or None if the build failed. - """ - artifact = "editable" if editable else "wheel" - try: - ensure_dir(output_dir) - except OSError as e: - logger.warning( - "Building %s for %s failed: %s", - artifact, - req.name, - e, - ) - return None - - # Install build deps into temporary directory (PEP 518) - with req.build_env: - wheel_path = _build_one_inside_env( - req, output_dir, build_options, global_options, editable - ) - if wheel_path and verify: - try: - _verify_one(req, wheel_path) - except (InvalidWheelFilename, UnsupportedWheel) as e: - logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e) - return None - return wheel_path - - -def _build_one_inside_env( - req: InstallRequirement, - output_dir: str, - build_options: List[str], - global_options: List[str], - editable: bool, -) -> Optional[str]: - with TempDirectory(kind="wheel") as temp_dir: - assert req.name - if req.use_pep517: - assert req.metadata_directory - assert req.pep517_backend - if global_options: - logger.warning( - "Ignoring --global-option when building %s using PEP 517", req.name - ) - if build_options: - logger.warning( - "Ignoring --build-option when building %s using PEP 517", req.name - ) - if editable: - wheel_path = build_wheel_editable( - name=req.name, - backend=req.pep517_backend, - metadata_directory=req.metadata_directory, - tempd=temp_dir.path, - ) - else: - wheel_path = build_wheel_pep517( - name=req.name, - backend=req.pep517_backend, - metadata_directory=req.metadata_directory, - tempd=temp_dir.path, - ) - else: - wheel_path = build_wheel_legacy( - name=req.name, - setup_py_path=req.setup_py_path, - source_dir=req.unpacked_source_directory, - global_options=global_options, - build_options=build_options, - tempd=temp_dir.path, - ) - - if wheel_path is not None: - wheel_name = 
os.path.basename(wheel_path) - dest_path = os.path.join(output_dir, wheel_name) - try: - wheel_hash, length = hash_file(wheel_path) - shutil.move(wheel_path, dest_path) - logger.info( - "Created wheel for %s: filename=%s size=%d sha256=%s", - req.name, - wheel_name, - length, - wheel_hash.hexdigest(), - ) - logger.info("Stored in directory: %s", output_dir) - return dest_path - except Exception as e: - logger.warning( - "Building wheel for %s failed: %s", - req.name, - e, - ) - # Ignore return, we can't do anything else useful. - if not req.use_pep517: - _clean_one_legacy(req, global_options) - return None - - -def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool: - clean_args = make_setuptools_clean_args( - req.setup_py_path, - global_options=global_options, - ) - - logger.info("Running setup.py clean for %s", req.name) - try: - call_subprocess( - clean_args, command_desc="python setup.py clean", cwd=req.source_dir - ) - return True - except Exception: - logger.error("Failed cleaning build dir for %s", req.name) - return False - - -def build( - requirements: Iterable[InstallRequirement], - wheel_cache: WheelCache, - verify: bool, - build_options: List[str], - global_options: List[str], -) -> BuildResult: - """Build wheels. - - :return: The list of InstallRequirement that succeeded to build and - the list of InstallRequirement that failed to build. - """ - if not requirements: - return [], [] - - # Build the wheels. - logger.info( - "Building wheels for collected packages: %s", - ", ".join(req.name for req in requirements), # type: ignore - ) - - with indent_log(): - build_successes, build_failures = [], [] - for req in requirements: - assert req.name - cache_dir = _get_cache_dir(req, wheel_cache) - wheel_file = _build_one( - req, - cache_dir, - verify, - build_options, - global_options, - req.editable and req.permit_editable_wheels, - ) - if wheel_file: - # Update the link for this. - req.link = Link(path_to_url(wheel_file)) - req.local_file_path = req.link.file_path - assert req.link.is_wheel - build_successes.append(req) - else: - build_failures.append(req) - - # notify success/failure - if build_successes: - logger.info( - "Successfully built %s", - " ".join([req.name for req in build_successes]), # type: ignore - ) - if build_failures: - logger.info( - "Failed to build %s", - " ".join([req.name for req in build_failures]), # type: ignore - ) - # Return a list of requirements that failed to build - return build_successes, build_failures diff --git a/venv/Lib/site-packages/pip/_vendor/__init__.py b/venv/Lib/site-packages/pip/_vendor/__init__.py deleted file mode 100644 index 3843cb0..0000000 --- a/venv/Lib/site-packages/pip/_vendor/__init__.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -pip._vendor is for vendoring dependencies of pip to prevent needing pip to -depend on something external. - -Files inside of pip._vendor should be considered immutable and should only be -updated to versions from upstream. -""" -from __future__ import absolute_import - -import glob -import os.path -import sys - -# Downstream redistributors which have debundled our dependencies should also -# patch this value to be true. This will trigger the additional patching -# to cause things like "six" to be available as pip. -DEBUNDLED = False - -# By default, look in this directory for a bunch of .whl files which we will -# add to the beginning of sys.path before attempting to import anything. 
This -# is done to support downstream re-distributors like Debian and Fedora who -# wish to create their own Wheels for our dependencies to aid in debundling. -WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) - - -# Define a small helper function to alias our vendored modules to the real ones -# if the vendored ones do not exist. This idea of this was taken from -# https://github.com/kennethreitz/requests/pull/2567. -def vendored(modulename): - vendored_name = "{0}.{1}".format(__name__, modulename) - - try: - __import__(modulename, globals(), locals(), level=0) - except ImportError: - # We can just silently allow import failures to pass here. If we - # got to this point it means that ``import pip._vendor.whatever`` - # failed and so did ``import whatever``. Since we're importing this - # upfront in an attempt to alias imports, not erroring here will - # just mean we get a regular import error whenever pip *actually* - # tries to import one of these modules to use it, which actually - # gives us a better error message than we would have otherwise - # gotten. - pass - else: - sys.modules[vendored_name] = sys.modules[modulename] - base, head = vendored_name.rsplit(".", 1) - setattr(sys.modules[base], head, sys.modules[modulename]) - - -# If we're operating in a debundled setup, then we want to go ahead and trigger -# the aliasing of our vendored libraries as well as looking for wheels to add -# to our sys.path. This will cause all of this code to be a no-op typically -# however downstream redistributors can enable it in a consistent way across -# all platforms. -if DEBUNDLED: - # Actually look inside of WHEEL_DIR to find .whl files and add them to the - # front of our sys.path. - sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path - - # Actually alias all of our vendored dependencies. - vendored("cachecontrol") - vendored("certifi") - vendored("colorama") - vendored("distlib") - vendored("distro") - vendored("html5lib") - vendored("six") - vendored("six.moves") - vendored("six.moves.urllib") - vendored("six.moves.urllib.parse") - vendored("packaging") - vendored("packaging.version") - vendored("packaging.specifiers") - vendored("pep517") - vendored("pkg_resources") - vendored("platformdirs") - vendored("progress") - vendored("requests") - vendored("requests.exceptions") - vendored("requests.packages") - vendored("requests.packages.urllib3") - vendored("requests.packages.urllib3._collections") - vendored("requests.packages.urllib3.connection") - vendored("requests.packages.urllib3.connectionpool") - vendored("requests.packages.urllib3.contrib") - vendored("requests.packages.urllib3.contrib.ntlmpool") - vendored("requests.packages.urllib3.contrib.pyopenssl") - vendored("requests.packages.urllib3.exceptions") - vendored("requests.packages.urllib3.fields") - vendored("requests.packages.urllib3.filepost") - vendored("requests.packages.urllib3.packages") - vendored("requests.packages.urllib3.packages.ordered_dict") - vendored("requests.packages.urllib3.packages.six") - vendored("requests.packages.urllib3.packages.ssl_match_hostname") - vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
- "_implementation") - vendored("requests.packages.urllib3.poolmanager") - vendored("requests.packages.urllib3.request") - vendored("requests.packages.urllib3.response") - vendored("requests.packages.urllib3.util") - vendored("requests.packages.urllib3.util.connection") - vendored("requests.packages.urllib3.util.request") - vendored("requests.packages.urllib3.util.response") - vendored("requests.packages.urllib3.util.retry") - vendored("requests.packages.urllib3.util.ssl_") - vendored("requests.packages.urllib3.util.timeout") - vendored("requests.packages.urllib3.util.url") - vendored("resolvelib") - vendored("tenacity") - vendored("tomli") - vendored("urllib3") diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 6a8337a..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/distro.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/distro.cpython-39.pyc deleted file mode 100644 index 1bb2003..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/__pycache__/distro.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc deleted file mode 100644 index 282f7ba..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc deleted file mode 100644 index cd8328d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py deleted file mode 100644 index 8435d62..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -"""CacheControl import Interface. - -Make it easy to import from cachecontrol without long namespaces. 
-""" -__author__ = "Eric Larson" -__email__ = "eric@ionrock.org" -__version__ = "0.12.10" - -from .wrapper import CacheControl -from .adapter import CacheControlAdapter -from .controller import CacheController - -import logging -logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 548152d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc deleted file mode 100644 index 974220b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc deleted file mode 100644 index 8855d47..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc deleted file mode 100644 index d156566..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc deleted file mode 100644 index 348d196..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc deleted file mode 100644 index ff3f229..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc deleted file mode 100644 index 2f96cb3..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc deleted file mode 100644 index f112f7b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc deleted file mode 100644 index 307c89b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc 
b/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc deleted file mode 100644 index b9b6778..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py deleted file mode 100644 index 4266b5e..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py +++ /dev/null @@ -1,61 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -import logging - -from pip._vendor import requests - -from pip._vendor.cachecontrol.adapter import CacheControlAdapter -from pip._vendor.cachecontrol.cache import DictCache -from pip._vendor.cachecontrol.controller import logger - -from argparse import ArgumentParser - - -def setup_logging(): - logger.setLevel(logging.DEBUG) - handler = logging.StreamHandler() - logger.addHandler(handler) - - -def get_session(): - adapter = CacheControlAdapter( - DictCache(), cache_etags=True, serializer=None, heuristic=None - ) - sess = requests.Session() - sess.mount("http://", adapter) - sess.mount("https://", adapter) - - sess.cache_controller = adapter.controller - return sess - - -def get_args(): - parser = ArgumentParser() - parser.add_argument("url", help="The URL to try and cache") - return parser.parse_args() - - -def main(args=None): - args = get_args() - sess = get_session() - - # Make a request to get a response - resp = sess.get(args.url) - - # Turn on logging - setup_logging() - - # try setting the cache - sess.cache_controller.cache_response(resp.request, resp.raw) - - # Now try to get it - if sess.cache_controller.cached_request(resp.request): - print("Cached!") - else: - print("Not cached :(") - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py deleted file mode 100644 index 94c75e1..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py +++ /dev/null @@ -1,137 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -import types -import functools -import zlib - -from pip._vendor.requests.adapters import HTTPAdapter - -from .controller import CacheController, PERMANENT_REDIRECT_STATUSES -from .cache import DictCache -from .filewrapper import CallbackFileWrapper - - -class CacheControlAdapter(HTTPAdapter): - invalidating_methods = {"PUT", "PATCH", "DELETE"} - - def __init__( - self, - cache=None, - cache_etags=True, - controller_class=None, - serializer=None, - heuristic=None, - cacheable_methods=None, - *args, - **kw - ): - super(CacheControlAdapter, self).__init__(*args, **kw) - self.cache = DictCache() if cache is None else cache - self.heuristic = heuristic - self.cacheable_methods = cacheable_methods or ("GET",) - - controller_factory = controller_class or CacheController - self.controller = controller_factory( - self.cache, cache_etags=cache_etags, serializer=serializer - ) - - def send(self, request, cacheable_methods=None, **kw): - """ - Send a request. Use the request information to see if it - exists in the cache and cache the response if we need to and can. 
- """ - cacheable = cacheable_methods or self.cacheable_methods - if request.method in cacheable: - try: - cached_response = self.controller.cached_request(request) - except zlib.error: - cached_response = None - if cached_response: - return self.build_response(request, cached_response, from_cache=True) - - # check for etags and add headers if appropriate - request.headers.update(self.controller.conditional_headers(request)) - - resp = super(CacheControlAdapter, self).send(request, **kw) - - return resp - - def build_response( - self, request, response, from_cache=False, cacheable_methods=None - ): - """ - Build a response by making a request or using the cache. - - This will end up calling send and returning a potentially - cached response - """ - cacheable = cacheable_methods or self.cacheable_methods - if not from_cache and request.method in cacheable: - # Check for any heuristics that might update headers - # before trying to cache. - if self.heuristic: - response = self.heuristic.apply(response) - - # apply any expiration heuristics - if response.status == 304: - # We must have sent an ETag request. This could mean - # that we've been expired already or that we simply - # have an etag. In either case, we want to try and - # update the cache if that is the case. - cached_response = self.controller.update_cached_response( - request, response - ) - - if cached_response is not response: - from_cache = True - - # We are done with the server response, read a - # possible response body (compliant servers will - # not return one, but we cannot be 100% sure) and - # release the connection back to the pool. - response.read(decode_content=False) - response.release_conn() - - response = cached_response - - # We always cache the 301 responses - elif int(response.status) in PERMANENT_REDIRECT_STATUSES: - self.controller.cache_response(request, response) - else: - # Wrap the response file with a wrapper that will cache the - # response when the stream has been consumed. - response._fp = CallbackFileWrapper( - response._fp, - functools.partial( - self.controller.cache_response, request, response - ), - ) - if response.chunked: - super_update_chunk_length = response._update_chunk_length - - def _update_chunk_length(self): - super_update_chunk_length() - if self.chunk_left == 0: - self._fp._close() - - response._update_chunk_length = types.MethodType( - _update_chunk_length, response - ) - - resp = super(CacheControlAdapter, self).build_response(request, response) - - # See if we should invalidate the cache. - if request.method in self.invalidating_methods and resp.ok: - cache_url = self.controller.cache_url(request.url) - self.cache.delete(cache_url) - - # Give the request a from_cache attr to let people use it - resp.from_cache = from_cache - - return resp - - def close(self): - self.cache.close() - super(CacheControlAdapter, self).close() diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/cache.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/cache.py deleted file mode 100644 index 44e4309..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/cache.py +++ /dev/null @@ -1,43 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -""" -The cache object API for implementing caches. The default is a thread -safe in-memory dictionary. 
-""" -from threading import Lock - - -class BaseCache(object): - - def get(self, key): - raise NotImplementedError() - - def set(self, key, value, expires=None): - raise NotImplementedError() - - def delete(self, key): - raise NotImplementedError() - - def close(self): - pass - - -class DictCache(BaseCache): - - def __init__(self, init_dict=None): - self.lock = Lock() - self.data = init_dict or {} - - def get(self, key): - return self.data.get(key, None) - - def set(self, key, value, expires=None): - with self.lock: - self.data.update({key: value}) - - def delete(self, key): - with self.lock: - if key in self.data: - self.data.pop(key) diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py deleted file mode 100644 index 44becd6..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -from .file_cache import FileCache # noqa -from .redis_cache import RedisCache # noqa diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index ea12121..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc deleted file mode 100644 index e11bf21..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc deleted file mode 100644 index 77d0641..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py deleted file mode 100644 index 6cd1106..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +++ /dev/null @@ -1,150 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -import hashlib -import os -from textwrap import dedent - -from ..cache import BaseCache -from ..controller import CacheController - -try: - FileNotFoundError -except NameError: - # py2.X - FileNotFoundError = (IOError, OSError) - - -def _secure_open_write(filename, fmode): - # We only want to write to this file, so open it in write only mode - flags = os.O_WRONLY - - # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only - # will open *new* files. - # We specify this because we want to ensure that the mode we pass is the - # mode of the file. - flags |= os.O_CREAT | os.O_EXCL - - # Do not follow symlinks to prevent someone from making a symlink that - # we follow and insecurely open a cache file. 
- if hasattr(os, "O_NOFOLLOW"): - flags |= os.O_NOFOLLOW - - # On Windows we'll mark this file as binary - if hasattr(os, "O_BINARY"): - flags |= os.O_BINARY - - # Before we open our file, we want to delete any existing file that is - # there - try: - os.remove(filename) - except (IOError, OSError): - # The file must not exist already, so we can just skip ahead to opening - pass - - # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a - # race condition happens between the os.remove and this line, that an - # error will be raised. Because we utilize a lockfile this should only - # happen if someone is attempting to attack us. - fd = os.open(filename, flags, fmode) - try: - return os.fdopen(fd, "wb") - - except: - # An error occurred wrapping our FD in a file object - os.close(fd) - raise - - -class FileCache(BaseCache): - - def __init__( - self, - directory, - forever=False, - filemode=0o0600, - dirmode=0o0700, - use_dir_lock=None, - lock_class=None, - ): - - if use_dir_lock is not None and lock_class is not None: - raise ValueError("Cannot use use_dir_lock and lock_class together") - - try: - from lockfile import LockFile - from lockfile.mkdirlockfile import MkdirLockFile - except ImportError: - notice = dedent( - """ - NOTE: In order to use the FileCache you must have - lockfile installed. You can install it via pip: - pip install lockfile - """ - ) - raise ImportError(notice) - - else: - if use_dir_lock: - lock_class = MkdirLockFile - - elif lock_class is None: - lock_class = LockFile - - self.directory = directory - self.forever = forever - self.filemode = filemode - self.dirmode = dirmode - self.lock_class = lock_class - - @staticmethod - def encode(x): - return hashlib.sha224(x.encode()).hexdigest() - - def _fn(self, name): - # NOTE: This method should not change as some may depend on it. - # See: https://github.com/ionrock/cachecontrol/issues/63 - hashed = self.encode(name) - parts = list(hashed[:5]) + [hashed] - return os.path.join(self.directory, *parts) - - def get(self, key): - name = self._fn(key) - try: - with open(name, "rb") as fh: - return fh.read() - - except FileNotFoundError: - return None - - def set(self, key, value, expires=None): - name = self._fn(key) - - # Make sure the directory exists - try: - os.makedirs(os.path.dirname(name), self.dirmode) - except (IOError, OSError): - pass - - with self.lock_class(name) as lock: - # Write our actual file - with _secure_open_write(lock.path, self.filemode) as fh: - fh.write(value) - - def delete(self, key): - name = self._fn(key) - if not self.forever: - try: - os.remove(name) - except FileNotFoundError: - pass - - -def url_to_file_path(url, filecache): - """Return the file cache path based on the URL. - - This does not ensure the file exists! 
- """ - key = CacheController.cache_url(url) - return filecache._fn(key) diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py deleted file mode 100644 index 720b507..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py +++ /dev/null @@ -1,37 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -from __future__ import division - -from datetime import datetime -from pip._vendor.cachecontrol.cache import BaseCache - - -class RedisCache(BaseCache): - - def __init__(self, conn): - self.conn = conn - - def get(self, key): - return self.conn.get(key) - - def set(self, key, value, expires=None): - if not expires: - self.conn.set(key, value) - else: - expires = expires - datetime.utcnow() - self.conn.setex(key, int(expires.total_seconds()), value) - - def delete(self, key): - self.conn.delete(key) - - def clear(self): - """Helper for clearing all the keys in a database. Use with - caution!""" - for key in self.conn.keys(): - self.conn.delete(key) - - def close(self): - """Redis uses connection pooling, no need to close the connection.""" - pass diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/compat.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/compat.py deleted file mode 100644 index ccec937..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/compat.py +++ /dev/null @@ -1,32 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -try: - from urllib.parse import urljoin -except ImportError: - from urlparse import urljoin - - -try: - import cPickle as pickle -except ImportError: - import pickle - -# Handle the case where the requests module has been patched to not have -# urllib3 bundled as part of its source. -try: - from pip._vendor.requests.packages.urllib3.response import HTTPResponse -except ImportError: - from pip._vendor.urllib3.response import HTTPResponse - -try: - from pip._vendor.requests.packages.urllib3.util import is_fp_closed -except ImportError: - from pip._vendor.urllib3.util import is_fp_closed - -# Replicate some six behaviour -try: - text_type = unicode -except NameError: - text_type = str diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/controller.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/controller.py deleted file mode 100644 index d7e7380..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/controller.py +++ /dev/null @@ -1,415 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -""" -The httplib2 algorithms ported for use with requests. -""" -import logging -import re -import calendar -import time -from email.utils import parsedate_tz - -from pip._vendor.requests.structures import CaseInsensitiveDict - -from .cache import DictCache -from .serialize import Serializer - - -logger = logging.getLogger(__name__) - -URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") - -PERMANENT_REDIRECT_STATUSES = (301, 308) - - -def parse_uri(uri): - """Parses a URI using the regex given in Appendix B of RFC 3986. - - (scheme, authority, path, query, fragment) = parse_uri(uri) - """ - groups = URI.match(uri).groups() - return (groups[1], groups[3], groups[4], groups[6], groups[8]) - - -class CacheController(object): - """An interface to see if request should cached or not. 
- """ - - def __init__( - self, cache=None, cache_etags=True, serializer=None, status_codes=None - ): - self.cache = DictCache() if cache is None else cache - self.cache_etags = cache_etags - self.serializer = serializer or Serializer() - self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308) - - @classmethod - def _urlnorm(cls, uri): - """Normalize the URL to create a safe key for the cache""" - (scheme, authority, path, query, fragment) = parse_uri(uri) - if not scheme or not authority: - raise Exception("Only absolute URIs are allowed. uri = %s" % uri) - - scheme = scheme.lower() - authority = authority.lower() - - if not path: - path = "/" - - # Could do syntax based normalization of the URI before - # computing the digest. See Section 6.2.2 of Std 66. - request_uri = query and "?".join([path, query]) or path - defrag_uri = scheme + "://" + authority + request_uri - - return defrag_uri - - @classmethod - def cache_url(cls, uri): - return cls._urlnorm(uri) - - def parse_cache_control(self, headers): - known_directives = { - # https://tools.ietf.org/html/rfc7234#section-5.2 - "max-age": (int, True), - "max-stale": (int, False), - "min-fresh": (int, True), - "no-cache": (None, False), - "no-store": (None, False), - "no-transform": (None, False), - "only-if-cached": (None, False), - "must-revalidate": (None, False), - "public": (None, False), - "private": (None, False), - "proxy-revalidate": (None, False), - "s-maxage": (int, True), - } - - cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) - - retval = {} - - for cc_directive in cc_headers.split(","): - if not cc_directive.strip(): - continue - - parts = cc_directive.split("=", 1) - directive = parts[0].strip() - - try: - typ, required = known_directives[directive] - except KeyError: - logger.debug("Ignoring unknown cache-control directive: %s", directive) - continue - - if not typ or not required: - retval[directive] = None - if typ: - try: - retval[directive] = typ(parts[1].strip()) - except IndexError: - if required: - logger.debug( - "Missing value for cache-control " "directive: %s", - directive, - ) - except ValueError: - logger.debug( - "Invalid value for cache-control directive " "%s, must be %s", - directive, - typ.__name__, - ) - - return retval - - def cached_request(self, request): - """ - Return a cached response if it exists in the cache, otherwise - return False. - """ - cache_url = self.cache_url(request.url) - logger.debug('Looking up "%s" in the cache', cache_url) - cc = self.parse_cache_control(request.headers) - - # Bail out if the request insists on fresh data - if "no-cache" in cc: - logger.debug('Request header has "no-cache", cache bypassed') - return False - - if "max-age" in cc and cc["max-age"] == 0: - logger.debug('Request header has "max_age" as 0, cache bypassed') - return False - - # Request allows serving from the cache, let's see if we find something - cache_data = self.cache.get(cache_url) - if cache_data is None: - logger.debug("No cache entry available") - return False - - # Check whether it can be deserialized - resp = self.serializer.loads(request, cache_data) - if not resp: - logger.warning("Cache entry deserialization failed, entry ignored") - return False - - # If we have a cached permanent redirect, return it immediately. We - # don't need to test our response for other headers b/c it is - # intrinsically "cacheable" as it is Permanent. 
- # - # See: - # https://tools.ietf.org/html/rfc7231#section-6.4.2 - # - # Client can try to refresh the value by repeating the request - # with cache busting headers as usual (ie no-cache). - if int(resp.status) in PERMANENT_REDIRECT_STATUSES: - msg = ( - "Returning cached permanent redirect response " - "(ignoring date and etag information)" - ) - logger.debug(msg) - return resp - - headers = CaseInsensitiveDict(resp.headers) - if not headers or "date" not in headers: - if "etag" not in headers: - # Without date or etag, the cached response can never be used - # and should be deleted. - logger.debug("Purging cached response: no date or etag") - self.cache.delete(cache_url) - logger.debug("Ignoring cached response: no date") - return False - - now = time.time() - date = calendar.timegm(parsedate_tz(headers["date"])) - current_age = max(0, now - date) - logger.debug("Current age based on date: %i", current_age) - - # TODO: There is an assumption that the result will be a - # urllib3 response object. This may not be best since we - # could probably avoid instantiating or constructing the - # response until we know we need it. - resp_cc = self.parse_cache_control(headers) - - # determine freshness - freshness_lifetime = 0 - - # Check the max-age pragma in the cache control header - if "max-age" in resp_cc: - freshness_lifetime = resp_cc["max-age"] - logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) - - # If there isn't a max-age, check for an expires header - elif "expires" in headers: - expires = parsedate_tz(headers["expires"]) - if expires is not None: - expire_time = calendar.timegm(expires) - date - freshness_lifetime = max(0, expire_time) - logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) - - # Determine if we are setting freshness limit in the - # request. Note, this overrides what was in the response. - if "max-age" in cc: - freshness_lifetime = cc["max-age"] - logger.debug( - "Freshness lifetime from request max-age: %i", freshness_lifetime - ) - - if "min-fresh" in cc: - min_fresh = cc["min-fresh"] - # adjust our current age by our min fresh - current_age += min_fresh - logger.debug("Adjusted current age from min-fresh: %i", current_age) - - # Return entry if it is fresh enough - if freshness_lifetime > current_age: - logger.debug('The response is "fresh", returning cached response') - logger.debug("%i > %i", freshness_lifetime, current_age) - return resp - - # we're not fresh. If we don't have an Etag, clear it out - if "etag" not in headers: - logger.debug('The cached response is "stale" with no etag, purging') - self.cache.delete(cache_url) - - # return the original handler - return False - - def conditional_headers(self, request): - cache_url = self.cache_url(request.url) - resp = self.serializer.loads(request, self.cache.get(cache_url)) - new_headers = {} - - if resp: - headers = CaseInsensitiveDict(resp.headers) - - if "etag" in headers: - new_headers["If-None-Match"] = headers["ETag"] - - if "last-modified" in headers: - new_headers["If-Modified-Since"] = headers["Last-Modified"] - - return new_headers - - def cache_response(self, request, response, body=None, status_codes=None): - """ - Algorithm for caching requests. - - This assumes a requests Response object. 
- """ - # From httplib2: Don't cache 206's since we aren't going to - # handle byte range requests - cacheable_status_codes = status_codes or self.cacheable_status_codes - if response.status not in cacheable_status_codes: - logger.debug( - "Status code %s not in %s", response.status, cacheable_status_codes - ) - return - - response_headers = CaseInsensitiveDict(response.headers) - - if "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) - else: - date = 0 - - # If we've been given a body, our response has a Content-Length, that - # Content-Length is valid then we can check to see if the body we've - # been given matches the expected size, and if it doesn't we'll just - # skip trying to cache it. - if ( - body is not None - and "content-length" in response_headers - and response_headers["content-length"].isdigit() - and int(response_headers["content-length"]) != len(body) - ): - return - - cc_req = self.parse_cache_control(request.headers) - cc = self.parse_cache_control(response_headers) - - cache_url = self.cache_url(request.url) - logger.debug('Updating cache with response from "%s"', cache_url) - - # Delete it from the cache if we happen to have it stored there - no_store = False - if "no-store" in cc: - no_store = True - logger.debug('Response header has "no-store"') - if "no-store" in cc_req: - no_store = True - logger.debug('Request header has "no-store"') - if no_store and self.cache.get(cache_url): - logger.debug('Purging existing cache entry to honor "no-store"') - self.cache.delete(cache_url) - if no_store: - return - - # https://tools.ietf.org/html/rfc7234#section-4.1: - # A Vary header field-value of "*" always fails to match. - # Storing such a response leads to a deserialization warning - # during cache lookup and is not allowed to ever be served, - # so storing it can be avoided. - if "*" in response_headers.get("vary", ""): - logger.debug('Response header has "Vary: *"') - return - - # If we've been given an etag, then keep the response - if self.cache_etags and "etag" in response_headers: - expires_time = 0 - if response_headers.get("expires"): - expires = parsedate_tz(response_headers["expires"]) - if expires is not None: - expires_time = calendar.timegm(expires) - date - - expires_time = max(expires_time, 14 * 86400) - - logger.debug("etag object cached for {0} seconds".format(expires_time)) - logger.debug("Caching due to etag") - self.cache.set( - cache_url, - self.serializer.dumps(request, response, body), - expires=expires_time, - ) - - # Add to the cache any permanent redirects. We do this before looking - # that the Date headers. - elif int(response.status) in PERMANENT_REDIRECT_STATUSES: - logger.debug("Caching permanent redirect") - self.cache.set(cache_url, self.serializer.dumps(request, response, b"")) - - # Add to the cache if the response headers demand it. If there - # is no date header then we can't do anything about expiring - # the cache. - elif "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) - # cache when there is a max-age > 0 - if "max-age" in cc and cc["max-age"] > 0: - logger.debug("Caching b/c date exists and max-age > 0") - expires_time = cc["max-age"] - self.cache.set( - cache_url, - self.serializer.dumps(request, response, body), - expires=expires_time, - ) - - # If the request can expire, it means we should cache it - # in the meantime. 
- elif "expires" in response_headers: - if response_headers["expires"]: - expires = parsedate_tz(response_headers["expires"]) - if expires is not None: - expires_time = calendar.timegm(expires) - date - else: - expires_time = None - - logger.debug( - "Caching b/c of expires header. expires in {0} seconds".format( - expires_time - ) - ) - self.cache.set( - cache_url, - self.serializer.dumps(request, response, body=body), - expires=expires_time, - ) - - def update_cached_response(self, request, response): - """On a 304 we will get a new set of headers that we want to - update our cached value with, assuming we have one. - - This should only ever be called when we've sent an ETag and - gotten a 304 as the response. - """ - cache_url = self.cache_url(request.url) - - cached_response = self.serializer.loads(request, self.cache.get(cache_url)) - - if not cached_response: - # we didn't have a cached response - return response - - # Lets update our headers with the headers from the new request: - # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 - # - # The server isn't supposed to send headers that would make - # the cached body invalid. But... just in case, we'll be sure - # to strip out ones we know that might be problmatic due to - # typical assumptions. - excluded_headers = ["content-length"] - - cached_response.headers.update( - dict( - (k, v) - for k, v in response.headers.items() - if k.lower() not in excluded_headers - ) - ) - - # we want a 200 b/c we have content via the cache - cached_response.status = 200 - - # update our cache - self.cache.set(cache_url, self.serializer.dumps(request, cached_response)) - - return cached_response diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py deleted file mode 100644 index f5ed5f6..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py +++ /dev/null @@ -1,111 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -from tempfile import NamedTemporaryFile -import mmap - - -class CallbackFileWrapper(object): - """ - Small wrapper around a fp object which will tee everything read into a - buffer, and when that file is closed it will execute a callback with the - contents of that buffer. - - All attributes are proxied to the underlying file object. - - This class uses members with a double underscore (__) leading prefix so as - not to accidentally shadow an attribute. - - The data is stored in a temporary file until it is all available. As long - as the temporary files directory is disk-based (sometimes it's a - memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory - pressure is high. For small files the disk usually won't be used at all, - it'll all be in the filesystem memory cache, so there should be no - performance impact. - """ - - def __init__(self, fp, callback): - self.__buf = NamedTemporaryFile("rb+", delete=True) - self.__fp = fp - self.__callback = callback - - def __getattr__(self, name): - # The vaguaries of garbage collection means that self.__fp is - # not always set. By using __getattribute__ and the private - # name[0] allows looking up the attribute value and raising an - # AttributeError when it doesn't exist. This stop thigns from - # infinitely recursing calls to getattr in the case where - # self.__fp hasn't been set. 
- # - # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers - fp = self.__getattribute__("_CallbackFileWrapper__fp") - return getattr(fp, name) - - def __is_fp_closed(self): - try: - return self.__fp.fp is None - - except AttributeError: - pass - - try: - return self.__fp.closed - - except AttributeError: - pass - - # We just don't cache it then. - # TODO: Add some logging here... - return False - - def _close(self): - if self.__callback: - if self.__buf.tell() == 0: - # Empty file: - result = b"" - else: - # Return the data without actually loading it into memory, - # relying on Python's buffer API and mmap(). mmap() just gives - # a view directly into the filesystem's memory cache, so it - # doesn't result in duplicate memory use. - self.__buf.seek(0, 0) - result = memoryview( - mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ) - ) - self.__callback(result) - - # We assign this to None here, because otherwise we can get into - # really tricky problems where the CPython interpreter dead locks - # because the callback is holding a reference to something which - # has a __del__ method. Setting this to None breaks the cycle - # and allows the garbage collector to do it's thing normally. - self.__callback = None - - # Closing the temporary file releases memory and frees disk space. - # Important when caching big files. - self.__buf.close() - - def read(self, amt=None): - data = self.__fp.read(amt) - if data: - # We may be dealing with b'', a sign that things are over: - # it's passed e.g. after we've already closed self.__buf. - self.__buf.write(data) - if self.__is_fp_closed(): - self._close() - - return data - - def _safe_read(self, amt): - data = self.__fp._safe_read(amt) - if amt == 2 and data == b"\r\n": - # urllib executes this read to toss the CRLF at the end - # of the chunk. - return data - - self.__buf.write(data) - if self.__is_fp_closed(): - self._close() - - return data diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py deleted file mode 100644 index ebe4a96..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py +++ /dev/null @@ -1,139 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -import calendar -import time - -from email.utils import formatdate, parsedate, parsedate_tz - -from datetime import datetime, timedelta - -TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" - - -def expire_after(delta, date=None): - date = date or datetime.utcnow() - return date + delta - - -def datetime_to_header(dt): - return formatdate(calendar.timegm(dt.timetuple())) - - -class BaseHeuristic(object): - - def warning(self, response): - """ - Return a valid 1xx warning header value describing the cache - adjustments. - - The response is provided too allow warnings like 113 - http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need - to explicitly say response is over 24 hours old. - """ - return '110 - "Response is Stale"' - - def update_headers(self, response): - """Update the response headers with any new headers. - - NOTE: This SHOULD always include some Warning header to - signify that the response was cached by the client, not - by way of the provided headers. 
- """ - return {} - - def apply(self, response): - updated_headers = self.update_headers(response) - - if updated_headers: - response.headers.update(updated_headers) - warning_header_value = self.warning(response) - if warning_header_value is not None: - response.headers.update({"Warning": warning_header_value}) - - return response - - -class OneDayCache(BaseHeuristic): - """ - Cache the response by providing an expires 1 day in the - future. - """ - - def update_headers(self, response): - headers = {} - - if "expires" not in response.headers: - date = parsedate(response.headers["date"]) - expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) - headers["expires"] = datetime_to_header(expires) - headers["cache-control"] = "public" - return headers - - -class ExpiresAfter(BaseHeuristic): - """ - Cache **all** requests for a defined time period. - """ - - def __init__(self, **kw): - self.delta = timedelta(**kw) - - def update_headers(self, response): - expires = expire_after(self.delta) - return {"expires": datetime_to_header(expires), "cache-control": "public"} - - def warning(self, response): - tmpl = "110 - Automatically cached for %s. Response might be stale" - return tmpl % self.delta - - -class LastModified(BaseHeuristic): - """ - If there is no Expires header already, fall back on Last-Modified - using the heuristic from - http://tools.ietf.org/html/rfc7234#section-4.2.2 - to calculate a reasonable value. - - Firefox also does something like this per - https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ - http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 - Unlike mozilla we limit this to 24-hr. - """ - cacheable_by_default_statuses = { - 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 - } - - def update_headers(self, resp): - headers = resp.headers - - if "expires" in headers: - return {} - - if "cache-control" in headers and headers["cache-control"] != "public": - return {} - - if resp.status not in self.cacheable_by_default_statuses: - return {} - - if "date" not in headers or "last-modified" not in headers: - return {} - - date = calendar.timegm(parsedate_tz(headers["date"])) - last_modified = parsedate(headers["last-modified"]) - if date is None or last_modified is None: - return {} - - now = time.time() - current_age = max(0, now - date) - delta = date - calendar.timegm(last_modified) - freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) - if freshness_lifetime <= current_age: - return {} - - expires = date + freshness_lifetime - return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} - - def warning(self, resp): - return None diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py deleted file mode 100644 index b075df1..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py +++ /dev/null @@ -1,186 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -import base64 -import io -import json -import zlib - -from pip._vendor import msgpack -from pip._vendor.requests.structures import CaseInsensitiveDict - -from .compat import HTTPResponse, pickle, text_type - - -def _b64_decode_bytes(b): - return base64.b64decode(b.encode("ascii")) - - -def _b64_decode_str(s): - return _b64_decode_bytes(s).decode("utf8") - - -_default_body_read = object() - - -class Serializer(object): - def dumps(self, request, response, body=None): - response_headers = 
CaseInsensitiveDict(response.headers) - - if body is None: - # When a body isn't passed in, we'll read the response. We - # also update the response with a new file handler to be - # sure it acts as though it was never read. - body = response.read(decode_content=False) - response._fp = io.BytesIO(body) - - # NOTE: This is all a bit weird, but it's really important that on - # Python 2.x these objects are unicode and not str, even when - # they contain only ascii. The problem here is that msgpack - # understands the difference between unicode and bytes and we - # have it set to differentiate between them, however Python 2 - # doesn't know the difference. Forcing these to unicode will be - # enough to have msgpack know the difference. - data = { - u"response": { - u"body": body, - u"headers": dict( - (text_type(k), text_type(v)) for k, v in response.headers.items() - ), - u"status": response.status, - u"version": response.version, - u"reason": text_type(response.reason), - u"strict": response.strict, - u"decode_content": response.decode_content, - } - } - - # Construct our vary headers - data[u"vary"] = {} - if u"vary" in response_headers: - varied_headers = response_headers[u"vary"].split(",") - for header in varied_headers: - header = text_type(header).strip() - header_value = request.headers.get(header, None) - if header_value is not None: - header_value = text_type(header_value) - data[u"vary"][header] = header_value - - return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) - - def loads(self, request, data): - # Short circuit if we've been given an empty set of data - if not data: - return - - # Determine what version of the serializer the data was serialized - # with - try: - ver, data = data.split(b",", 1) - except ValueError: - ver = b"cc=0" - - # Make sure that our "ver" is actually a version and isn't a false - # positive from a , being in the data stream. - if ver[:3] != b"cc=": - data = ver + data - ver = b"cc=0" - - # Get the version number out of the cc=N - ver = ver.split(b"=", 1)[-1].decode("ascii") - - # Dispatch to the actual load method for the given version - try: - return getattr(self, "_loads_v{}".format(ver))(request, data) - - except AttributeError: - # This is a version we don't have a loads function for, so we'll - # just treat it as a miss and return None - return - - def prepare_response(self, request, cached): - """Verify our vary headers match and construct a real urllib3 - HTTPResponse object. - """ - # Special case the '*' Vary value as it means we cannot actually - # determine if the cached response is suitable for this request. - # This case is also handled in the controller code when creating - # a cache entry, but is left here for backwards compatibility. - if "*" in cached.get("vary", {}): - return - - # Ensure that the Vary headers for the cached response match our - # request - for header, value in cached.get("vary", {}).items(): - if request.headers.get(header, None) != value: - return - - body_raw = cached["response"].pop("body") - - headers = CaseInsensitiveDict(data=cached["response"]["headers"]) - if headers.get("transfer-encoding", "") == "chunked": - headers.pop("transfer-encoding") - - cached["response"]["headers"] = headers - - try: - body = io.BytesIO(body_raw) - except TypeError: - # This can happen if cachecontrol serialized to v1 format (pickle) - # using Python 2. 
A Python 2 str(byte string) will be unpickled as - # a Python 3 str (unicode string), which will cause the above to - # fail with: - # - # TypeError: 'str' does not support the buffer interface - body = io.BytesIO(body_raw.encode("utf8")) - - return HTTPResponse(body=body, preload_content=False, **cached["response"]) - - def _loads_v0(self, request, data): - # The original legacy cache data. This doesn't contain enough - # information to construct everything we need, so we'll treat this as - # a miss. - return - - def _loads_v1(self, request, data): - try: - cached = pickle.loads(data) - except ValueError: - return - - return self.prepare_response(request, cached) - - def _loads_v2(self, request, data): - try: - cached = json.loads(zlib.decompress(data).decode("utf8")) - except (ValueError, zlib.error): - return - - # We need to decode the items that we've base64 encoded - cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) - cached["response"]["headers"] = dict( - (_b64_decode_str(k), _b64_decode_str(v)) - for k, v in cached["response"]["headers"].items() - ) - cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) - cached["vary"] = dict( - (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) - for k, v in cached["vary"].items() - ) - - return self.prepare_response(request, cached) - - def _loads_v3(self, request, data): - # Due to Python 2 encoding issues, it's impossible to know for sure - # exactly how to load v3 entries, thus we'll treat these as a miss so - # that they get rewritten out as v4 entries. - return - - def _loads_v4(self, request, data): - try: - cached = msgpack.loads(data, raw=False) - except ValueError: - return - - return self.prepare_response(request, cached) diff --git a/venv/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py b/venv/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py deleted file mode 100644 index b6ee7f2..0000000 --- a/venv/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py +++ /dev/null @@ -1,33 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -from .adapter import CacheControlAdapter -from .cache import DictCache - - -def CacheControl( - sess, - cache=None, - cache_etags=True, - serializer=None, - heuristic=None, - controller_class=None, - adapter_class=None, - cacheable_methods=None, -): - - cache = DictCache() if cache is None else cache - adapter_class = adapter_class or CacheControlAdapter - adapter = adapter_class( - cache, - cache_etags=cache_etags, - serializer=serializer, - heuristic=heuristic, - controller_class=controller_class, - cacheable_methods=cacheable_methods, - ) - sess.mount("http://", adapter) - sess.mount("https://", adapter) - - return sess diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__init__.py b/venv/Lib/site-packages/pip/_vendor/certifi/__init__.py deleted file mode 100644 index 8db1a0e..0000000 --- a/venv/Lib/site-packages/pip/_vendor/certifi/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .core import contents, where - -__version__ = "2021.10.08" diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__main__.py b/venv/Lib/site-packages/pip/_vendor/certifi/__main__.py deleted file mode 100644 index 0037634..0000000 --- a/venv/Lib/site-packages/pip/_vendor/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from pip._vendor.certifi import contents, where - -parser = argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = 
parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index e6a7994..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc deleted file mode 100644 index 4e4e353..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc deleted file mode 100644 index 7141923..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/cacert.pem b/venv/Lib/site-packages/pip/_vendor/certifi/cacert.pem deleted file mode 100644 index 6d0ccc0..0000000 --- a/venv/Lib/site-packages/pip/_vendor/certifi/cacert.pem +++ /dev/null @@ -1,4362 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: 
ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ 
-bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t 
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv 
-b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T 
-AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y 
-FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ 
-OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L 
-URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. 
-# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 
Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC 
-AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
-# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - 
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. 
-# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes -# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes -# Label: "EC-ACC" -# Serial: -23701579247955709139626555126524820479 -# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 -# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 -# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 ------BEGIN CERTIFICATE----- -MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB -8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy -dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 -YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 -dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh -IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD -LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG -EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g -KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD -ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu -bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg -ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R -85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm -4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV -HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd -QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t 
-lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB -o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 -opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo -dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW -ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN -AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y -/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k -SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy -Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS -Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl -nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE 
-BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP 
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: 
ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 
-PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. 
-# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 
-4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm 
-4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. 
OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK -vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy 
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga 
-W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- 
-MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO 
-DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ 
-CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT -Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 
4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb -# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX -kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc -8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO 
-xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp 
-S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN -MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df 
-WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG -EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw -DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
-+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud -EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
-M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 146587175971765017618439757810265552097 -# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 -# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 -# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM -f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX -mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 -zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P -fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc -vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 -Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp -zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO -Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW -k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ -DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF -lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW -Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 -d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z -XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR -gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 -d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv -J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg -DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM -+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy -F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 -SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws -E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 146587176055767053814479386953112547951 -# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b -# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d -# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv -CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg -GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu -XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd -re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
-PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 -mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K -8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj -x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR -nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 -kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok -twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp -8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT -vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT -z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA -pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb -pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB -R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R -RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk -0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC -5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF -izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn -yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 146587176140553309517047991083707763997 -# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 -# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 -# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 ------BEGIN CERTIFICATE----- -MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout -736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A -DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk -fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA -njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 146587176229350439916519468929765261721 -# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 -# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb -# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd ------BEGIN CERTIFICATE----- -MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu -hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l -xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
-DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 -CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx -sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy -YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
-sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO -ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
-6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby -RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
-WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: 
"Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa -LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj -c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - -# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft ECC Root Certificate Authority 2017" -# Serial: 136839042543790627607696632466672567020 -# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 -# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 -# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 ------BEGIN CERTIFICATE----- -MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD -VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw -MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
-UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy -b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR -ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb -hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 -FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV -L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB -iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= ------END CERTIFICATE----- - -# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft RSA Root Certificate Authority 2017" -# Serial: 40975477897264996090493496164228220339 -# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 -# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 -# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 ------BEGIN CERTIFICATE----- -MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl -MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw -NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG -EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N -aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ -Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 -ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 -HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm -gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ -jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc -aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG -YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 -W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K -UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH -+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q -W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC -LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC -gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 -tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh -SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 -TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 -pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR -xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp -GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 -dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN -AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB -RA+GsCyRxj3qrg+E ------END CERTIFICATE----- - -# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
-# Label: "e-Szigno Root CA 2017" -# Serial: 411379200276854331539784714 -# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 -# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 -# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 ------BEGIN CERTIFICATE----- -MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV -BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk -LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv -b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ -BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg -THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v -IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv -xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H -Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB -eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo -jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ -+efcMQ== ------END CERTIFICATE----- - -# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Label: "certSIGN Root CA G2" -# Serial: 313609486401300475190 -# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 -# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 -# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV -BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g -Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ -BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ -R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF -dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw -vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ -uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp -n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs -cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW -xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P -rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF -DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx -DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy -LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C -eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ -d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq -kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC -b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl -qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 -OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c -NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk -ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO -pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj -03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk -PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE -1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX -QRBdJ3NghVdJIgc= ------END CERTIFICATE----- - 
-# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global Certification Authority" -# Serial: 1846098327275375458322922162 -# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e -# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 -# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 ------BEGIN CERTIFICATE----- -MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw -CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x -ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 -c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx -OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI -b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn -swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu -7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 -1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW -80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP -JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l -RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw -hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 -coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc -BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n -twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud -DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W -0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe -uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q -lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB -aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE -sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT -MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe -qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh -VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 -h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 -EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK -yeC2nOnOcXHebD8WpHk= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
-# Label: "Trustwave Global ECC P256 Certification Authority" -# Serial: 4151900041497450638097112925 -# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 -# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf -# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 ------BEGIN CERTIFICATE----- -MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG -SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN -FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w -DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw -CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh -DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P384 Certification Authority" -# Serial: 2704997926503831671788816187 -# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 -# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 -# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 ------BEGIN CERTIFICATE----- -MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB -BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ -j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF -1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G -A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 -AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC -MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu -Sw== ------END CERTIFICATE----- - -# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. -# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
-# Label: "NAVER Global Root Certification Authority" -# Serial: 9013692873798656336226253319739695165984492813 -# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b -# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 -# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 ------BEGIN CERTIFICATE----- -MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM -BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG -T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx -CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD -b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA -iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH -38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE -HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz -kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP -szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq -vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf -nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG -YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo -0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a -CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K -AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I -36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN -qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj -cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm -+LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL -hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe -lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 -p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 -piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR -LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX -5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO -dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul -9XXeifdy ------END CERTIFICATE----- - -# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" -# Serial: 131542671362353147877283741781055151509 -# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb -# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a -# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb ------BEGIN CERTIFICATE----- -MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw -CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw -FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S -Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 -MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL -DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS -QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
-sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK -Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu -SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC -MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy -v+c= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Label: "GlobalSign Root R46" -# Serial: 1552617688466950547958867513931858518042577 -# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef -# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 -# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA -MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD -VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy -MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt -c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ -OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG -vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud -316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo -0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE -y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF -zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE -+cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN -I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs -x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa -ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC -4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 -7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg -JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti -2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk -pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF -FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt -rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk -ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 -u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP -4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 -N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 -vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Label: "GlobalSign Root E46" -# Serial: 1552617690338932563915843282459653771421763 -# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f -# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 -# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 ------BEGIN CERTIFICATE----- -MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx -CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD -ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw -MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
-HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq -R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd -yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ -7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 -+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= ------END CERTIFICATE----- - -# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Label: "GLOBALTRUST 2020" -# Serial: 109160994242082918454945253 -# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 -# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 -# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a ------BEGIN CERTIFICATE----- -MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG -A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw -FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx -MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u -aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq -hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b -RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z -YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 -QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw -yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ -BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ -SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH -r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 -4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me -dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw -q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 -nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu -H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA -VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC -XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd -6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf -+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi -kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 -wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB -TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C -MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn -4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I -aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy -qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== ------END CERTIFICATE----- - -# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Label: "ANF Secure Server Root CA" -# Serial: 996390341000653745 -# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 -# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 -# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 ------BEGIN CERTIFICATE----- 
-MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV -BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk -YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV -BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN -MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF -UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD -VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v -dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj -cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q -yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH -2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX -H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL -zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR -p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz -W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ -SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn -LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 -n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B -u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj -o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L -9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej -rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK -pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 -vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq -OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ -/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 -2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI -+PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 -MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo -tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= ------END CERTIFICATE----- - -# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority -# Label: "Certum EC-384 CA" -# Serial: 160250656287871593594747141429395092468 -# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 -# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed -# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 ------BEGIN CERTIFICATE----- -MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw -CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw -JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT -EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 -WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT -LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX -BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE -KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm -Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 -EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J -UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn -nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Root CA" -# Serial: 40870380103424195783807378461123655149 -# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 -# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 -# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd ------BEGIN CERTIFICATE----- -MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 -MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu -MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV -BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw -MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg -U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ -n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q -p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq -NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF -8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 -HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa -mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi -7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF -ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P -qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ -v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 -Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 -vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD -ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 -WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo -zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR -5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ -GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
-5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq -0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D -P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM -qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP -0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf -E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb ------END CERTIFICATE----- - -# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Label: "TunTrust Root CA" -# Serial: 108534058042236574382096126452369648152337120275 -# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 -# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb -# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL -BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg -Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv -b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG -EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u -IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ -n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd -2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF -VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ -GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF -li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU -r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 -eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb -MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg -jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB -7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW -5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE -ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 -90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z -xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu -QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 -FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH -22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP -xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn -dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 -Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b -nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ -CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH -u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj -d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS RSA Root CA 2021" -# Serial: 76817823531813593706434026085292783742 -# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 -# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d -# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d ------BEGIN 
CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg -Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL -MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl -YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv -b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l -mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE -4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv -a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M -pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw -Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b -LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY -AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB -AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq -E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr -W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ -CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU -X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 -f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja -H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP -JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P -zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt -jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 -/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT -BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 -aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW -xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU -63ZTGI0RmLo= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS ECC Root CA 2021" -# Serial: 137515985548005187474074462014555733966 -# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 -# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 -# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 ------BEGIN CERTIFICATE----- -MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw -CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh -cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v -dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG -A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj -aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg -Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 -KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y -STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD -AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw -SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN -nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps ------END CERTIFICATE----- diff --git a/venv/Lib/site-packages/pip/_vendor/certifi/core.py 
b/venv/Lib/site-packages/pip/_vendor/certifi/core.py deleted file mode 100644 index b8140cf..0000000 --- a/venv/Lib/site-packages/pip/_vendor/certifi/core.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import os - - -class _PipPatchedCertificate(Exception): - pass - - -try: - # Return a certificate file on disk for a standalone pip zipapp running in - # an isolated build environment to use. Passing --cert to the standalone - # pip does not work since requests calls where() unconditionally on import. - _PIP_STANDALONE_CERT = os.environ.get("_PIP_STANDALONE_CERT") - if _PIP_STANDALONE_CERT: - def where(): - return _PIP_STANDALONE_CERT - raise _PipPatchedCertificate() - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where(): - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - - return _CACERT_PATH - -except _PipPatchedCertificate: - pass - -except ImportError: - # This fallback will work for Python versions prior to 3.7 that lack the - # importlib.resources module but relies on the existing `where` function - # so won't address issues with environments like PyOxidizer that don't set - # __file__ on modules. - def read_text(_module, _path, encoding="ascii"): - with open(where(), "r", encoding=encoding) as data: - return data.read() - - # If we don't have importlib.resources, then we will just do the old logic - # of assuming we're on the filesystem and munge the path directly. - def where(): - f = os.path.dirname(__file__) - - return os.path.join(f, "cacert.pem") - - -def contents(): - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__init__.py b/venv/Lib/site-packages/pip/_vendor/chardet/__init__.py deleted file mode 100644 index 80ad254..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -from .universaldetector import UniversalDetector -from .enums import InputState -from .version import __version__, VERSION - - -__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION'] - - -def detect(byte_str): - """ - Detect the encoding of the given byte string. - - :param byte_str: The byte sequence to examine. - :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - detector = UniversalDetector() - detector.feed(byte_str) - return detector.close() - - -def detect_all(byte_str): - """ - Detect all the possible encodings of the given byte string. - - :param byte_str: The byte sequence to examine. - :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - - detector = UniversalDetector() - detector.feed(byte_str) - detector.close() - - if detector._input_state == InputState.HIGH_BYTE: - results = [] - for prober in detector._charset_probers: - if prober.get_confidence() > detector.MINIMUM_THRESHOLD: - charset_name = prober.charset_name - lower_charset_name = prober.charset_name.lower() - # Use Windows encoding name instead of ISO-8859 if we saw any - # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if detector._has_win_bytes: - charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, - charset_name) - results.append({ - 'encoding': charset_name, - 'confidence': prober.get_confidence(), - 'language': prober.language, - }) - if len(results) > 0: - return sorted(results, key=lambda result: -result['confidence']) - - return [detector.result] diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 8a34cdb..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc deleted file mode 100644 index cc68fa2..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc deleted file mode 100644 index 55f1e52..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc deleted file mode 100644 index 1e5d7e0..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc deleted file mode 100644 index 4f43ff8..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc deleted file mode 100644 index bf325c2..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc deleted file mode 100644 index 00d8478..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc deleted file mode 100644 index 95e2a4d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc deleted file mode 100644 index c607304..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc deleted file mode 100644 index 3b6470c..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc deleted file mode 100644 index 5f2b17d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc deleted file mode 100644 index d9f7087..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc deleted file mode 100644 index 265e315..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc 
b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc deleted file mode 100644 index 0a8cb7c..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc deleted file mode 100644 index 9914cda..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc deleted file mode 100644 index 036a1a4..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc deleted file mode 100644 index 8f8e508..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc deleted file mode 100644 index e1c4430..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc deleted file mode 100644 index 0fa8b02..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc deleted file mode 100644 index 7efd0c7..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc deleted file mode 100644 index cc3b761..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc deleted file mode 100644 index 9585399..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc deleted file mode 100644 index 0174e18..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc deleted file mode 100644 
index cb0932b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc deleted file mode 100644 index d2f8f5b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc deleted file mode 100644 index 33277ba..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc deleted file mode 100644 index 53db7c7..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc deleted file mode 100644 index cb30d6e..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc deleted file mode 100644 index 5a62e03..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc deleted file mode 100644 index 6586bda..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc deleted file mode 100644 index ec65a17..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc deleted file mode 100644 index 393abde..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc deleted file mode 100644 index 529adcc..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc deleted file mode 100644 index 
6f5fc0d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc deleted file mode 100644 index f5b5249..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc deleted file mode 100644 index c6c44f5..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc deleted file mode 100644 index 6886149..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc deleted file mode 100644 index d353337..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc deleted file mode 100644 index 959529c..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/big5freq.py b/venv/Lib/site-packages/pip/_vendor/chardet/big5freq.py deleted file mode 100644 index 38f3251..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/big5freq.py +++ /dev/null @@ -1,386 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Big5 frequency table -# by Taiwan's Mandarin Promotion Council -# -# -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -#Char to FreqOrder table -BIG5_TABLE_SIZE = 5376 - -BIG5_CHAR_TO_FREQ_ORDER = ( - 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 -3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 -1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 - 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 -3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 -4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 -5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 - 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 - 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 - 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 -2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 -1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 -3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 - 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 -3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 -2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 - 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 -3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 -1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 -5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 - 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 -5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 -1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 - 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 - 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 -3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 -3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 - 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 -2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 -2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 - 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 - 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 -3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 -1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 -1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 -1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 -2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 - 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 -4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 -1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 -5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 -2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 - 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 - 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 - 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 - 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 -5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 - 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 -1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 - 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 - 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 -5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 -1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 - 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 -3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 -4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 -3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 - 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 - 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 -1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 -4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 -3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 -3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 -2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 -5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 -3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 -5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 -1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 -2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 -1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 - 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 -1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 -4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 -3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 - 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 - 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 - 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 -2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 -5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 -1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 -2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 -1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 -1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 -5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 -5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 -5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 -3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 -4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 -4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 -2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 -5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 -3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 - 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 -5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 -5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 -1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 -2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 -3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 -4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 -5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 -3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 -4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 -1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 -1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 -4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 -1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 - 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 -1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 -1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 -3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 - 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 -5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 -2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 -1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 -1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 -5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 - 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 -4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 - 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 -2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 - 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 -1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 -1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 - 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 -4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 -4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 -1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 -3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 -5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 -5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 -1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 -2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 -1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 -3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 -2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 -3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 -2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 -4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 -4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 -3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 - 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 -3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 - 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 -3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 -4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 -3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 -1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 -5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 - 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 -5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 -1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 - 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 -4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 -4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 - 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 -2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 -2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 -3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 -1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 -4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 -2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 -1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 -1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 -2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 -3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 -1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 -5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 -1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 -4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 -1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 - 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 -1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 -4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 -4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 -2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 -1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 -4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 - 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 -5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 -2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 -3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 -4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 - 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 -5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 -5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 -1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 -4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 -4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 -2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 -3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 -3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
-2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 -1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 -4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 -3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 -3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 -2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 -4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 -5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 -3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 -2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 -3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 -1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 -2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 -3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 -4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 -2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 -2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 -5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 -1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 -2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 -1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 -3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 -4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 -2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 -3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 -3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 -2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 -4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 -2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 -3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 -4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 -5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 -3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 - 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 -1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 -4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 -1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 -4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 -5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 - 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
-5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 -5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 -2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 -3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 -2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 -2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 - 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 -1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 -4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 -3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 -3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 - 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 -2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 - 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 -2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 -4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 -1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 -4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 -1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 -3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 - 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 -3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 -5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 -5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 -3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 -3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 -1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 -2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 -5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 -1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 -1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 -3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 - 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 -1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 -4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 -5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 -2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 -3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 - 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 -1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 -2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 -2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 -5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 -5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 -5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 -2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 -2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 -1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 -4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 -3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 -3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 -4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 -4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 -2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 -2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 -5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 -4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 -5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 -4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 - 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 - 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 -1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 -3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 -4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 -1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 -5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 -2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 -2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 -3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 -5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 -1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 -3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 -5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 -1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 -5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 -2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 -3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 -2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 -3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 -3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 -3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 -4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 - 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 -2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 -4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 -3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 -5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 -1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 -5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 - 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 -1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 - 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 -4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 -1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 -4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 -1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 - 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 -3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 -4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 -5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 - 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 -3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 - 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 -2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 -) - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/big5prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/big5prober.py deleted file mode 100644 index 98f9970..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/big5prober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
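The BIG5_CHAR_TO_FREQ_ORDER table whose tail is deleted above exists to answer one question for the distribution analysis in chardistribution.py (deleted further below): given a character's computed order, does its frequency rank fall inside the top 512? A minimal sketch of that check, using a hypothetical three-entry table in place of the real one:

# Toy stand-in for BIG5_CHAR_TO_FREQ_ORDER: index = character "order",
# value = frequency rank (lower = more common in the sampled text).
TOY_CHAR_TO_FREQ_ORDER = (13, 600, 5290)

FREQUENT_RANK_CUTOFF = 512  # the cutoff CharDistributionAnalysis.feed uses below

def is_frequent(order: int) -> bool:
    """True when the character's frequency rank falls inside the top 512."""
    return 0 <= order < len(TOY_CHAR_TO_FREQ_ORDER) and \
        TOY_CHAR_TO_FREQ_ORDER[order] < FREQUENT_RANK_CUTOFF

print(is_frequent(0))  # True  (rank 13)
print(is_frequent(1))  # False (rank 600)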
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import Big5DistributionAnalysis -from .mbcssm import BIG5_SM_MODEL - - -class Big5Prober(MultiByteCharSetProber): - def __init__(self): - super(Big5Prober, self).__init__() - self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) - self.distribution_analyzer = Big5DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "Big5" - - @property - def language(self): - return "Chinese" diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/chardistribution.py b/venv/Lib/site-packages/pip/_vendor/chardet/chardistribution.py deleted file mode 100644 index c0395f4..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/chardistribution.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, - EUCTW_TYPICAL_DISTRIBUTION_RATIO) -from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, - EUCKR_TYPICAL_DISTRIBUTION_RATIO) -from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, - GB2312_TYPICAL_DISTRIBUTION_RATIO) -from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, - BIG5_TYPICAL_DISTRIBUTION_RATIO) -from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, - JIS_TYPICAL_DISTRIBUTION_RATIO) - - -class CharDistributionAnalysis(object): - ENOUGH_DATA_THRESHOLD = 1024 - SURE_YES = 0.99 - SURE_NO = 0.01 - MINIMUM_DATA_THRESHOLD = 3 - - def __init__(self): - # Mapping table to get frequency order from char order (get from - # GetOrder()) - self._char_to_freq_order = None - self._table_size = None # Size of above table - # This is a constant value which varies from language to language, - # used in calculating confidence. See - # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html - # for further detail. 
- self.typical_distribution_ratio = None - self._done = None - self._total_chars = None - self._freq_chars = None - self.reset() - - def reset(self): - """reset analyser, clear any state""" - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - self._total_chars = 0 # Total characters encountered - # The number of characters whose frequency order is less than 512 - self._freq_chars = 0 - - def feed(self, char, char_len): - """feed a character with known length""" - if char_len == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(char) - else: - order = -1 - if order >= 0: - self._total_chars += 1 - # order is valid - if order < self._table_size: - if 512 > self._char_to_freq_order[order]: - self._freq_chars += 1 - - def get_confidence(self): - """return confidence based on existing data""" - # if we didn't receive any character in our consideration range, - # return negative answer - if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: - return self.SURE_NO - - if self._total_chars != self._freq_chars: - r = (self._freq_chars / ((self._total_chars - self._freq_chars) - * self.typical_distribution_ratio)) - if r < self.SURE_YES: - return r - - # normalize confidence (we don't want to be 100% sure) - return self.SURE_YES - - def got_enough_data(self): - # It is not necessary to receive all data to draw conclusion. - # For charset detection, certain amount of data is enough - return self._total_chars > self.ENOUGH_DATA_THRESHOLD - - def get_order(self, byte_str): - # We do not handle characters based on the original encoding string, - # but convert this encoding string to a number, here called order. - # This allows multiple encodings of a language to share one frequency - # table. - return -1 - - -class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCTWDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER - self._table_size = EUCTW_TABLE_SIZE - self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-TW encoding, we are interested - # first byte range: 0xc4 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = byte_str[0] - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 - else: - return -1 - - -class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCKRDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER - self._table_size = EUCKR_TABLE_SIZE - self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-KR encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. 
State machine has done that - first_char = byte_str[0] - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 - else: - return -1 - - -class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(GB2312DistributionAnalysis, self).__init__() - self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER - self._table_size = GB2312_TABLE_SIZE - self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for GB2312 encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0xB0) and (second_char >= 0xA1): - return 94 * (first_char - 0xB0) + second_char - 0xA1 - else: - return -1 - - -class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(Big5DistributionAnalysis, self).__init__() - self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER - self._table_size = BIG5_TABLE_SIZE - self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for big5 encoding, we are interested - # first byte range: 0xa4 -- 0xfe - # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: - return -1 - - -class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(SJISDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for sjis encoding, we are interested - # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe - # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0x81) and (first_char <= 0x9F): - order = 188 * (first_char - 0x81) - elif (first_char >= 0xE0) and (first_char <= 0xEF): - order = 188 * (first_char - 0xE0 + 31) - else: - return -1 - order = order + second_char - 0x40 - if second_char > 0x7F: - order = -1 - return order - - -class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCJPDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-JP encoding, we are interested - # first byte range: 0xa0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - char = byte_str[0] - if char >= 0xA0: - return 94 * (char - 0xA1) + byte_str[1] - 0xa1 - else: - return -1 diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py deleted file mode 100644 index 5812cef..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py +++ /dev/null @@ -1,107 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
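The CharDistributionAnalysis hierarchy deleted just above reduces to two counters (characters seen, characters whose frequency rank is in the top 512) plus one language-specific constant. A standalone sketch of the same confidence formula; the function name and example numbers are illustrative, while the constants and the 6.0 ratio for EUC-KR come from the deleted sources:

SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3

def distribution_confidence(total_chars: int, freq_chars: int,
                            typical_distribution_ratio: float) -> float:
    """Same shape as CharDistributionAnalysis.get_confidence above."""
    if total_chars <= 0 or freq_chars <= MINIMUM_DATA_THRESHOLD:
        return SURE_NO
    if total_chars != freq_chars:
        r = freq_chars / ((total_chars - freq_chars) * typical_distribution_ratio)
        if r < SURE_YES:
            return r
    return SURE_YES  # never report 100% certainty

# 100 two-byte characters seen, 60 of them frequent, EUC-KR's typical ratio of 6.0:
print(distribution_confidence(100, 60, 6.0))  # 0.25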
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState -from .charsetprober import CharSetProber - - -class CharSetGroupProber(CharSetProber): - def __init__(self, lang_filter=None): - super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) - self._active_num = 0 - self.probers = [] - self._best_guess_prober = None - - def reset(self): - super(CharSetGroupProber, self).reset() - self._active_num = 0 - for prober in self.probers: - if prober: - prober.reset() - prober.active = True - self._active_num += 1 - self._best_guess_prober = None - - @property - def charset_name(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.charset_name - - @property - def language(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.language - - def feed(self, byte_str): - for prober in self.probers: - if not prober: - continue - if not prober.active: - continue - state = prober.feed(byte_str) - if not state: - continue - if state == ProbingState.FOUND_IT: - self._best_guess_prober = prober - self._state = ProbingState.FOUND_IT - return self.state - elif state == ProbingState.NOT_ME: - prober.active = False - self._active_num -= 1 - if self._active_num <= 0: - self._state = ProbingState.NOT_ME - return self.state - return self.state - - def get_confidence(self): - state = self.state - if state == ProbingState.FOUND_IT: - return 0.99 - elif state == ProbingState.NOT_ME: - return 0.01 - best_conf = 0.0 - self._best_guess_prober = None - for prober in self.probers: - if not prober: - continue - if not prober.active: - self.logger.debug('%s not active', prober.charset_name) - continue - conf = prober.get_confidence() - self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) - if best_conf < conf: - best_conf = conf - self._best_guess_prober = prober - if not self._best_guess_prober: - return 0.0 - return best_conf diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/charsetprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/charsetprober.py deleted file mode 100644 index eac4e59..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/charsetprober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. 
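CharSetGroupProber, deleted just above, detects nothing on its own: it fans the input out to child probers, deactivates the ones that answer NOT_ME, and reports whichever active child is most confident. A condensed sketch of that selection step (Candidate is a stand-in for the real child probers, and the FOUND_IT/NOT_ME shortcut returns are omitted):

from typing import Optional, Sequence

class Candidate:
    """Stand-in for a child CharSetProber: a name, an active flag, a confidence."""
    def __init__(self, charset_name: str, confidence: float, active: bool = True):
        self.charset_name = charset_name
        self.active = active
        self._confidence = confidence

    def get_confidence(self) -> float:
        return self._confidence

def best_guess(probers: Sequence[Candidate]) -> Optional[Candidate]:
    """Mirror of the loop in CharSetGroupProber.get_confidence above."""
    best_conf, best = 0.0, None
    for prober in probers:
        if not prober or not prober.active:
            continue
        conf = prober.get_confidence()
        if conf > best_conf:
            best_conf, best = conf, prober
    return best

winner = best_guess([Candidate("Big5", 0.4),
                     Candidate("EUC-KR", 0.7),
                     Candidate("SHIFT_JIS", 0.2, active=False)])
print(winner.charset_name)  # EUC-KR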
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging -import re - -from .enums import ProbingState - - -class CharSetProber(object): - - SHORTCUT_THRESHOLD = 0.95 - - def __init__(self, lang_filter=None): - self._state = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - - def reset(self): - self._state = ProbingState.DETECTING - - @property - def charset_name(self): - return None - - def feed(self, buf): - pass - - @property - def state(self): - return self._state - - def get_confidence(self): - return 0.0 - - @staticmethod - def filter_high_byte_only(buf): - buf = re.sub(b'([\x00-\x7F])+', b' ', buf) - return buf - - @staticmethod - def filter_international_words(buf): - """ - We define three types of bytes: - alphabet: english alphabets [a-zA-Z] - international: international characters [\x80-\xFF] - marker: everything else [^a-zA-Z\x80-\xFF] - - The input buffer can be thought to contain a series of words delimited - by markers. This function works to filter all words that contain at - least one international character. All contiguous sequences of markers - are replaced by a single space ascii character. - - This filter applies to all scripts which do not use English characters. - """ - filtered = bytearray() - - # This regex expression filters out only words that have at-least one - # international character. The word may include one marker character at - # the end. - words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', - buf) - - for word in words: - filtered.extend(word[:-1]) - - # If the last character in the word is a marker, replace it with a - # space as markers shouldn't affect our analysis (they are used - # similarly across all languages and may thus have similar - # frequencies). - last_char = word[-1:] - if not last_char.isalpha() and last_char < b'\x80': - last_char = b' ' - filtered.extend(last_char) - - return filtered - - @staticmethod - def filter_with_english_letters(buf): - """ - Returns a copy of ``buf`` that retains only the sequences of English - alphabet and high byte characters that are not between <> characters. - Also retains English alphabet and high byte characters immediately - before occurrences of >. - - This filter can be applied to all scripts which contain both English - characters and extended ASCII characters, but is currently only used by - ``Latin1Prober``. 
- """ - filtered = bytearray() - in_tag = False - prev = 0 - - for curr in range(len(buf)): - # Slice here to get bytes instead of an int with Python 3 - buf_char = buf[curr:curr + 1] - # Check if we're coming out of or entering an HTML tag - if buf_char == b'>': - in_tag = False - elif buf_char == b'<': - in_tag = True - - # If current character is not extended-ASCII and not alphabetic... - if buf_char < b'\x80' and not buf_char.isalpha(): - # ...and we're not in a tag - if curr > prev and not in_tag: - # Keep everything after last non-extended-ASCII, - # non-alphabetic character - filtered.extend(buf[prev:curr]) - # Output a space to delimit stretch we kept - filtered.extend(b' ') - prev = curr + 1 - - # If we're not in a tag... - if not in_tag: - # Keep everything after last non-extended-ASCII, non-alphabetic - # character - filtered.extend(buf[prev:]) - - return filtered diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__init__.py b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 6354d23..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc deleted file mode 100644 index 4b77f31..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py b/venv/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py deleted file mode 100644 index 6d6f93a..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" - -from __future__ import absolute_import, print_function, unicode_literals - -import argparse -import sys - -from pip._vendor.chardet import __version__ -from pip._vendor.chardet.compat import PY2 -from pip._vendor.chardet.universaldetector import UniversalDetector - - -def description_of(lines, name='stdin'): - """ - Return a string describing the probable encoding of a file or - list of strings. - - :param lines: The lines to get the encoding of. - :type lines: Iterable of bytes - :param name: Name of file or collection of lines - :type name: str - """ - u = UniversalDetector() - for line in lines: - line = bytearray(line) - u.feed(line) - # shortcut out of the loop to save reading further - particularly useful if we read a BOM. 
- if u.done: - break - u.close() - result = u.result - if PY2: - name = name.decode(sys.getfilesystemencoding(), 'ignore') - if result['encoding']: - return '{}: {} with confidence {}'.format(name, result['encoding'], - result['confidence']) - else: - return '{}: no result'.format(name) - - -def main(argv=None): - """ - Handles command line arguments and gets things started. - - :param argv: List of arguments, as if specified on the command-line. - If None, ``sys.argv[1:]`` is used instead. - :type argv: list of str - """ - # Get command line arguments - parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings") - parser.add_argument('input', - help='File whose encoding we would like to determine. \ - (default: stdin)', - type=argparse.FileType('rb'), nargs='*', - default=[sys.stdin if PY2 else sys.stdin.buffer]) - parser.add_argument('--version', action='version', - version='%(prog)s {}'.format(__version__)) - args = parser.parse_args(argv) - - for f in args.input: - if f.isatty(): - print("You are running chardetect interactively. Press " + - "CTRL-D twice at the start of a blank line to signal the " + - "end of your input. If you want help, run chardetect " + - "--help\n", file=sys.stderr) - print(description_of(f, f.name)) - - -if __name__ == '__main__': - main() diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/codingstatemachine.py b/venv/Lib/site-packages/pip/_vendor/chardet/codingstatemachine.py deleted file mode 100644 index 68fba44..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/codingstatemachine.py +++ /dev/null @@ -1,88 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging - -from .enums import MachineState - - -class CodingStateMachine(object): - """ - A state machine to verify a byte sequence for a particular encoding. For - each byte the detector receives, it will feed that byte to every active - state machine available, one byte at a time. The state machine changes its - state based on its previous state and the byte it receives. There are 3 - states in a state machine that are of interest to an auto-detector: - - START state: This is the state to start with, or a legal byte sequence - (i.e. a valid code point) for character has been identified. 
- - ME state: This indicates that the state machine identified a byte sequence - that is specific to the charset it is designed for and that - there is no other possible encoding which can contain this byte - sequence. This will to lead to an immediate positive answer for - the detector. - - ERROR state: This indicates the state machine identified an illegal byte - sequence for that encoding. This will lead to an immediate - negative answer for this encoding. Detector will exclude this - encoding from consideration from here on. - """ - def __init__(self, sm): - self._model = sm - self._curr_byte_pos = 0 - self._curr_char_len = 0 - self._curr_state = None - self.logger = logging.getLogger(__name__) - self.reset() - - def reset(self): - self._curr_state = MachineState.START - - def next_state(self, c): - # for each byte we get its class - # if it is first byte, we also get byte length - byte_class = self._model['class_table'][c] - if self._curr_state == MachineState.START: - self._curr_byte_pos = 0 - self._curr_char_len = self._model['char_len_table'][byte_class] - # from byte's class and state_table, we get its next state - curr_state = (self._curr_state * self._model['class_factor'] - + byte_class) - self._curr_state = self._model['state_table'][curr_state] - self._curr_byte_pos += 1 - return self._curr_state - - def get_current_charlen(self): - return self._curr_char_len - - def get_coding_state_machine(self): - return self._model['name'] - - @property - def language(self): - return self._model['language'] diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/compat.py b/venv/Lib/site-packages/pip/_vendor/chardet/compat.py deleted file mode 100644 index 8941572..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/compat.py +++ /dev/null @@ -1,36 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Dan Blanchard -# Ian Cordasco -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - PY2 = True - PY3 = False - string_types = (str, unicode) - text_type = unicode - iteritems = dict.iteritems -else: - PY2 = False - PY3 = True - string_types = (bytes, str) - text_type = str - iteritems = dict.items diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/cp949prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/cp949prober.py deleted file mode 100644 index efd793a..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/cp949prober.py +++ /dev/null @@ -1,49 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. 
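CodingStateMachine, deleted above, keeps every encoding's transition table as a single flat tuple and finds the next state at index current_state * class_factor + byte_class. A toy model (deliberately not one of the real *_SM_MODEL tables) makes that lookup explicit; the 0/1/2 state values match MachineState in enums.py below:

START, ERROR, ITS_ME = 0, 1, 2  # same values as MachineState

# Toy model: two byte classes (0 = ordinary byte, 1 = the byte 0x7E), class_factor = 2.
TOY_MODEL = {
    "class_table": tuple(1 if b == 0x7E else 0 for b in range(256)),
    "class_factor": 2,
    "state_table": (START, ITS_ME,    # from START
                    ERROR, ERROR,     # from ERROR (absorbing)
                    ITS_ME, ITS_ME),  # from ITS_ME (absorbing)
    "char_len_table": (1, 1),
}

def next_state(curr_state: int, byte: int, model: dict) -> int:
    """The same flat-table lookup CodingStateMachine.next_state performs."""
    byte_class = model["class_table"][byte]
    return model["state_table"][curr_state * model["class_factor"] + byte_class]

state = START
for b in b"ab~":
    state = next_state(state, b, TOY_MODEL)
print(state == ITS_ME)  # True: the 0x7E byte drove the toy machine to ITS_ME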
-# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .chardistribution import EUCKRDistributionAnalysis -from .codingstatemachine import CodingStateMachine -from .mbcharsetprober import MultiByteCharSetProber -from .mbcssm import CP949_SM_MODEL - - -class CP949Prober(MultiByteCharSetProber): - def __init__(self): - super(CP949Prober, self).__init__() - self.coding_sm = CodingStateMachine(CP949_SM_MODEL) - # NOTE: CP949 is a superset of EUC-KR, so the distribution should be - # not different. - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "CP949" - - @property - def language(self): - return "Korean" diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/enums.py b/venv/Lib/site-packages/pip/_vendor/chardet/enums.py deleted file mode 100644 index 0451207..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/enums.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -All of the Enums that are used throughout the chardet package. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - - -class InputState(object): - """ - This enum represents the different states a universal detector can be in. - """ - PURE_ASCII = 0 - ESC_ASCII = 1 - HIGH_BYTE = 2 - - -class LanguageFilter(object): - """ - This enum represents the different language filters we can apply to a - ``UniversalDetector``. - """ - CHINESE_SIMPLIFIED = 0x01 - CHINESE_TRADITIONAL = 0x02 - JAPANESE = 0x04 - KOREAN = 0x08 - NON_CJK = 0x10 - ALL = 0x1F - CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL - CJK = CHINESE | JAPANESE | KOREAN - - -class ProbingState(object): - """ - This enum represents the different states a prober can be in. - """ - DETECTING = 0 - FOUND_IT = 1 - NOT_ME = 2 - - -class MachineState(object): - """ - This enum represents the different states a state machine can be in. - """ - START = 0 - ERROR = 1 - ITS_ME = 2 - - -class SequenceLikelihood(object): - """ - This enum represents the likelihood of a character following the previous one. - """ - NEGATIVE = 0 - UNLIKELY = 1 - LIKELY = 2 - POSITIVE = 3 - - @classmethod - def get_num_categories(cls): - """:returns: The number of likelihood categories in the enum.""" - return 4 - - -class CharacterCategory(object): - """ - This enum represents the different categories language models for - ``SingleByteCharsetProber`` put characters into. - - Anything less than CONTROL is considered a letter. 
- """ - UNDEFINED = 255 - LINE_BREAK = 254 - SYMBOL = 253 - DIGIT = 252 - CONTROL = 251 diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/escprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/escprober.py deleted file mode 100644 index c70493f..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/escprober.py +++ /dev/null @@ -1,101 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .enums import LanguageFilter, ProbingState, MachineState -from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, - ISO2022KR_SM_MODEL) - - -class EscCharSetProber(CharSetProber): - """ - This CharSetProber uses a "code scheme" approach for detecting encodings, - whereby easily recognizable escape or shift sequences are relied on to - identify these encodings. 
- """ - - def __init__(self, lang_filter=None): - super(EscCharSetProber, self).__init__(lang_filter=lang_filter) - self.coding_sm = [] - if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: - self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) - self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) - if self.lang_filter & LanguageFilter.JAPANESE: - self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) - if self.lang_filter & LanguageFilter.KOREAN: - self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) - self.active_sm_count = None - self._detected_charset = None - self._detected_language = None - self._state = None - self.reset() - - def reset(self): - super(EscCharSetProber, self).reset() - for coding_sm in self.coding_sm: - if not coding_sm: - continue - coding_sm.active = True - coding_sm.reset() - self.active_sm_count = len(self.coding_sm) - self._detected_charset = None - self._detected_language = None - - @property - def charset_name(self): - return self._detected_charset - - @property - def language(self): - return self._detected_language - - def get_confidence(self): - if self._detected_charset: - return 0.99 - else: - return 0.00 - - def feed(self, byte_str): - for c in byte_str: - for coding_sm in self.coding_sm: - if not coding_sm or not coding_sm.active: - continue - coding_state = coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - coding_sm.active = False - self.active_sm_count -= 1 - if self.active_sm_count <= 0: - self._state = ProbingState.NOT_ME - return self.state - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - self._detected_charset = coding_sm.get_coding_state_machine() - self._detected_language = coding_sm.language - return self.state - - return self.state diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/escsm.py b/venv/Lib/site-packages/pip/_vendor/chardet/escsm.py deleted file mode 100644 index 0069523..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/escsm.py +++ /dev/null @@ -1,246 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
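EscCharSetProber, deleted just above, picks its escape-sequence state machines with plain bit tests against the lang_filter mask defined in enums.py. A small sketch of the same pattern; the flag values are the ones from the deleted enums module, and the returned strings are only labels standing in for the real machines:

CHINESE_SIMPLIFIED = 0x01
CHINESE_TRADITIONAL = 0x02
JAPANESE = 0x04
KOREAN = 0x08
CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
CJK = CHINESE | JAPANESE | KOREAN

def machines_for(lang_filter: int) -> list:
    """Same selection logic as EscCharSetProber.__init__ above, minus the real machines."""
    selected = []
    if lang_filter & CHINESE_SIMPLIFIED:
        selected += ["HZ-GB-2312", "ISO-2022-CN"]
    if lang_filter & JAPANESE:
        selected.append("ISO-2022-JP")
    if lang_filter & KOREAN:
        selected.append("ISO-2022-KR")
    return selected

print(machines_for(CJK))     # all four escape-scheme machines
print(machines_for(KOREAN))  # ['ISO-2022-KR']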
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -HZ_CLS = ( -1,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,0,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,4,0,5,2,0, # 78 - 7f -1,1,1,1,1,1,1,1, # 80 - 87 -1,1,1,1,1,1,1,1, # 88 - 8f -1,1,1,1,1,1,1,1, # 90 - 97 -1,1,1,1,1,1,1,1, # 98 - 9f -1,1,1,1,1,1,1,1, # a0 - a7 -1,1,1,1,1,1,1,1, # a8 - af -1,1,1,1,1,1,1,1, # b0 - b7 -1,1,1,1,1,1,1,1, # b8 - bf -1,1,1,1,1,1,1,1, # c0 - c7 -1,1,1,1,1,1,1,1, # c8 - cf -1,1,1,1,1,1,1,1, # d0 - d7 -1,1,1,1,1,1,1,1, # d8 - df -1,1,1,1,1,1,1,1, # e0 - e7 -1,1,1,1,1,1,1,1, # e8 - ef -1,1,1,1,1,1,1,1, # f0 - f7 -1,1,1,1,1,1,1,1, # f8 - ff -) - -HZ_ST = ( -MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 - 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f - 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 - 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f -) - -HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -HZ_SM_MODEL = {'class_table': HZ_CLS, - 'class_factor': 6, - 'state_table': HZ_ST, - 'char_len_table': HZ_CHAR_LEN_TABLE, - 'name': "HZ-GB-2312", - 'language': 'Chinese'} - -ISO2022CN_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,3,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,4,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022CN_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f 
-MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 - 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f -) - -ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, - 'class_factor': 9, - 'state_table': ISO2022CN_ST, - 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, - 'name': "ISO-2022-CN", - 'language': 'Chinese'} - -ISO2022JP_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,2,2, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,7,0,0,0, # 20 - 27 -3,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -6,0,4,0,8,0,0,0, # 40 - 47 -0,9,5,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022JP_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 -) - 
-ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, - 'class_factor': 10, - 'state_table': ISO2022JP_ST, - 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, - 'name': "ISO-2022-JP", - 'language': 'Japanese'} - -ISO2022KR_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,3,0,0,0, # 20 - 27 -0,4,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,5,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022KR_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 -) - -ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, - 'class_factor': 6, - 'state_table': ISO2022KR_ST, - 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, - 'name': "ISO-2022-KR", - 'language': 'Korean'} - - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py deleted file mode 100644 index 20ce8f7..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
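The escape-sequence tables deleted above give an end-to-end example: ISO-2022-KR announces itself with the designator ESC $ ) C, and walking those four bytes through ISO2022KR_SM_MODEL lands on ITS_ME. A stripped-down version of the EscCharSetProber feed loop that shows this; the imports assume the vendored package is importable as pip._vendor.chardet (a standalone chardet install exposes the same modules under chardet.*), and the deactivation of machines that hit ERROR is omitted:

# Trimmed version of EscCharSetProber.feed: "first machine to reach ITS_ME wins".
from pip._vendor.chardet.codingstatemachine import CodingStateMachine
from pip._vendor.chardet.enums import MachineState
from pip._vendor.chardet.escsm import ISO2022JP_SM_MODEL, ISO2022KR_SM_MODEL

def detect_escape_charset(byte_str: bytes):
    machines = [CodingStateMachine(ISO2022JP_SM_MODEL),
                CodingStateMachine(ISO2022KR_SM_MODEL)]
    for c in byte_str:
        for sm in machines:
            if sm.next_state(c) == MachineState.ITS_ME:
                return sm.get_coding_state_machine()  # the model's 'name' field
    return None

print(detect_escape_charset(b"\x1b$)C annyeonghaseyo"))  # ISO-2022-KR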
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState, MachineState -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCJPDistributionAnalysis -from .jpcntx import EUCJPContextAnalysis -from .mbcssm import EUCJP_SM_MODEL - - -class EUCJPProber(MultiByteCharSetProber): - def __init__(self): - super(EUCJPProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) - self.distribution_analyzer = EUCJPDistributionAnalysis() - self.context_analyzer = EUCJPContextAnalysis() - self.reset() - - def reset(self): - super(EUCJPProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return "EUC-JP" - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char, char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/euckrfreq.py b/venv/Lib/site-packages/pip/_vendor/chardet/euckrfreq.py deleted file mode 100644 index b68078c..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/euckrfreq.py +++ /dev/null @@ -1,195 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
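EUCJPProber, deleted just above, layers two scorers on top of the usual state-machine validation: a context analyser and a distribution analyser each rate the stream, and the prober's confidence is simply the larger of the two scores. A one-function sketch of that final combination (the example numbers are made up):

def combined_confidence(context_conf: float, distribution_conf: float) -> float:
    """Mirror of EUCJPProber.get_confidence above: trust whichever analyser is surer."""
    return max(context_conf, distribution_conf)

# Early in a stream the context analyser has seen little, the distribution analyser more:
print(combined_confidence(0.10, 0.62))  # 0.62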
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology - -# 128 --> 0.79 -# 256 --> 0.92 -# 512 --> 0.986 -# 1024 --> 0.99944 -# 2048 --> 0.99999 -# -# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 -# Random Distribution Ration = 512 / (2350-512) = 0.279. -# -# Typical Distribution Ratio - -EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 - -EUCKR_TABLE_SIZE = 2352 - -# Char to FreqOrder table , -EUCKR_CHAR_TO_FREQ_ORDER = ( - 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, -1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, -1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, - 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, - 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, - 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, -1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, - 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, - 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, -1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, -1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, -1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, -1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, -1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, - 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, -1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, -1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, -1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, -1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, - 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, -1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, - 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, - 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, -1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, - 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, -1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, - 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, - 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, -1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, -1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, -1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, -1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, - 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, -1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, - 887, 640, 482, 174,1925, 69,1162, 
728,1428, 910,1926,1278,1218,1279, 386, 870, - 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, -1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, -1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, -1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, -1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, -1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, -1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, - 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, - 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, - 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, -1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, - 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, -1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, - 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, - 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, -2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, - 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, - 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, -2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, -2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, -2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, - 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, - 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, -2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, - 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, -1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, -2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, -1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, -2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, -2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, -1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, - 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, -2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, -2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, - 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, - 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, -2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, -1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, -2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, -2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, -2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, -2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, -2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, -2194, 613, 
424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, -1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, -2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, -2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, -2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, -2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, -2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, -1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, -1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, -2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, -1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, -2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, -1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, - 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, -2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, - 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, -2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, - 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, -2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, -2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, - 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, -2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, -1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, - 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, -1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, -2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, -1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, -2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, - 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, -2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, -1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, -2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, -1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, -2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, -1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, - 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, -2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, -2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, - 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, - 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, -1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, -1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, - 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, -2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 
997, -2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, - 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, - 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, - 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, -2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, - 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, - 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, -2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, -2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, - 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, -2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, -1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, - 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, -2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, -2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, -2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, - 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, - 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, - 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, -2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, -2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, -2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, -1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, -2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, - 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 -) - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/euckrprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/euckrprober.py deleted file mode 100644 index 345a060..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/euckrprober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCKRDistributionAnalysis -from .mbcssm import EUCKR_SM_MODEL - - -class EUCKRProber(MultiByteCharSetProber): - def __init__(self): - super(EUCKRProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-KR" - - @property - def language(self): - return "Korean" diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/euctwfreq.py b/venv/Lib/site-packages/pip/_vendor/chardet/euctwfreq.py deleted file mode 100644 index ed7a995..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/euctwfreq.py +++ /dev/null @@ -1,387 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# EUCTW frequency table -# Converted from big5 work -# by Taiwan's Mandarin Promotion Council -# - -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -# Char to FreqOrder table , -EUCTW_TABLE_SIZE = 5376 - -EUCTW_CHAR_TO_FREQ_ORDER = ( - 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 -3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 -1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 - 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 -3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 -4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 -7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 - 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 - 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 - 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 -2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 -1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 -3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 - 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 -3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 -2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 - 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 -3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 -1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 -7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 - 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 -7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 -1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 - 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 - 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 -3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 -3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 - 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 -2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 -2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 - 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 - 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 -3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 -1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 -1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 -1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 -2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 - 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 -4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 -1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 -7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 -2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 - 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 - 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 - 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 - 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 -7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 - 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 -1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 - 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 - 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 -7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 -1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 - 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 -3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 -4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 -3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 - 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 - 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 -1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 -4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 -3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 -3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 -2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 -7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 -3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 -7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 -1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 -2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 -1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 - 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 -1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 -4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 -3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 - 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 - 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 - 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 -2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 -7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 -1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 -2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 -1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 -1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 -7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 -7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 -7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 -3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 -4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 -1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 -7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 -2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 -7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 -3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 -3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 -7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 -2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 -7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 - 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 -4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 -2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 -7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 -3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 -2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 -2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 - 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 -2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 -1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 -1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 -2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 -1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 -7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 -7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 -2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 -4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 -1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 -7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 - 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 -4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 - 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 -2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 - 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 -1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 -1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 - 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 -3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 -3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 -1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 -3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 -7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 -7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 -1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 -2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 -1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 -3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 -2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 -3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 -2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 -4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 -4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 -3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 - 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 -3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 - 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 -3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 -3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 -3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 -1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 -7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 - 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 -7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 -1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 - 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 -4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 -3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 - 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 -2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 -2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 -3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 -1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 -4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 -2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 -1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 -1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 -2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 -3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 -1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 -7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 -1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 -4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 -1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 - 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 -1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 -3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 -3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 -2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 -1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 -4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 - 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 -7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 -2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 -3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 -4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 - 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 -7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 -7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 -1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 -4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 -3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 -2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 -3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 -2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 -1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 -4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 -3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 -3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 -2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 -4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 -7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 -3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 -2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 -3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 -1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 -2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 -3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 -4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 -2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 -2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 -7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 -1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 -2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 -1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 -3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 -4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 -2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 -3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 -3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 -2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 -4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 -2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 -3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 -4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 -7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 -3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 - 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 -1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 -4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 -1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 -4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 -7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 - 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 -7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 -2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 -1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 -1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 -3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 - 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 - 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 - 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 -3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 -2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 - 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 -7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 -1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 -3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 -7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 -1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 -7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 -4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 -1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 -2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 -2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 -4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 - 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 - 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 -3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 -3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 -1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 -2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 -7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 -1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 -1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 -3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 - 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 -1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 -4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 -7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 -2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 -3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 - 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 -1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 -2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 -2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 -7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 -7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 -7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 -2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 -2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 -1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 -4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 -3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 -3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 -4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 -4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 -2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 -2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 -7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 -4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 -7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 -2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 -1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 -3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 -4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 -2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 - 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 -2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 -1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 -2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 -2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 -4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 -7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 -1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 -3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 -7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 -1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 -8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 -2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 -8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 -2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 -2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 -8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 -8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 -8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 - 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 -8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 -4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 -3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 -8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 -1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 -8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 - 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 -1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 - 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 -4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 -1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 -4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 -1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 - 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 -3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 -4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 -8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 - 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 -3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 - 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 -2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 -) - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/euctwprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/euctwprober.py deleted file mode 100644 index 35669cc..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/euctwprober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCTWDistributionAnalysis -from .mbcssm import EUCTW_SM_MODEL - -class EUCTWProber(MultiByteCharSetProber): - def __init__(self): - super(EUCTWProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) - self.distribution_analyzer = EUCTWDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-TW" - - @property - def language(self): - return "Taiwan" diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/gb2312freq.py b/venv/Lib/site-packages/pip/_vendor/chardet/gb2312freq.py deleted file mode 100644 index 697837b..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/gb2312freq.py +++ /dev/null @@ -1,283 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# GB2312 most frequently used character table -# -# Char to FreqOrder table , from hz6763 - -# 512 --> 0.79 -- 0.79 -# 1024 --> 0.92 -- 0.13 -# 2048 --> 0.98 -- 0.06 -# 6768 --> 1.00 -- 0.02 -# -# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 -# Random Distribution Ration = 512 / (3755 - 512) = 0.157 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR - -GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 - -GB2312_TABLE_SIZE = 3760 - -GB2312_CHAR_TO_FREQ_ORDER = ( -1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, -2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, -2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, - 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, -1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, -1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, - 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, -1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, -2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, -3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, - 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, -1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, - 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, -2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, - 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, -2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, -1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, -3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, - 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, -1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, - 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, -2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, -1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, -3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, -1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, -2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, -1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, - 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, -3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, -3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, - 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, -3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, - 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, -1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, -3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, -2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, -1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, - 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, -1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, -4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, - 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, -3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, -3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, - 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, -1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, -2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, -1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, -1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, - 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, -3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, -3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, -4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, - 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, -3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, -1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, -1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, -4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, - 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, - 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, -3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, -1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, - 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, -1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, -2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, - 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, - 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, - 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, -3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, -4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, -3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, - 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, -2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, -2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, -2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, - 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, -2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, - 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, - 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, - 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, -3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, -2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, -2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, -1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, - 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, -2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, - 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, - 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, -1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, -1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, - 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, - 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, -1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, -2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, -3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, -2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, -2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, -2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, -3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, -1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, -1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, -2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, -1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, -3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, -1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, -1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, -3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, - 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, -2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, -1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, -4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, -1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, -1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, -3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, -1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, - 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, - 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, -1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, - 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, -1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, -1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, - 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, -3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, -4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, -3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, -2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, -2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, -1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, -3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, -2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, -1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, -1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, - 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, -2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, -2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, -3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, -4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, -3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, - 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, -3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, -2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, -1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, - 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, - 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, -3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, -4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, -2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, -1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, -1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, - 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, -1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, -3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, - 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, - 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, -1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, - 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, -1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, - 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, -2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, - 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, -2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, -2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, -1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, -1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, -2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, - 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, -1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, -1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, -2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, -2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, -3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, -1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, -4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, - 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, - 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, -3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, -1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, - 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, -3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, -1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, -4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, -1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, -2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, -1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, - 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, -1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, -3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, - 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, -2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, - 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, -1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, -1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, -1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, -3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, -2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, -3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, -3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, -3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, - 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, -2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, - 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, -2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, - 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, -1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, - 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, - 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, -1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, -3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, -3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, -1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, -1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, -3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, -2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, -2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, -1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, -3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, - 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, -4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, -1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, -2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, -3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, -3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, -1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, - 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, - 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, -2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, - 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, -1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, - 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, -1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, -1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, -1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, -1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, -1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, - 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, - 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 -) - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/gb2312prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/gb2312prober.py deleted file mode 100644 index 8446d2d..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/gb2312prober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import GB2312DistributionAnalysis -from .mbcssm import GB2312_SM_MODEL - -class GB2312Prober(MultiByteCharSetProber): - def __init__(self): - super(GB2312Prober, self).__init__() - self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) - self.distribution_analyzer = GB2312DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "GB2312" - - @property - def language(self): - return "Chinese" diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/hebrewprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/hebrewprober.py deleted file mode 100644 index b0e1bf4..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/hebrewprober.py +++ /dev/null @@ -1,292 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Shy Shalom -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -# This prober doesn't actually recognize a language or a charset. -# It is a helper prober for the use of the Hebrew model probers - -### General ideas of the Hebrew charset recognition ### -# -# Four main charsets exist in Hebrew: -# "ISO-8859-8" - Visual Hebrew -# "windows-1255" - Logical Hebrew -# "ISO-8859-8-I" - Logical Hebrew -# "x-mac-hebrew" - ?? Logical Hebrew ?? -# -# Both "ISO" charsets use a completely identical set of code points, whereas -# "windows-1255" and "x-mac-hebrew" are two different proper supersets of -# these code points. windows-1255 defines additional characters in the range -# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific -# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. -# x-mac-hebrew defines similar additional code points but with a different -# mapping. -# -# As far as an average Hebrew text with no diacritics is concerned, all four -# charsets are identical with respect to code points. Meaning that for the -# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters -# (including final letters). 
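The GB2312Prober removed above is never called directly in normal use; applications go through chardet's public detect() helper, which feeds the data to every registered prober (this one, the Hebrew probers discussed next, and the rest) and reports the best match. A minimal usage sketch; the sample text and the printed result are illustrative only, and very short inputs may come back with low confidence or a different guess:

import chardet

sample = "汉字编码检测".encode("gb2312")  # a short GB2312-encoded byte string
result = chardet.detect(sample)           # dict with 'encoding', 'confidence', 'language'
print(result)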
-#
-# The dominant difference between these charsets is their directionality.
-# "Visual" directionality means that the text is ordered as if the renderer is
-# not aware of a BIDI rendering algorithm. The renderer sees the text and
-# draws it from left to right. The text itself when ordered naturally is read
-# backwards. A buffer of Visual Hebrew generally looks like so:
-# "[last word of first line spelled backwards] [whole line ordered backwards
-# and spelled backwards] [first word of first line spelled backwards]
-# [end of line] [last word of second line] ... etc' "
-# Adding punctuation marks, numbers and English text to visual text is
-# naturally also "visual" and from left to right.
-#
-# "Logical" directionality means the text is ordered "naturally" according to
-# the order it is read. It is the responsibility of the renderer to display
-# the text from right to left. A BIDI algorithm is used to place general
-# punctuation marks, numbers and English text in the text.
-#
-# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
-# what little evidence I could find, it seems that its general directionality
-# is Logical.
-#
-# To sum up all of the above, the Hebrew probing mechanism knows about two
-# charsets:
-# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
-# backwards while line order is natural. For charset recognition purposes
-# the line order is unimportant (In fact, for this implementation, even
-# word order is unimportant).
-# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
-#
-# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
-# specifically identified.
-# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
-# that contains special punctuation marks or diacritics is displayed with
-# some unconverted characters showing as question marks. This problem might
-# be corrected using another model prober for x-mac-hebrew. Due to the fact
-# that x-mac-hebrew texts are so rare, writing another model prober isn't
-# worth the effort and performance hit.
-#
-#### The Prober ####
-#
-# The prober is divided between two SBCharSetProbers and a HebrewProber,
-# all of which are managed, created, fed data, inquired and deleted by the
-# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
-# fact some kind of Hebrew, Logical or Visual. The final decision about which
-# one it is is made by the HebrewProber by combining final-letter scores
-# with the scores of the two SBCharSetProbers to produce a final answer.
-#
-# The SBCSGroupProber is responsible for stripping the original text of HTML
-# tags, English characters, numbers, low-ASCII punctuation characters, spaces
-# and new lines. It reduces any sequence of such characters to a single space.
-# The buffer fed to each prober in the SBCS group prober is pure text in
-# high-ASCII.
-# The two SBCharSetProbers (model probers) share the same language model:
-# Win1255Model.
-# The first SBCharSetProber uses the model normally as any other
-# SBCharSetProber does, to recognize windows-1255, upon which this model was
-# built. The second SBCharSetProber is told to make the pair-of-letter
-# lookup in the language model backwards. This in practice exactly simulates
-# a visual Hebrew model using the windows-1255 logical Hebrew model.
-#
-# The HebrewProber is not using any language model. All it does is look for
-# final-letter evidence suggesting the text is either logical Hebrew or visual
-# Hebrew.
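Before the class itself, a stripped-down standalone sketch of that final-letter idea may help. The function below is invented for illustration (it is not part of the deleted module), but it uses the same windows-1255 / ISO-8859-8 code points and the same three word-boundary cases that HebrewProber.feed() spells out further down:

# Final (word-ending) forms of Kaf, Mem, Nun, Pe and Tsadi, and the normal
# forms the prober treats as reliable non-final evidence (Tsadi excluded).
FINAL_FORMS = {0xEA, 0xED, 0xEF, 0xF3, 0xF5}
NON_FINAL_FORMS = {0xEB, 0xEE, 0xF0, 0xF4}
SPACE = 0x20

def score_hebrew_direction(byte_str):
    """Return (logical_score, visual_score) for a high-byte-only buffer."""
    logical = visual = 0
    prev = before_prev = SPACE          # pretend a space precedes the buffer
    for cur in byte_str:
        if cur == SPACE:                # a word just ended
            if before_prev != SPACE:    # ...and it was longer than one letter
                if prev in FINAL_FORMS:        # ends in a final form: logical text
                    logical += 1
                elif prev in NON_FINAL_FORMS:  # ends in a normal form: visual text
                    visual += 1
        elif before_prev == SPACE and prev in FINAL_FORMS:
            visual += 1                 # word starts with a final form: visual text
        before_prev, prev = prev, cur
    return logical, visual

Whichever counter wins by a sufficient margin decides between "windows-1255" and "ISO-8859-8", which is the trade-off the charset_name property below makes.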
Disjointed from the model probers, the results of the HebrewProber -# alone are meaningless. HebrewProber always returns 0.00 as confidence -# since it never identifies a charset by itself. Instead, the pointer to the -# HebrewProber is passed to the model probers as a helper "Name Prober". -# When the Group prober receives a positive identification from any prober, -# it asks for the name of the charset identified. If the prober queried is a -# Hebrew model prober, the model prober forwards the call to the -# HebrewProber to make the final decision. In the HebrewProber, the -# decision is made according to the final-letters scores maintained and Both -# model probers scores. The answer is returned in the form of the name of the -# charset identified, either "windows-1255" or "ISO-8859-8". - -class HebrewProber(CharSetProber): - # windows-1255 / ISO-8859-8 code points of interest - FINAL_KAF = 0xea - NORMAL_KAF = 0xeb - FINAL_MEM = 0xed - NORMAL_MEM = 0xee - FINAL_NUN = 0xef - NORMAL_NUN = 0xf0 - FINAL_PE = 0xf3 - NORMAL_PE = 0xf4 - FINAL_TSADI = 0xf5 - NORMAL_TSADI = 0xf6 - - # Minimum Visual vs Logical final letter score difference. - # If the difference is below this, don't rely solely on the final letter score - # distance. - MIN_FINAL_CHAR_DISTANCE = 5 - - # Minimum Visual vs Logical model score difference. - # If the difference is below this, don't rely at all on the model score - # distance. - MIN_MODEL_DISTANCE = 0.01 - - VISUAL_HEBREW_NAME = "ISO-8859-8" - LOGICAL_HEBREW_NAME = "windows-1255" - - def __init__(self): - super(HebrewProber, self).__init__() - self._final_char_logical_score = None - self._final_char_visual_score = None - self._prev = None - self._before_prev = None - self._logical_prober = None - self._visual_prober = None - self.reset() - - def reset(self): - self._final_char_logical_score = 0 - self._final_char_visual_score = 0 - # The two last characters seen in the previous buffer, - # mPrev and mBeforePrev are initialized to space in order to simulate - # a word delimiter at the beginning of the data - self._prev = ' ' - self._before_prev = ' ' - # These probers are owned by the group prober. - - def set_model_probers(self, logicalProber, visualProber): - self._logical_prober = logicalProber - self._visual_prober = visualProber - - def is_final(self, c): - return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, - self.FINAL_PE, self.FINAL_TSADI] - - def is_non_final(self, c): - # The normal Tsadi is not a good Non-Final letter due to words like - # 'lechotet' (to chat) containing an apostrophe after the tsadi. This - # apostrophe is converted to a space in FilterWithoutEnglishLetters - # causing the Non-Final tsadi to appear at an end of a word even - # though this is not the case in the original text. - # The letters Pe and Kaf rarely display a related behavior of not being - # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' - # for example legally end with a Non-Final Pe or Kaf. However, the - # benefit of these letters as Non-Final letters outweighs the damage - # since these words are quite rare. - return c in [self.NORMAL_KAF, self.NORMAL_MEM, - self.NORMAL_NUN, self.NORMAL_PE] - - def feed(self, byte_str): - # Final letter analysis for logical-visual decision. - # Look for evidence that the received buffer is either logical Hebrew - # or visual Hebrew. - # The following cases are checked: - # 1) A word longer than 1 letter, ending with a final letter. 
This is - # an indication that the text is laid out "naturally" since the - # final letter really appears at the end. +1 for logical score. - # 2) A word longer than 1 letter, ending with a Non-Final letter. In - # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, - # should not end with the Non-Final form of that letter. Exceptions - # to this rule are mentioned above in isNonFinal(). This is an - # indication that the text is laid out backwards. +1 for visual - # score - # 3) A word longer than 1 letter, starting with a final letter. Final - # letters should not appear at the beginning of a word. This is an - # indication that the text is laid out backwards. +1 for visual - # score. - # - # The visual score and logical score are accumulated throughout the - # text and are finally checked against each other in GetCharSetName(). - # No checking for final letters in the middle of words is done since - # that case is not an indication for either Logical or Visual text. - # - # We automatically filter out all 7-bit characters (replace them with - # spaces) so the word boundary detection works properly. [MAP] - - if self.state == ProbingState.NOT_ME: - # Both model probers say it's not them. No reason to continue. - return ProbingState.NOT_ME - - byte_str = self.filter_high_byte_only(byte_str) - - for cur in byte_str: - if cur == ' ': - # We stand on a space - a word just ended - if self._before_prev != ' ': - # next-to-last char was not a space so self._prev is not a - # 1 letter word - if self.is_final(self._prev): - # case (1) [-2:not space][-1:final letter][cur:space] - self._final_char_logical_score += 1 - elif self.is_non_final(self._prev): - # case (2) [-2:not space][-1:Non-Final letter][ - # cur:space] - self._final_char_visual_score += 1 - else: - # Not standing on a space - if ((self._before_prev == ' ') and - (self.is_final(self._prev)) and (cur != ' ')): - # case (3) [-2:space][-1:final letter][cur:not space] - self._final_char_visual_score += 1 - self._before_prev = self._prev - self._prev = cur - - # Forever detecting, till the end or until both model probers return - # ProbingState.NOT_ME (handled above) - return ProbingState.DETECTING - - @property - def charset_name(self): - # Make the decision: is it Logical or Visual? - # If the final letter score distance is dominant enough, rely on it. - finalsub = self._final_char_logical_score - self._final_char_visual_score - if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: - return self.LOGICAL_HEBREW_NAME - if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: - return self.VISUAL_HEBREW_NAME - - # It's not dominant enough, try to rely on the model scores instead. - modelsub = (self._logical_prober.get_confidence() - - self._visual_prober.get_confidence()) - if modelsub > self.MIN_MODEL_DISTANCE: - return self.LOGICAL_HEBREW_NAME - if modelsub < -self.MIN_MODEL_DISTANCE: - return self.VISUAL_HEBREW_NAME - - # Still no good, back to final letter distance, maybe it'll save the - # day. - if finalsub < 0.0: - return self.VISUAL_HEBREW_NAME - - # (finalsub > 0 - Logical) or (don't know what to do) default to - # Logical. - return self.LOGICAL_HEBREW_NAME - - @property - def language(self): - return 'Hebrew' - - @property - def state(self): - # Remain active as long as any of the model probers are active. 
-        if (self._logical_prober.state == ProbingState.NOT_ME) and \
-           (self._visual_prober.state == ProbingState.NOT_ME):
-            return ProbingState.NOT_ME
-        return ProbingState.DETECTING
diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/jisfreq.py b/venv/Lib/site-packages/pip/_vendor/chardet/jisfreq.py
deleted file mode 100644
index 83fc082..0000000
--- a/venv/Lib/site-packages/pip/_vendor/chardet/jisfreq.py
+++ /dev/null
@@ -1,325 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-# Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M text materials, including literature and computer technology
-#
-# Japanese frequency table, applied to both S-JIS and EUC-JP
-# They are sorted in order.
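These frequency-order tables are what chardet's character-distribution analysis consumes: roughly speaking, it counts how many of the characters seen so far fall among the most frequent entries (the cumulative-coverage figures just below show why 512 is used as the cutoff) and weighs that against the typical distribution ratio also defined below. A simplified sketch of that confidence computation; the function and parameter names here are illustrative, not chardet's actual API:

def distribution_confidence(freq_orders, typical_ratio, freq_cutoff=512):
    """Crude confidence that a character stream matches a frequency profile.

    freq_orders   -- frequency-order values, e.g. looked up per character in a
                     table such as JIS_CHAR_TO_FREQ_ORDER
    typical_ratio -- the charset's typical distribution ratio (e.g. 3.0 below)
    freq_cutoff   -- how many of the most frequent orders count as "common"
    """
    total = len(freq_orders)
    if total == 0:
        return 0.0
    freq_chars = sum(1 for order in freq_orders if 0 <= order < freq_cutoff)
    rare_chars = total - freq_chars
    if rare_chars == 0:
        return 0.99
    # More common characters relative to rare ones (scaled by the typical
    # ratio) means a better match; cap at 0.99 rather than claiming certainty.
    return min(freq_chars / (rare_chars * typical_ratio), 0.99)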
- -# 128 --> 0.77094 -# 256 --> 0.85710 -# 512 --> 0.92635 -# 1024 --> 0.97130 -# 2048 --> 0.99431 -# -# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 -# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 -# -# Typical Distribution Ratio, 25% of IDR - -JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 - -# Char to FreqOrder table , -JIS_TABLE_SIZE = 4368 - -JIS_CHAR_TO_FREQ_ORDER = ( - 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 -3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 -1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 -2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 -2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 -5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 -1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 -5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 -5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 -5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 -5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 -5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 -5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 -1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 -1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 -1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 -2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 -3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 -3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 - 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 - 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 -1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 - 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 -5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 - 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 - 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 - 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 - 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 - 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 -5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 -5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 -5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 -4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 -5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 -5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 -5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 -5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
-5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 -5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 -5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 -5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 -5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 -3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 -5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 -5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 -5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 -5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 -5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 -5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 -5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 -5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 -5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 -5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 -5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 -5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 -5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 -5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 -5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 -5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 -5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 -5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 -5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 -5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 -5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 -5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 -5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 -5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 -5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 -5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 -5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 -5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 -5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 -5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 -5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 -5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
-5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 -5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 -5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 -5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 -6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 -6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 -6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 -6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 -6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 -6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 -6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 -6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 -4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 - 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 - 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 -1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 -1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 - 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 -3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 -3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 - 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 -3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 -3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 - 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 -2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 - 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 -3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 -1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 - 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 -1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 - 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 -2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 -2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 -2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 -2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 -1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 -1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 -1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 -1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 -2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 -1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 -2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 -1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 -1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 -1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 -1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 -1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 -1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 - 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 - 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 -1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 -2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 -2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 -2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 -3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 -3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 - 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 -3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 -1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 - 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 -2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 -1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 - 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 -3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 -4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 -2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 -1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 -2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 -1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 - 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 - 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 -1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 -2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 -2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 -2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 -3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 -1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 -2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 - 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 - 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 - 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 -1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 -2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 - 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 -1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 -1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 - 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 -1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 -1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 -1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 - 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 -2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 - 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 -2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 -3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 -2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 -1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 -6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 -1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 -2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 -1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 - 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 - 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 -3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 -3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 -1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 -1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 -1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 -1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 - 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 - 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 -2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 - 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 -3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 -2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 - 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 -1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 -2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 - 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 -1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 - 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 -4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 -2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 -1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 - 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 -1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 -2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 - 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 -6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 -1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 -1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 -2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 -3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 - 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 -3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 -1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 - 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 -1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 - 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 -3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 - 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 -2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 - 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 -4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 -2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 -1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 -1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 -1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 - 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 -1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 -3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 -1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 -3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 - 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 - 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 - 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 -2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 -1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 - 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 -1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 - 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 -1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 - 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 - 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 - 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 -1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 -1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 -2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 -4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 - 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 -1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 - 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 -1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 -3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 -1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 -2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 -2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 -1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 -1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 -2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 - 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 -2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 -1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 -1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 -1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 -1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 -3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 -2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 -2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 - 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 -3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 -3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 -1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 -2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 -1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 -2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 -) - - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/jpcntx.py b/venv/Lib/site-packages/pip/_vendor/chardet/jpcntx.py deleted file mode 100644 index 20044e4..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/jpcntx.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK 
######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -# This is hiragana 2-char sequence table, the number in each cell represents its frequency category -jp2CharContext = ( -(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), -(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), -(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), -(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), -(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), -(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), -(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
-(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), -(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), -(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), -(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), -(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), -(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), -(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), -(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), -(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), -(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), -(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), -(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), -(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), -(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), -(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), -(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), -(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), -(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), -(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), -(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
-(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), -(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), -(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), -(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), -(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), -(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), -(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), -(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), -(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), -(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), -(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), -(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), -(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), -(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), -(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), -(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), -(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), -(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), -(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), -(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
-(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), -(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), -(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), -(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), -(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), -(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), -(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), -(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), -(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), -(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), -(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), -(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), -(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), -(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), -(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), -(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), -(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), -(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), -(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), -(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
-(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), -(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), -(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), -(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), -(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), -(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), -(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), -) - -class JapaneseContextAnalysis(object): - NUM_OF_CATEGORY = 6 - DONT_KNOW = -1 - ENOUGH_REL_THRESHOLD = 100 - MAX_REL_THRESHOLD = 1000 - MINIMUM_DATA_THRESHOLD = 4 - - def __init__(self): - self._total_rel = None - self._rel_sample = None - self._need_to_skip_char_num = None - self._last_char_order = None - self._done = None - self.reset() - - def reset(self): - self._total_rel = 0 # total sequence received - # category counters, each integer counts sequence in its category - self._rel_sample = [0] * self.NUM_OF_CATEGORY - # if last byte in current buffer is not the last byte of a character, - # we need to know how many bytes to skip in next buffer - self._need_to_skip_char_num = 0 - self._last_char_order = -1 # The order of previous char - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - - def feed(self, byte_str, num_bytes): - if self._done: - return - - # The buffer we got is byte oriented, and a character may span in more than one - # buffers. In case the last one or two byte in last buffer is not - # complete, we record how many byte needed to complete that character - # and skip these bytes here. We can choose to record those bytes as - # well and analyse the character once it is complete, but since a - # character will not make much difference, by simply skipping - # this character will simply our logic and improve performance. 
- i = self._need_to_skip_char_num - while i < num_bytes: - order, char_len = self.get_order(byte_str[i:i + 2]) - i += char_len - if i > num_bytes: - self._need_to_skip_char_num = i - num_bytes - self._last_char_order = -1 - else: - if (order != -1) and (self._last_char_order != -1): - self._total_rel += 1 - if self._total_rel > self.MAX_REL_THRESHOLD: - self._done = True - break - self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 - self._last_char_order = order - - def got_enough_data(self): - return self._total_rel > self.ENOUGH_REL_THRESHOLD - - def get_confidence(self): - # This is just one way to calculate confidence. It works well for me. - if self._total_rel > self.MINIMUM_DATA_THRESHOLD: - return (self._total_rel - self._rel_sample[0]) / self._total_rel - else: - return self.DONT_KNOW - - def get_order(self, byte_str): - return -1, 1 - -class SJISContextAnalysis(JapaneseContextAnalysis): - def __init__(self): - super(SJISContextAnalysis, self).__init__() - self._charset_name = "SHIFT_JIS" - - @property - def charset_name(self): - return self._charset_name - - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): - char_len = 2 - if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): - self._charset_name = "CP932" - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 202) and (0x9F <= second_char <= 0xF1): - return second_char - 0x9F, char_len - - return -1, char_len - -class EUCJPContextAnalysis(JapaneseContextAnalysis): - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): - char_len = 2 - elif first_char == 0x8F: - char_len = 3 - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): - return second_char - 0xA1, char_len - - return -1, char_len - - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py deleted file mode 100644 index e963a50..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py +++ /dev/null @@ -1,4650 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel - - -# 3: Positive -# 2: Likely -# 1: Unlikely -# 0: Negative - -BULGARIAN_LANG_MODEL = { - 63: { # 'e' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 1, # 'б' - 9: 1, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 1, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 0, # 'и' - 26: 1, # 'й' - 12: 1, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 1, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 0, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 1, # 
'ш' - 24: 1, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 45: { # '\xad' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 0, # 'Л' - 38: 1, # 'Ðœ' - 36: 0, # 'Ð' - 41: 1, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 1, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 1, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 0, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 0, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 0, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 31: { # 'Ð' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 1, # 'Ð' - 32: 1, # 'Б' - 35: 2, # 'Ð’' - 43: 1, # 'Г' - 37: 2, # 'Д' - 44: 2, # 'Е' - 55: 1, # 'Ж' - 47: 2, # 'З' - 40: 1, # 'И' - 59: 1, # 'Й' - 33: 1, # 'К' - 46: 2, # 'Л' - 38: 1, # 'Ðœ' - 36: 2, # 'Ð' - 41: 1, # 'О' - 30: 2, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 1, # 'У' - 48: 2, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 2, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 1, # 'а' - 18: 2, # 'б' - 9: 2, # 'в' - 20: 2, # 'г' - 11: 2, # 'д' - 3: 1, # 'е' - 23: 1, # 'ж' - 15: 2, # 'з' - 2: 0, # 'и' - 26: 2, # 'й' - 12: 2, # 'к' - 10: 3, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 0, # 'о' - 13: 2, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 1, # 'у' - 29: 2, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 1, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 32: { # 'Б' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 2, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 2, # 'Д' - 44: 1, # 'Е' - 55: 1, # 'Ж' - 47: 2, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 2, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 1, # 'У' - 48: 2, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 0, # 'Ш' - 57: 1, # 'Щ' - 61: 2, # 'Ъ' - 60: 1, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 2, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 35: { # 'Ð’' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 2, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 1, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 2, # 'Ф' - 49: 0, # 'Ð¥' - 53: 1, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 2, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 
9: 0, # 'в' - 20: 0, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 2, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 2, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 2, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 43: { # 'Г' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 2, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 0, # 'П' - 39: 1, # 'Р' - 28: 1, # 'С' - 34: 0, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 1, # 'Щ' - 61: 1, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 1, # 'б' - 9: 1, # 'в' - 20: 0, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 2, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 1, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 37: { # 'Д' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 2, # 'Ð’' - 43: 1, # 'Г' - 37: 2, # 'Д' - 44: 2, # 'Е' - 55: 2, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 2, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 2, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 2, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 44: { # 'Е' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 1, # 'Б' - 35: 2, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 1, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 1, # 'Й' - 33: 2, # 'К' - 46: 2, # 'Л' - 38: 1, # 'Ðœ' - 36: 2, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 1, # 'У' - 48: 2, # 'Ф' - 49: 1, # 'Ð¥' - 53: 2, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 1, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 0, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 2, # 'д' - 3: 0, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 0, # 'и' - 26: 1, # 'й' - 12: 2, # 'к' - 10: 2, # 'л' - 14: 2, # 'м' - 6: 2, # 'н' - 4: 0, # 'о' - 13: 1, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 1, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 55: { # 'Ж' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 1, # 'И' - 
59: 0, # 'Й' - 33: 1, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 1, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 47: { # 'З' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 2, # 'Ð' - 41: 1, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 1, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 2, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 1, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 1, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 40: { # 'И' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 1, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 2, # 'Е' - 55: 1, # 'Ж' - 47: 2, # 'З' - 40: 1, # 'И' - 59: 1, # 'Й' - 33: 2, # 'К' - 46: 2, # 'Л' - 38: 2, # 'Ðœ' - 36: 2, # 'Ð' - 41: 1, # 'О' - 30: 1, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 0, # 'У' - 48: 1, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 1, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 2, # 'Я' - 1: 1, # 'а' - 18: 1, # 'б' - 9: 3, # 'в' - 20: 2, # 'г' - 11: 1, # 'д' - 3: 1, # 'е' - 23: 0, # 'ж' - 15: 3, # 'з' - 2: 0, # 'и' - 26: 1, # 'й' - 12: 1, # 'к' - 10: 2, # 'л' - 14: 2, # 'м' - 6: 2, # 'н' - 4: 0, # 'о' - 13: 1, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 0, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 1, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 59: { # 'Й' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 1, # 'С' - 34: 1, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 1, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 0, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 1, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 0, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 2, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 
21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 33: { # 'К' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 0, # 'Ðœ' - 36: 2, # 'Ð' - 41: 2, # 'О' - 30: 2, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 1, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 1, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 2, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 3, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 46: { # 'Л' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 2, # 'Г' - 37: 1, # 'Д' - 44: 2, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 0, # 'Р' - 28: 1, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 1, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 1, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 2, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 38: { # 'Ðœ' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 2, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 1, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 2, # 'л' - 14: 0, # 'м' - 6: 2, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 36: { # 'Ð' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 2, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 2, # 'Д' - 44: 2, # 'Е' - 55: 1, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 1, # 'Й' - 33: 2, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 
'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 1, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 41: { # 'О' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 1, # 'Б' - 35: 2, # 'Ð’' - 43: 1, # 'Г' - 37: 2, # 'Д' - 44: 1, # 'Е' - 55: 1, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 1, # 'Й' - 33: 2, # 'К' - 46: 2, # 'Л' - 38: 2, # 'Ðœ' - 36: 2, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 1, # 'Ð¥' - 53: 0, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 1, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 1, # 'а' - 18: 2, # 'б' - 9: 2, # 'в' - 20: 2, # 'г' - 11: 1, # 'д' - 3: 1, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 0, # 'и' - 26: 1, # 'й' - 12: 2, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 0, # 'о' - 13: 2, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 1, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 1, # 'ц' - 21: 2, # 'ч' - 27: 0, # 'ш' - 24: 2, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 30: { # 'П' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 2, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 2, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 3, # 'л' - 14: 0, # 'м' - 6: 1, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 3, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 2, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 39: { # 'Р' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 2, # 'Г' - 37: 2, # 'Д' - 44: 2, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 0, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 2, # 'П' - 39: 1, # 'Р' - 28: 1, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 1, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 3, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 28: { # 'С' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 3, # 'Ð' - 32: 2, # 'Б' - 35: 2, # 'Ð’' - 43: 1, # 'Г' - 37: 2, # 'Д' - 44: 2, # 'Е' - 55: 1, # 'Ж' - 47: 1, # 
'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 2, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 2, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 2, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 1, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 2, # 'к' - 10: 3, # 'л' - 14: 2, # 'м' - 6: 1, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 2, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 2, # 'у' - 29: 2, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 1, # 'ц' - 21: 1, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 34: { # 'Т' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 2, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 2, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 2, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 2, # 'О' - 30: 1, # 'П' - 39: 2, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 1, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 1, # 'Ъ' - 60: 0, # 'Ю' - 56: 1, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 1, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 1, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 3, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 51: { # 'У' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 1, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 2, # 'Е' - 55: 1, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 0, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 1, # 'С' - 34: 2, # 'Т' - 51: 0, # 'У' - 48: 1, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 1, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 2, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 2, # 'и' - 26: 1, # 'й' - 12: 2, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 2, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 2, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 48: { # 'Ф' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 2, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 1, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 2, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 2, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 
'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 49: { # 'Ð¥' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 1, # 'П' - 39: 1, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 1, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 1, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 0, # 'н' - 4: 2, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 53: { # 'Ц' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 2, # 'И' - 59: 0, # 'Й' - 33: 2, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 1, # 'Р' - 28: 2, # 'С' - 34: 0, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 2, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 1, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 50: { # 'Ч' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 2, # 'Ð' - 32: 1, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 1, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 2, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 54: { # 'Ш' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 1, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 1, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 1, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 
56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 2, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 2, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 57: { # 'Щ' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 1, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 1, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 1, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 1, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 61: { # 'Ъ' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 1, # 'Д' - 44: 0, # 'Е' - 55: 1, # 'Ж' - 47: 1, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 2, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 0, # 'О' - 30: 1, # 'П' - 39: 2, # 'Р' - 28: 1, # 'С' - 34: 1, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 1, # 'Ð¥' - 53: 1, # 'Ц' - 50: 1, # 'Ч' - 54: 1, # 'Ш' - 57: 1, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 0, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 0, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 1, # 'л' - 14: 0, # 'м' - 6: 1, # 'н' - 4: 0, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 60: { # 'Ю' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 1, # 'Б' - 35: 0, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 0, # 'Е' - 55: 1, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 0, # 'Ðœ' - 36: 1, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 1, # 'Р' - 28: 1, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 1, # 'б' - 9: 1, # 'в' - 20: 2, # 'г' - 11: 1, # 'д' - 3: 0, # 'е' - 23: 2, # 'ж' - 15: 1, # 'з' - 2: 1, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 0, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 56: { # 'Я' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 1, # 'Б' - 35: 1, # 'Ð’' - 43: 1, # 'Г' - 37: 1, # 'Д' - 44: 0, # 'Е' - 
55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 1, # 'Л' - 38: 1, # 'Ðœ' - 36: 1, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 1, # 'С' - 34: 2, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 1, # 'б' - 9: 1, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 0, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 1, # 'и' - 26: 1, # 'й' - 12: 1, # 'к' - 10: 1, # 'л' - 14: 2, # 'м' - 6: 2, # 'н' - 4: 0, # 'о' - 13: 2, # 'п' - 7: 1, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 1: { # 'а' - 63: 1, # 'e' - 45: 1, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 1, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 1, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 3, # 'з' - 2: 3, # 'и' - 26: 3, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 2, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 3, # 'Ñ„' - 25: 3, # 'Ñ…' - 22: 3, # 'ц' - 21: 3, # 'ч' - 27: 3, # 'ш' - 24: 3, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 18: { # 'б' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 3, # 'в' - 20: 1, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 3, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 1, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 3, # 'у' - 29: 0, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 1, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 3, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 9: { # 'в' - 63: 1, # 'e' - 45: 1, # '\xad' - 31: 0, # 'Ð' - 32: 1, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 0, # 'в' - 20: 2, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 3, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 2, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 2, # 'у' - 29: 
0, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 3, # 'ч' - 27: 2, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 20: { # 'г' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 3, # 'л' - 14: 1, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 1, # 'п' - 7: 3, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 3, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 11: { # 'д' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 2, # 'б' - 9: 3, # 'в' - 20: 2, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 2, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 3, # 'у' - 29: 1, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 3: { # 'е' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 2, # 'е' - 23: 3, # 'ж' - 15: 3, # 'з' - 2: 2, # 'и' - 26: 3, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 2, # 'у' - 29: 3, # 'Ñ„' - 25: 3, # 'Ñ…' - 22: 3, # 'ц' - 21: 3, # 'ч' - 27: 3, # 'ш' - 24: 3, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 23: { # 'ж' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' 
- 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 2, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 3, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 1, # 'ц' - 21: 1, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 15: { # 'з' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 1, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 2, # 'ш' - 24: 1, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 2: { # 'и' - 63: 1, # 'e' - 45: 1, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 1, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 1, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 1, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 1, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 3, # 'з' - 2: 3, # 'и' - 26: 3, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 2, # 'у' - 29: 3, # 'Ñ„' - 25: 3, # 'Ñ…' - 22: 3, # 'ц' - 21: 3, # 'ч' - 27: 3, # 'ш' - 24: 3, # 'щ' - 17: 2, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 26: { # 'й' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 1, # 'а' - 18: 2, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 2, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 2, # 'з' - 2: 1, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 2, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 2, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 1, # 'у' - 29: 2, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 12: { # 'к' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 1, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 
44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 1, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 3, # 'в' - 20: 2, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 2, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 3, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 1, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 3, # 'ц' - 21: 2, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 10: { # 'л' - 63: 1, # 'e' - 45: 1, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 1, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 2, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 1, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 2, # 'п' - 7: 2, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 2, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 2, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 2, # 'ÑŒ' - 42: 3, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 14: { # 'м' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 1, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 2, # 'к' - 10: 3, # 'л' - 14: 1, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 3, # 'у' - 29: 2, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 2, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 6: { # 'н' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 1, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 2, # 'б' - 9: 2, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 2, # 'ж' - 15: 2, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 1, # 'п' - 7: 2, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 
19: 3, # 'у' - 29: 3, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 3, # 'ц' - 21: 3, # 'ч' - 27: 2, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 2, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 4: { # 'о' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 2, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 3, # 'з' - 2: 3, # 'и' - 26: 3, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 2, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 2, # 'у' - 29: 3, # 'Ñ„' - 25: 3, # 'Ñ…' - 22: 3, # 'ц' - 21: 3, # 'ч' - 27: 3, # 'ш' - 24: 3, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 13: { # 'п' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 1, # 'й' - 12: 2, # 'к' - 10: 3, # 'л' - 14: 1, # 'м' - 6: 2, # 'н' - 4: 3, # 'о' - 13: 1, # 'п' - 7: 3, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 3, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 7: { # 'Ñ€' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 3, # 'е' - 23: 3, # 'ж' - 15: 2, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 2, # 'п' - 7: 1, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 2, # 'Ñ„' - 25: 3, # 'Ñ…' - 22: 3, # 'ц' - 21: 2, # 'ч' - 27: 3, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 8: { # 'Ñ' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 
'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 2, # 'б' - 9: 3, # 'в' - 20: 2, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 2, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 2, # 'ш' - 24: 0, # 'щ' - 17: 3, # 'ÑŠ' - 52: 2, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 5: { # 'Ñ‚' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 2, # 'г' - 11: 2, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 2, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 3, # 'у' - 29: 1, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 2, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 3, # 'ÑŠ' - 52: 2, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 3, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 19: { # 'у' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 2, # 'е' - 23: 3, # 'ж' - 15: 3, # 'з' - 2: 2, # 'и' - 26: 2, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 2, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 1, # 'у' - 29: 2, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 3, # 'ч' - 27: 3, # 'ш' - 24: 2, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 29: { # 'Ñ„' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 1, # 'в' - 20: 1, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 2, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 2, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 2, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 25: { # 'Ñ…' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 
'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 3, # 'в' - 20: 0, # 'г' - 11: 1, # 'д' - 3: 2, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 2, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 1, # 'п' - 7: 3, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 3, # 'у' - 29: 0, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 22: { # 'ц' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 2, # 'в' - 20: 1, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 1, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 2, # 'к' - 10: 1, # 'л' - 14: 1, # 'м' - 6: 1, # 'н' - 4: 2, # 'о' - 13: 1, # 'п' - 7: 1, # 'Ñ€' - 8: 1, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 2, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 1, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 21: { # 'ч' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 1, # 'б' - 9: 3, # 'в' - 20: 1, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 1, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 2, # 'л' - 14: 2, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 2, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 3, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 1, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 27: { # 'ш' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 2, # 'в' - 20: 0, # 'г' - 11: 1, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 3, # 'к' - 10: 2, # 'л' - 14: 1, # 'м' - 6: 3, # 'н' - 4: 2, # 'о' - 13: 2, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 
'Ñ' - 5: 1, # 'Ñ‚' - 19: 2, # 'у' - 29: 1, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 1, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 2, # 'ÑŠ' - 52: 1, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 24: { # 'щ' - 63: 1, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 3, # 'а' - 18: 0, # 'б' - 9: 1, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 3, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 3, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 2, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 1, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 3, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 1, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 2, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 17: { # 'ÑŠ' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 1, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 3, # 'г' - 11: 3, # 'д' - 3: 2, # 'е' - 23: 3, # 'ж' - 15: 3, # 'з' - 2: 1, # 'и' - 26: 2, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 3, # 'о' - 13: 3, # 'п' - 7: 3, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 1, # 'у' - 29: 1, # 'Ñ„' - 25: 2, # 'Ñ…' - 22: 2, # 'ц' - 21: 3, # 'ч' - 27: 2, # 'ш' - 24: 3, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 2, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 52: { # 'ÑŒ' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 1, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 0, # 'и' - 26: 0, # 'й' - 12: 1, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 1, # 'н' - 4: 3, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 1, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 1, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 1, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 42: { # 'ÑŽ' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 
54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 1, # 'а' - 18: 2, # 'б' - 9: 1, # 'в' - 20: 2, # 'г' - 11: 2, # 'д' - 3: 1, # 'е' - 23: 2, # 'ж' - 15: 2, # 'з' - 2: 1, # 'и' - 26: 1, # 'й' - 12: 2, # 'к' - 10: 2, # 'л' - 14: 2, # 'м' - 6: 2, # 'н' - 4: 1, # 'о' - 13: 1, # 'п' - 7: 2, # 'Ñ€' - 8: 2, # 'Ñ' - 5: 2, # 'Ñ‚' - 19: 1, # 'у' - 29: 1, # 'Ñ„' - 25: 1, # 'Ñ…' - 22: 2, # 'ц' - 21: 3, # 'ч' - 27: 1, # 'ш' - 24: 1, # 'щ' - 17: 1, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 16: { # 'Ñ' - 63: 0, # 'e' - 45: 1, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 3, # 'б' - 9: 3, # 'в' - 20: 2, # 'г' - 11: 3, # 'д' - 3: 2, # 'е' - 23: 1, # 'ж' - 15: 2, # 'з' - 2: 1, # 'и' - 26: 2, # 'й' - 12: 3, # 'к' - 10: 3, # 'л' - 14: 3, # 'м' - 6: 3, # 'н' - 4: 1, # 'о' - 13: 2, # 'п' - 7: 2, # 'Ñ€' - 8: 3, # 'Ñ' - 5: 3, # 'Ñ‚' - 19: 1, # 'у' - 29: 1, # 'Ñ„' - 25: 3, # 'Ñ…' - 22: 2, # 'ц' - 21: 1, # 'ч' - 27: 1, # 'ш' - 24: 2, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 1, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 58: { # 'Ñ”' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 0, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 0, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 0, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, - 62: { # 'â„–' - 63: 0, # 'e' - 45: 0, # '\xad' - 31: 0, # 'Ð' - 32: 0, # 'Б' - 35: 0, # 'Ð’' - 43: 0, # 'Г' - 37: 0, # 'Д' - 44: 0, # 'Е' - 55: 0, # 'Ж' - 47: 0, # 'З' - 40: 0, # 'И' - 59: 0, # 'Й' - 33: 0, # 'К' - 46: 0, # 'Л' - 38: 0, # 'Ðœ' - 36: 0, # 'Ð' - 41: 0, # 'О' - 30: 0, # 'П' - 39: 0, # 'Р' - 28: 0, # 'С' - 34: 0, # 'Т' - 51: 0, # 'У' - 48: 0, # 'Ф' - 49: 0, # 'Ð¥' - 53: 0, # 'Ц' - 50: 0, # 'Ч' - 54: 0, # 'Ш' - 57: 0, # 'Щ' - 61: 0, # 'Ъ' - 60: 0, # 'Ю' - 56: 0, # 'Я' - 1: 0, # 'а' - 18: 0, # 'б' - 9: 0, # 'в' - 20: 0, # 'г' - 11: 0, # 'д' - 3: 0, # 'е' - 23: 0, # 'ж' - 15: 0, # 'з' - 2: 0, # 'и' - 26: 0, # 'й' - 12: 0, # 'к' - 10: 0, # 'л' - 14: 0, # 'м' - 6: 0, # 'н' - 4: 0, # 'о' - 13: 0, # 'п' - 7: 0, # 'Ñ€' - 8: 0, # 'Ñ' - 5: 0, # 'Ñ‚' - 19: 0, # 'у' - 29: 0, # 'Ñ„' - 25: 0, # 'Ñ…' - 22: 0, # 'ц' - 21: 0, # 'ч' - 27: 0, # 'ш' - 24: 0, # 'щ' - 17: 0, # 'ÑŠ' - 52: 0, # 'ÑŒ' - 42: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - 58: 0, # 'Ñ”' - 62: 0, # 'â„–' - }, -} - -# 255: Undefined characters that did not exist in training text -# 254: 
Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 -# 251: Control characters - -# Character Mapping Table(s): -ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' - 64: 253, # '@' - 65: 77, # 'A' - 66: 90, # 'B' - 67: 99, # 'C' - 68: 100, # 'D' - 69: 72, # 'E' - 70: 109, # 'F' - 71: 107, # 'G' - 72: 101, # 'H' - 73: 79, # 'I' - 74: 185, # 'J' - 75: 81, # 'K' - 76: 102, # 'L' - 77: 76, # 'M' - 78: 94, # 'N' - 79: 82, # 'O' - 80: 110, # 'P' - 81: 186, # 'Q' - 82: 108, # 'R' - 83: 91, # 'S' - 84: 74, # 'T' - 85: 119, # 'U' - 86: 84, # 'V' - 87: 96, # 'W' - 88: 111, # 'X' - 89: 187, # 'Y' - 90: 115, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 65, # 'a' - 98: 69, # 'b' - 99: 70, # 'c' - 100: 66, # 'd' - 101: 63, # 'e' - 102: 68, # 'f' - 103: 112, # 'g' - 104: 103, # 'h' - 105: 92, # 'i' - 106: 194, # 'j' - 107: 104, # 'k' - 108: 95, # 'l' - 109: 86, # 'm' - 110: 87, # 'n' - 111: 71, # 'o' - 112: 116, # 'p' - 113: 195, # 'q' - 114: 85, # 'r' - 115: 93, # 's' - 116: 97, # 't' - 117: 113, # 'u' - 118: 196, # 'v' - 119: 197, # 'w' - 120: 198, # 'x' - 121: 199, # 'y' - 122: 200, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 194, # '\x80' - 129: 195, # '\x81' - 130: 196, # '\x82' - 131: 197, # '\x83' - 132: 198, # '\x84' - 133: 199, # '\x85' - 134: 200, # '\x86' - 135: 201, # '\x87' - 136: 202, # '\x88' - 137: 203, # '\x89' - 138: 204, # '\x8a' - 139: 205, # '\x8b' - 140: 206, # '\x8c' - 141: 207, # '\x8d' - 142: 208, # '\x8e' - 143: 209, # '\x8f' - 144: 210, # '\x90' - 145: 211, # '\x91' - 146: 212, # '\x92' - 147: 213, # '\x93' - 148: 214, # '\x94' - 149: 215, # '\x95' - 150: 216, # '\x96' - 151: 217, # '\x97' - 152: 218, # '\x98' - 153: 219, # '\x99' - 154: 220, # '\x9a' - 155: 221, # '\x9b' - 156: 222, # '\x9c' - 157: 223, # '\x9d' - 158: 224, # '\x9e' - 159: 225, # '\x9f' - 160: 81, # '\xa0' - 161: 226, # 'Ð' - 162: 227, # 'Ђ' - 163: 228, # 'Ѓ' - 164: 229, # 'Є' - 165: 230, # 'Ð…' - 166: 105, # 'І' - 167: 231, # 'Ї' - 168: 232, # 'Ј' - 169: 233, # 'Љ' - 170: 234, # 'Њ' - 171: 235, # 'Ћ' - 172: 236, # 'ÐŒ' - 173: 45, # '\xad' - 174: 237, # 'ÐŽ' - 175: 238, # 'Ð' - 176: 31, # 'Ð' - 177: 32, # 'Б' - 178: 35, # 'Ð’' - 179: 43, # 'Г' - 180: 37, # 'Д' - 181: 44, # 'Е' - 182: 55, # 'Ж' - 183: 47, # 'З' - 
184: 40, # 'И' - 185: 59, # 'Й' - 186: 33, # 'К' - 187: 46, # 'Л' - 188: 38, # 'М' - 189: 36, # 'Н' - 190: 41, # 'О' - 191: 30, # 'П' - 192: 39, # 'Р' - 193: 28, # 'С' - 194: 34, # 'Т' - 195: 51, # 'У' - 196: 48, # 'Ф' - 197: 49, # 'Х' - 198: 53, # 'Ц' - 199: 50, # 'Ч' - 200: 54, # 'Ш' - 201: 57, # 'Щ' - 202: 61, # 'Ъ' - 203: 239, # 'Ы' - 204: 67, # 'Ь' - 205: 240, # 'Э' - 206: 60, # 'Ю' - 207: 56, # 'Я' - 208: 1, # 'а' - 209: 18, # 'б' - 210: 9, # 'в' - 211: 20, # 'г' - 212: 11, # 'д' - 213: 3, # 'е' - 214: 23, # 'ж' - 215: 15, # 'з' - 216: 2, # 'и' - 217: 26, # 'й' - 218: 12, # 'к' - 219: 10, # 'л' - 220: 14, # 'м' - 221: 6, # 'н' - 222: 4, # 'о' - 223: 13, # 'п' - 224: 7, # 'р' - 225: 8, # 'с' - 226: 5, # 'т' - 227: 19, # 'у' - 228: 29, # 'ф' - 229: 25, # 'х' - 230: 22, # 'ц' - 231: 21, # 'ч' - 232: 27, # 'ш' - 233: 24, # 'щ' - 234: 17, # 'ъ' - 235: 75, # 'ы' - 236: 52, # 'ь' - 237: 241, # 'э' - 238: 42, # 'ю' - 239: 16, # 'я' - 240: 62, # '№' - 241: 242, # 'ё' - 242: 243, # 'ђ' - 243: 244, # 'ѓ' - 244: 58, # 'є' - 245: 245, # 'ѕ' - 246: 98, # 'і' - 247: 246, # 'ї' - 248: 247, # 'ј' - 249: 248, # 'љ' - 250: 249, # 'њ' - 251: 250, # 'ћ' - 252: 251, # 'ќ' - 253: 91, # '§' - 254: 252, # 'ў' - 255: 253, # 'џ' -} - -ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', - language='Bulgarian', - char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER, - language_model=BULGARIAN_LANG_MODEL, - typical_positive_ratio=0.969392, - keep_ascii_letters=False, - alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя') - -WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?'
- 64: 253, # '@' - 65: 77, # 'A' - 66: 90, # 'B' - 67: 99, # 'C' - 68: 100, # 'D' - 69: 72, # 'E' - 70: 109, # 'F' - 71: 107, # 'G' - 72: 101, # 'H' - 73: 79, # 'I' - 74: 185, # 'J' - 75: 81, # 'K' - 76: 102, # 'L' - 77: 76, # 'M' - 78: 94, # 'N' - 79: 82, # 'O' - 80: 110, # 'P' - 81: 186, # 'Q' - 82: 108, # 'R' - 83: 91, # 'S' - 84: 74, # 'T' - 85: 119, # 'U' - 86: 84, # 'V' - 87: 96, # 'W' - 88: 111, # 'X' - 89: 187, # 'Y' - 90: 115, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 65, # 'a' - 98: 69, # 'b' - 99: 70, # 'c' - 100: 66, # 'd' - 101: 63, # 'e' - 102: 68, # 'f' - 103: 112, # 'g' - 104: 103, # 'h' - 105: 92, # 'i' - 106: 194, # 'j' - 107: 104, # 'k' - 108: 95, # 'l' - 109: 86, # 'm' - 110: 87, # 'n' - 111: 71, # 'o' - 112: 116, # 'p' - 113: 195, # 'q' - 114: 85, # 'r' - 115: 93, # 's' - 116: 97, # 't' - 117: 113, # 'u' - 118: 196, # 'v' - 119: 197, # 'w' - 120: 198, # 'x' - 121: 199, # 'y' - 122: 200, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 206, # 'Ђ' - 129: 207, # 'Ѓ' - 130: 208, # '‚' - 131: 209, # 'Ñ“' - 132: 210, # '„' - 133: 211, # '…' - 134: 212, # '†' - 135: 213, # '‡' - 136: 120, # '€' - 137: 214, # '‰' - 138: 215, # 'Љ' - 139: 216, # '‹' - 140: 217, # 'Њ' - 141: 218, # 'ÐŒ' - 142: 219, # 'Ћ' - 143: 220, # 'Ð' - 144: 221, # 'Ñ’' - 145: 78, # '‘' - 146: 64, # '’' - 147: 83, # '“' - 148: 121, # 'â€' - 149: 98, # '•' - 150: 117, # '–' - 151: 105, # '—' - 152: 222, # None - 153: 223, # 'â„¢' - 154: 224, # 'Ñ™' - 155: 225, # '›' - 156: 226, # 'Ñš' - 157: 227, # 'Ñœ' - 158: 228, # 'Ñ›' - 159: 229, # 'ÑŸ' - 160: 88, # '\xa0' - 161: 230, # 'ÐŽ' - 162: 231, # 'Ñž' - 163: 232, # 'Ј' - 164: 233, # '¤' - 165: 122, # 'Ò' - 166: 89, # '¦' - 167: 106, # '§' - 168: 234, # 'Ð' - 169: 235, # '©' - 170: 236, # 'Є' - 171: 237, # '«' - 172: 238, # '¬' - 173: 45, # '\xad' - 174: 239, # '®' - 175: 240, # 'Ї' - 176: 73, # '°' - 177: 80, # '±' - 178: 118, # 'І' - 179: 114, # 'Ñ–' - 180: 241, # 'Ò‘' - 181: 242, # 'µ' - 182: 243, # '¶' - 183: 244, # '·' - 184: 245, # 'Ñ‘' - 185: 62, # 'â„–' - 186: 58, # 'Ñ”' - 187: 246, # '»' - 188: 247, # 'ј' - 189: 248, # 'Ð…' - 190: 249, # 'Ñ•' - 191: 250, # 'Ñ—' - 192: 31, # 'Ð' - 193: 32, # 'Б' - 194: 35, # 'Ð’' - 195: 43, # 'Г' - 196: 37, # 'Д' - 197: 44, # 'Е' - 198: 55, # 'Ж' - 199: 47, # 'З' - 200: 40, # 'И' - 201: 59, # 'Й' - 202: 33, # 'К' - 203: 46, # 'Л' - 204: 38, # 'Ðœ' - 205: 36, # 'Ð' - 206: 41, # 'О' - 207: 30, # 'П' - 208: 39, # 'Р' - 209: 28, # 'С' - 210: 34, # 'Т' - 211: 51, # 'У' - 212: 48, # 'Ф' - 213: 49, # 'Ð¥' - 214: 53, # 'Ц' - 215: 50, # 'Ч' - 216: 54, # 'Ш' - 217: 57, # 'Щ' - 218: 61, # 'Ъ' - 219: 251, # 'Ы' - 220: 67, # 'Ь' - 221: 252, # 'Э' - 222: 60, # 'Ю' - 223: 56, # 'Я' - 224: 1, # 'а' - 225: 18, # 'б' - 226: 9, # 'в' - 227: 20, # 'г' - 228: 11, # 'д' - 229: 3, # 'е' - 230: 23, # 'ж' - 231: 15, # 'з' - 232: 2, # 'и' - 233: 26, # 'й' - 234: 12, # 'к' - 235: 10, # 'л' - 236: 14, # 'м' - 237: 6, # 'н' - 238: 4, # 'о' - 239: 13, # 'п' - 240: 7, # 'Ñ€' - 241: 8, # 'Ñ' - 242: 5, # 'Ñ‚' - 243: 19, # 'у' - 244: 29, # 'Ñ„' - 245: 25, # 'Ñ…' - 246: 22, # 'ц' - 247: 21, # 'ч' - 248: 27, # 'ш' - 249: 24, # 'щ' - 250: 17, # 'ÑŠ' - 251: 75, # 'Ñ‹' - 252: 52, # 'ÑŒ' - 253: 253, # 'Ñ' - 254: 42, # 'ÑŽ' - 255: 16, # 'Ñ' -} - -WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', - language='Bulgarian', - char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER, - 
language_model=BULGARIAN_LANG_MODEL, - typical_positive_ratio=0.969392, - keep_ascii_letters=False, - alphabet='ÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрÑтуфхцчшщъьюÑ') - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py deleted file mode 100644 index d99528e..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py +++ /dev/null @@ -1,4398 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel - - -# 3: Positive -# 2: Likely -# 1: Unlikely -# 0: Negative - -GREEK_LANG_MODEL = { - 60: { # 'e' - 60: 2, # 'e' - 55: 1, # 'o' - 58: 2, # 't' - 36: 1, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 1, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 55: { # 'o' - 60: 0, # 'e' - 55: 2, # 'o' - 58: 2, # 't' - 36: 1, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 1, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 1, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 58: { # 't' - 60: 2, # 'e' - 55: 1, # 'o' - 58: 1, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 1, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 36: { # '·' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 
0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 61: { # 'Ά' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 1, # 'γ' - 21: 2, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 1, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 46: { # 'Έ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 2, # 'β' - 20: 2, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 2, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 0, # 'ο' - 9: 2, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 1, # 'σ' - 2: 2, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 3, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 54: { # 'ÎŒ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 2, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 2, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 2, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 
0, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 31: { # 'Α' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 2, # 'Î’' - 43: 2, # 'Γ' - 41: 1, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 2, # 'Θ' - 47: 2, # 'Ι' - 44: 2, # 'Κ' - 53: 2, # 'Λ' - 38: 2, # 'Îœ' - 49: 2, # 'Î' - 59: 1, # 'Ξ' - 39: 0, # 'Ο' - 35: 2, # 'Π' - 48: 2, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 2, # 'Î¥' - 56: 2, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 2, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 1, # 'θ' - 5: 0, # 'ι' - 11: 2, # 'κ' - 16: 3, # 'λ' - 10: 2, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 0, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 2, # 'Ï‚' - 7: 2, # 'σ' - 2: 0, # 'Ï„' - 12: 3, # 'Ï…' - 28: 2, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 51: { # 'Î’' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 1, # 'Ε' - 40: 1, # 'Η' - 52: 0, # 'Θ' - 47: 1, # 'Ι' - 44: 0, # 'Κ' - 53: 1, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 2, # 'ή' - 15: 0, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 43: { # 'Γ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 1, # 'Α' - 51: 0, # 'Î’' - 43: 2, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 1, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 1, # 'Κ' - 53: 1, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 1, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 2, # 'Î¥' - 56: 0, # 'Φ' - 50: 1, # 'Χ' - 57: 2, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 41: { # 'Δ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 2, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 2, # 'ή' - 15: 2, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 0, # 'θ' - 5: 3, # 
'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 1, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 34: { # 'Ε' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 2, # 'Γ' - 41: 2, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 2, # 'Κ' - 53: 2, # 'Λ' - 38: 2, # 'Îœ' - 49: 2, # 'Î' - 59: 1, # 'Ξ' - 39: 0, # 'Ο' - 35: 2, # 'Π' - 48: 2, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 2, # 'Î¥' - 56: 0, # 'Φ' - 50: 2, # 'Χ' - 57: 2, # 'Ω' - 17: 3, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 3, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 3, # 'γ' - 21: 2, # 'δ' - 3: 1, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 1, # 'θ' - 5: 2, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 2, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 0, # 'ο' - 9: 3, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 2, # 'σ' - 2: 2, # 'Ï„' - 12: 2, # 'Ï…' - 28: 2, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 1, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 40: { # 'Η' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 1, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 2, # 'Θ' - 47: 0, # 'Ι' - 44: 2, # 'Κ' - 53: 0, # 'Λ' - 38: 2, # 'Îœ' - 49: 2, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 2, # 'Π' - 48: 2, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 1, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 1, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 1, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 52: { # 'Θ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 1, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 1, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 2, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 47: { # 'Ι' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 1, # 'Î’' - 43: 1, # 'Γ' - 41: 2, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 2, # 'Κ' - 53: 2, # 'Λ' - 38: 2, # 'Îœ' - 49: 2, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 0, # 'Î¥' - 56: 2, # 'Φ' - 50: 0, # 'Χ' - 57: 2, # 'Ω' - 
17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 2, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 1, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 2, # 'σ' - 2: 1, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 1, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 44: { # 'Κ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 1, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 1, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 0, # 'Σ' - 33: 1, # 'Τ' - 45: 2, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 1, # 'Ω' - 17: 3, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 2, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 53: { # 'Λ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 2, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 2, # 'Σ' - 33: 0, # 'Τ' - 45: 2, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 2, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 0, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 1, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 2, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 38: { # 'Îœ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 2, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 2, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 2, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 2, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 3, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 2, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 49: { # 'Î' - 60: 2, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 
'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 2, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 2, # 'Ω' - 17: 0, # 'ά' - 18: 2, # 'έ' - 22: 0, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 1, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 1, # 'ω' - 19: 2, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 59: { # 'Ξ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 1, # 'Ε' - 40: 1, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 1, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 2, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 39: { # 'Ο' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 1, # 'Î’' - 43: 2, # 'Γ' - 41: 2, # 'Δ' - 34: 2, # 'Ε' - 40: 1, # 'Η' - 52: 2, # 'Θ' - 47: 2, # 'Ι' - 44: 2, # 'Κ' - 53: 2, # 'Λ' - 38: 2, # 'Îœ' - 49: 2, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 2, # 'Π' - 48: 2, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 2, # 'Î¥' - 56: 2, # 'Φ' - 50: 2, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 2, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 2, # 'κ' - 16: 2, # 'λ' - 10: 2, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 2, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 2, # 'Ï„' - 12: 2, # 'Ï…' - 28: 1, # 'φ' - 23: 1, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 35: { # 'Π' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 2, # 'Λ' - 38: 1, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 0, # 'Σ' - 33: 1, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 1, # 'Χ' - 57: 2, # 'Ω' - 17: 2, # 'ά' - 18: 1, # 'έ' - 22: 1, # 'ή' - 15: 2, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 2, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 48: { # 'Ρ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 
0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 1, # 'Γ' - 41: 1, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 2, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 0, # 'Σ' - 33: 1, # 'Τ' - 45: 1, # 'Î¥' - 56: 0, # 'Φ' - 50: 1, # 'Χ' - 57: 1, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 2, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 1, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 37: { # 'Σ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 1, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 2, # 'Κ' - 53: 0, # 'Λ' - 38: 2, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 2, # 'Î¥' - 56: 0, # 'Φ' - 50: 2, # 'Χ' - 57: 2, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 2, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 2, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 2, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 2, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 33: { # 'Τ' - 60: 0, # 'e' - 55: 1, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 2, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 2, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 0, # 'Σ' - 33: 1, # 'Τ' - 45: 1, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 2, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 0, # 'ή' - 15: 2, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 2, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 2, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 2, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 45: { # 'Î¥' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 2, # 'Γ' - 41: 0, # 'Δ' - 34: 1, # 'Ε' - 40: 2, # 'Η' - 52: 2, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 1, # 'Λ' - 38: 2, # 'Îœ' - 49: 2, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 2, # 'Π' - 48: 1, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 1, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 3, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 
0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 56: { # 'Φ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 1, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 1, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 2, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 2, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 1, # 'Ï' - 27: 1, # 'ÏŽ' - }, - 50: { # 'Χ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 1, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 2, # 'Ε' - 40: 2, # 'Η' - 52: 0, # 'Θ' - 47: 2, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 1, # 'Î' - 59: 0, # 'Ξ' - 39: 1, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 1, # 'Χ' - 57: 1, # 'Ω' - 17: 2, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 2, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 2, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 57: { # 'Ω' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 1, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 1, # 'Λ' - 38: 0, # 'Îœ' - 49: 2, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 2, # 'Ρ' - 37: 2, # 'Σ' - 33: 2, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 2, # 'Ï' - 14: 2, # 'Ï‚' - 7: 2, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 1, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 17: { # 'ά' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 3, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 3, # 'ε' - 32: 3, # 'ζ' - 13: 0, # 'η' - 25: 3, # 'θ' - 5: 2, # 
'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 0, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 3, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 18: { # 'έ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 3, # 'α' - 29: 2, # 'β' - 20: 3, # 'γ' - 21: 2, # 'δ' - 3: 3, # 'ε' - 32: 2, # 'ζ' - 13: 0, # 'η' - 25: 3, # 'θ' - 5: 0, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 3, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 22: { # 'ή' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 1, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 3, # 'θ' - 5: 0, # 'ι' - 11: 3, # 'κ' - 16: 2, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 0, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 15: { # 'ί' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 3, # 'α' - 29: 2, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 3, # 'ε' - 32: 3, # 'ζ' - 13: 3, # 'η' - 25: 3, # 'θ' - 5: 0, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 1, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 3, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 1: { # 'α' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 
17: 0, # 'ά' - 18: 2, # 'έ' - 22: 0, # 'ή' - 15: 3, # 'ί' - 1: 0, # 'α' - 29: 3, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 2, # 'ε' - 32: 3, # 'ζ' - 13: 1, # 'η' - 25: 3, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 2, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 0, # 'ω' - 19: 2, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 29: { # 'β' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 2, # 'έ' - 22: 3, # 'ή' - 15: 2, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 2, # 'γ' - 21: 2, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 3, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 2, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 20: { # 'γ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 3, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 3, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 21: { # 'δ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 3: { # 'ε' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' 
- 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 3, # 'ί' - 1: 2, # 'α' - 29: 3, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 2, # 'ε' - 32: 2, # 'ζ' - 13: 0, # 'η' - 25: 3, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 2, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 3, # 'ω' - 19: 2, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 32: { # 'ζ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 2, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 1, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 2, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 13: { # 'η' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 3, # 'γ' - 21: 2, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 3, # 'θ' - 5: 0, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 0, # 'ο' - 9: 2, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 25: { # 'θ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 2, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 1, # 'λ' - 10: 3, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 5: { # 'ι' - 60: 0, # 'e' - 55: 1, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 
'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 1, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 0, # 'ί' - 1: 3, # 'α' - 29: 3, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 3, # 'ε' - 32: 2, # 'ζ' - 13: 3, # 'η' - 25: 3, # 'θ' - 5: 0, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 11: { # 'κ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 3, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 2, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 2, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 2, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 16: { # 'λ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 1, # 'β' - 20: 2, # 'γ' - 21: 1, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 2, # 'θ' - 5: 3, # 'ι' - 11: 2, # 'κ' - 16: 3, # 'λ' - 10: 2, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 2, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 10: { # 'μ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 1, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 3, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 2, # 'Ï…' - 28: 3, # 
'φ' - 23: 0, # 'χ' - 42: 2, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 6: { # 'ν' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 3, # 'δ' - 3: 3, # 'ε' - 32: 2, # 'ζ' - 13: 3, # 'η' - 25: 3, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 1, # 'λ' - 10: 0, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 30: { # 'ξ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 2, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 2, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 2, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 1, # 'ÏŽ' - }, - 4: { # 'ο' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 2, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 2, # 'α' - 29: 3, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 3, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 2, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 2, # 'ω' - 19: 1, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 9: { # 'Ï€' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 
0, # 'κ' - 16: 3, # 'λ' - 10: 0, # 'μ' - 6: 2, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 2, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 8: { # 'Ï' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 2, # 'β' - 20: 3, # 'γ' - 21: 2, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 3, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 1, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 3, # 'ο' - 9: 2, # 'Ï€' - 8: 2, # 'Ï' - 14: 0, # 'Ï‚' - 7: 2, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 14: { # 'Ï‚' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 2, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 0, # 'θ' - 5: 0, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 0, # 'Ï„' - 12: 0, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 7: { # 'σ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 3, # 'β' - 20: 0, # 'γ' - 21: 2, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 3, # 'θ' - 5: 3, # 'ι' - 11: 3, # 'κ' - 16: 2, # 'λ' - 10: 3, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 3, # 'φ' - 23: 3, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 2: { # 'Ï„' - 60: 0, # 'e' - 55: 2, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 
'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 2, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 3, # 'ι' - 11: 2, # 'κ' - 16: 2, # 'λ' - 10: 3, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 2, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 12: { # 'Ï…' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 3, # 'ή' - 15: 2, # 'ί' - 1: 3, # 'α' - 29: 2, # 'β' - 20: 3, # 'γ' - 21: 2, # 'δ' - 3: 2, # 'ε' - 32: 2, # 'ζ' - 13: 2, # 'η' - 25: 3, # 'θ' - 5: 2, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 3, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 2, # 'ω' - 19: 2, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 28: { # 'φ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 3, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 2, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 0, # 'μ' - 6: 1, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 1, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 2, # 'Ï' - 27: 2, # 'ÏŽ' - }, - 23: { # 'χ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 3, # 'ά' - 18: 2, # 'έ' - 22: 3, # 'ή' - 15: 3, # 'ί' - 1: 3, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 2, # 'θ' - 5: 3, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 2, # 'μ' - 6: 3, # 'ν' - 30: 0, # 'ξ' - 4: 3, # 'ο' - 9: 0, # 'Ï€' - 8: 3, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 3, # 'Ï„' - 12: 3, # 'Ï…' - 28: 0, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 3, # 'ω' - 19: 3, # 'ÏŒ' - 26: 3, # 'Ï' - 27: 3, # 'ÏŽ' - }, - 42: { # 'ψ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 
0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 2, # 'ά' - 18: 2, # 'έ' - 22: 1, # 'ή' - 15: 2, # 'ί' - 1: 2, # 'α' - 29: 0, # 'β' - 20: 0, # 'γ' - 21: 0, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 3, # 'η' - 25: 0, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 0, # 'λ' - 10: 0, # 'μ' - 6: 0, # 'ν' - 30: 0, # 'ξ' - 4: 2, # 'ο' - 9: 0, # 'Ï€' - 8: 0, # 'Ï' - 14: 0, # 'Ï‚' - 7: 0, # 'σ' - 2: 2, # 'Ï„' - 12: 1, # 'Ï…' - 28: 0, # 'φ' - 23: 0, # 'χ' - 42: 0, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 24: { # 'ω' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 1, # 'ά' - 18: 0, # 'έ' - 22: 2, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 2, # 'β' - 20: 3, # 'γ' - 21: 2, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 0, # 'η' - 25: 3, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 0, # 'ξ' - 4: 0, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 2, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 19: { # 'ÏŒ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 3, # 'β' - 20: 3, # 'γ' - 21: 3, # 'δ' - 3: 1, # 'ε' - 32: 2, # 'ζ' - 13: 2, # 'η' - 25: 2, # 'θ' - 5: 2, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 1, # 'ξ' - 4: 2, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 3, # 'χ' - 42: 2, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 26: { # 'Ï' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' - 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 2, # 'α' - 29: 2, # 'β' - 20: 2, # 'γ' - 21: 1, # 'δ' - 3: 3, # 'ε' - 32: 0, # 'ζ' - 13: 2, # 'η' - 25: 3, # 'θ' - 5: 0, # 'ι' - 11: 3, # 'κ' - 16: 3, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 2, # 'ξ' - 4: 3, # 'ο' - 9: 3, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 2, # 'φ' - 23: 2, # 'χ' - 42: 2, # 'ψ' - 24: 2, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, - 27: { # 'ÏŽ' - 60: 0, # 'e' - 55: 0, # 'o' - 58: 0, # 't' - 36: 0, # '·' - 61: 0, # 'Ά' 
- 46: 0, # 'Έ' - 54: 0, # 'ÎŒ' - 31: 0, # 'Α' - 51: 0, # 'Î’' - 43: 0, # 'Γ' - 41: 0, # 'Δ' - 34: 0, # 'Ε' - 40: 0, # 'Η' - 52: 0, # 'Θ' - 47: 0, # 'Ι' - 44: 0, # 'Κ' - 53: 0, # 'Λ' - 38: 0, # 'Îœ' - 49: 0, # 'Î' - 59: 0, # 'Ξ' - 39: 0, # 'Ο' - 35: 0, # 'Π' - 48: 0, # 'Ρ' - 37: 0, # 'Σ' - 33: 0, # 'Τ' - 45: 0, # 'Î¥' - 56: 0, # 'Φ' - 50: 0, # 'Χ' - 57: 0, # 'Ω' - 17: 0, # 'ά' - 18: 0, # 'έ' - 22: 0, # 'ή' - 15: 0, # 'ί' - 1: 0, # 'α' - 29: 1, # 'β' - 20: 0, # 'γ' - 21: 3, # 'δ' - 3: 0, # 'ε' - 32: 0, # 'ζ' - 13: 1, # 'η' - 25: 2, # 'θ' - 5: 2, # 'ι' - 11: 0, # 'κ' - 16: 2, # 'λ' - 10: 3, # 'μ' - 6: 3, # 'ν' - 30: 1, # 'ξ' - 4: 0, # 'ο' - 9: 2, # 'Ï€' - 8: 3, # 'Ï' - 14: 3, # 'Ï‚' - 7: 3, # 'σ' - 2: 3, # 'Ï„' - 12: 0, # 'Ï…' - 28: 1, # 'φ' - 23: 1, # 'χ' - 42: 0, # 'ψ' - 24: 0, # 'ω' - 19: 0, # 'ÏŒ' - 26: 0, # 'Ï' - 27: 0, # 'ÏŽ' - }, -} - -# 255: Undefined characters that did not exist in training text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 -# 251: Control characters - -# Character Mapping Table(s): -WINDOWS_1253_GREEK_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' 
- 64: 253, # '@' - 65: 82, # 'A' - 66: 100, # 'B' - 67: 104, # 'C' - 68: 94, # 'D' - 69: 98, # 'E' - 70: 101, # 'F' - 71: 116, # 'G' - 72: 102, # 'H' - 73: 111, # 'I' - 74: 187, # 'J' - 75: 117, # 'K' - 76: 92, # 'L' - 77: 88, # 'M' - 78: 113, # 'N' - 79: 85, # 'O' - 80: 79, # 'P' - 81: 118, # 'Q' - 82: 105, # 'R' - 83: 83, # 'S' - 84: 67, # 'T' - 85: 114, # 'U' - 86: 119, # 'V' - 87: 95, # 'W' - 88: 99, # 'X' - 89: 109, # 'Y' - 90: 188, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 72, # 'a' - 98: 70, # 'b' - 99: 80, # 'c' - 100: 81, # 'd' - 101: 60, # 'e' - 102: 96, # 'f' - 103: 93, # 'g' - 104: 89, # 'h' - 105: 68, # 'i' - 106: 120, # 'j' - 107: 97, # 'k' - 108: 77, # 'l' - 109: 86, # 'm' - 110: 69, # 'n' - 111: 55, # 'o' - 112: 78, # 'p' - 113: 115, # 'q' - 114: 65, # 'r' - 115: 66, # 's' - 116: 58, # 't' - 117: 76, # 'u' - 118: 106, # 'v' - 119: 103, # 'w' - 120: 87, # 'x' - 121: 107, # 'y' - 122: 112, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 255, # '€' - 129: 255, # None - 130: 255, # '‚' - 131: 255, # 'Æ’' - 132: 255, # '„' - 133: 255, # '…' - 134: 255, # '†' - 135: 255, # '‡' - 136: 255, # None - 137: 255, # '‰' - 138: 255, # None - 139: 255, # '‹' - 140: 255, # None - 141: 255, # None - 142: 255, # None - 143: 255, # None - 144: 255, # None - 145: 255, # '‘' - 146: 255, # '’' - 147: 255, # '“' - 148: 255, # 'â€' - 149: 255, # '•' - 150: 255, # '–' - 151: 255, # '—' - 152: 255, # None - 153: 255, # 'â„¢' - 154: 255, # None - 155: 255, # '›' - 156: 255, # None - 157: 255, # None - 158: 255, # None - 159: 255, # None - 160: 253, # '\xa0' - 161: 233, # 'Î…' - 162: 61, # 'Ά' - 163: 253, # '£' - 164: 253, # '¤' - 165: 253, # 'Â¥' - 166: 253, # '¦' - 167: 253, # '§' - 168: 253, # '¨' - 169: 253, # '©' - 170: 253, # None - 171: 253, # '«' - 172: 253, # '¬' - 173: 74, # '\xad' - 174: 253, # '®' - 175: 253, # '―' - 176: 253, # '°' - 177: 253, # '±' - 178: 253, # '²' - 179: 253, # '³' - 180: 247, # '΄' - 181: 253, # 'µ' - 182: 253, # '¶' - 183: 36, # '·' - 184: 46, # 'Έ' - 185: 71, # 'Ή' - 186: 73, # 'Ί' - 187: 253, # '»' - 188: 54, # 'ÎŒ' - 189: 253, # '½' - 190: 108, # 'ÎŽ' - 191: 123, # 'Î' - 192: 110, # 'Î' - 193: 31, # 'Α' - 194: 51, # 'Î’' - 195: 43, # 'Γ' - 196: 41, # 'Δ' - 197: 34, # 'Ε' - 198: 91, # 'Ζ' - 199: 40, # 'Η' - 200: 52, # 'Θ' - 201: 47, # 'Ι' - 202: 44, # 'Κ' - 203: 53, # 'Λ' - 204: 38, # 'Îœ' - 205: 49, # 'Î' - 206: 59, # 'Ξ' - 207: 39, # 'Ο' - 208: 35, # 'Π' - 209: 48, # 'Ρ' - 210: 250, # None - 211: 37, # 'Σ' - 212: 33, # 'Τ' - 213: 45, # 'Î¥' - 214: 56, # 'Φ' - 215: 50, # 'Χ' - 216: 84, # 'Ψ' - 217: 57, # 'Ω' - 218: 120, # 'Ϊ' - 219: 121, # 'Ϋ' - 220: 17, # 'ά' - 221: 18, # 'έ' - 222: 22, # 'ή' - 223: 15, # 'ί' - 224: 124, # 'ΰ' - 225: 1, # 'α' - 226: 29, # 'β' - 227: 20, # 'γ' - 228: 21, # 'δ' - 229: 3, # 'ε' - 230: 32, # 'ζ' - 231: 13, # 'η' - 232: 25, # 'θ' - 233: 5, # 'ι' - 234: 11, # 'κ' - 235: 16, # 'λ' - 236: 10, # 'μ' - 237: 6, # 'ν' - 238: 30, # 'ξ' - 239: 4, # 'ο' - 240: 9, # 'Ï€' - 241: 8, # 'Ï' - 242: 14, # 'Ï‚' - 243: 7, # 'σ' - 244: 2, # 'Ï„' - 245: 12, # 'Ï…' - 246: 28, # 'φ' - 247: 23, # 'χ' - 248: 42, # 'ψ' - 249: 24, # 'ω' - 250: 64, # 'ÏŠ' - 251: 75, # 'Ï‹' - 252: 19, # 'ÏŒ' - 253: 26, # 'Ï' - 254: 27, # 'ÏŽ' - 255: 253, # None -} - -WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(charset_name='windows-1253', - language='Greek', - char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER, - language_model=GREEK_LANG_MODEL, 
- typical_positive_ratio=0.982851, - keep_ascii_letters=False, - alphabet='ΆΈΉΊΌΎÎΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπÏςστυφχψωόÏÏŽ') - -ISO_8859_7_GREEK_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' - 64: 253, # '@' - 65: 82, # 'A' - 66: 100, # 'B' - 67: 104, # 'C' - 68: 94, # 'D' - 69: 98, # 'E' - 70: 101, # 'F' - 71: 116, # 'G' - 72: 102, # 'H' - 73: 111, # 'I' - 74: 187, # 'J' - 75: 117, # 'K' - 76: 92, # 'L' - 77: 88, # 'M' - 78: 113, # 'N' - 79: 85, # 'O' - 80: 79, # 'P' - 81: 118, # 'Q' - 82: 105, # 'R' - 83: 83, # 'S' - 84: 67, # 'T' - 85: 114, # 'U' - 86: 119, # 'V' - 87: 95, # 'W' - 88: 99, # 'X' - 89: 109, # 'Y' - 90: 188, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 72, # 'a' - 98: 70, # 'b' - 99: 80, # 'c' - 100: 81, # 'd' - 101: 60, # 'e' - 102: 96, # 'f' - 103: 93, # 'g' - 104: 89, # 'h' - 105: 68, # 'i' - 106: 120, # 'j' - 107: 97, # 'k' - 108: 77, # 'l' - 109: 86, # 'm' - 110: 69, # 'n' - 111: 55, # 'o' - 112: 78, # 'p' - 113: 115, # 'q' - 114: 65, # 'r' - 115: 66, # 's' - 116: 58, # 't' - 117: 76, # 'u' - 118: 106, # 'v' - 119: 103, # 'w' - 120: 87, # 'x' - 121: 107, # 'y' - 122: 112, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 255, # '\x80' - 129: 255, # '\x81' - 130: 255, # '\x82' - 131: 255, # '\x83' - 132: 255, # '\x84' - 133: 255, # '\x85' - 134: 255, # '\x86' - 135: 255, # '\x87' - 136: 255, # '\x88' - 137: 255, # '\x89' - 138: 255, # '\x8a' - 139: 255, # '\x8b' - 140: 255, # '\x8c' - 141: 255, # '\x8d' - 142: 255, # '\x8e' - 143: 255, # '\x8f' - 144: 255, # '\x90' - 145: 255, # '\x91' - 146: 255, # '\x92' - 147: 255, # '\x93' - 148: 255, # '\x94' - 149: 255, # '\x95' - 150: 255, # '\x96' - 151: 255, # '\x97' - 152: 255, # '\x98' - 153: 255, # '\x99' - 154: 255, # '\x9a' - 155: 255, # '\x9b' - 156: 255, # '\x9c' - 157: 255, # '\x9d' - 158: 255, # '\x9e' - 159: 255, # '\x9f' - 160: 253, # '\xa0' - 161: 233, # '‘' - 162: 90, # '’' - 163: 253, # '£' - 164: 253, # '€' - 165: 253, # '₯' - 166: 253, # '¦' - 167: 253, # '§' - 168: 253, # '¨' - 169: 253, # '©' - 170: 253, # 'ͺ' - 171: 253, # '«' - 172: 253, # '¬' - 173: 74, # '\xad' - 174: 253, # None - 175: 253, # '―' - 176: 253, # '°' - 177: 253, # '±' - 178: 253, # '²' - 179: 253, # '³' - 180: 247, # '΄' - 181: 248, # 'Î…' - 182: 61, # 'Ά' - 183: 36, # '·' - 184: 
46, # 'Έ' - 185: 71, # 'Ή' - 186: 73, # 'Ί' - 187: 253, # '»' - 188: 54, # 'ÎŒ' - 189: 253, # '½' - 190: 108, # 'ÎŽ' - 191: 123, # 'Î' - 192: 110, # 'Î' - 193: 31, # 'Α' - 194: 51, # 'Î’' - 195: 43, # 'Γ' - 196: 41, # 'Δ' - 197: 34, # 'Ε' - 198: 91, # 'Ζ' - 199: 40, # 'Η' - 200: 52, # 'Θ' - 201: 47, # 'Ι' - 202: 44, # 'Κ' - 203: 53, # 'Λ' - 204: 38, # 'Îœ' - 205: 49, # 'Î' - 206: 59, # 'Ξ' - 207: 39, # 'Ο' - 208: 35, # 'Π' - 209: 48, # 'Ρ' - 210: 250, # None - 211: 37, # 'Σ' - 212: 33, # 'Τ' - 213: 45, # 'Î¥' - 214: 56, # 'Φ' - 215: 50, # 'Χ' - 216: 84, # 'Ψ' - 217: 57, # 'Ω' - 218: 120, # 'Ϊ' - 219: 121, # 'Ϋ' - 220: 17, # 'ά' - 221: 18, # 'έ' - 222: 22, # 'ή' - 223: 15, # 'ί' - 224: 124, # 'ΰ' - 225: 1, # 'α' - 226: 29, # 'β' - 227: 20, # 'γ' - 228: 21, # 'δ' - 229: 3, # 'ε' - 230: 32, # 'ζ' - 231: 13, # 'η' - 232: 25, # 'θ' - 233: 5, # 'ι' - 234: 11, # 'κ' - 235: 16, # 'λ' - 236: 10, # 'μ' - 237: 6, # 'ν' - 238: 30, # 'ξ' - 239: 4, # 'ο' - 240: 9, # 'Ï€' - 241: 8, # 'Ï' - 242: 14, # 'Ï‚' - 243: 7, # 'σ' - 244: 2, # 'Ï„' - 245: 12, # 'Ï…' - 246: 28, # 'φ' - 247: 23, # 'χ' - 248: 42, # 'ψ' - 249: 24, # 'ω' - 250: 64, # 'ÏŠ' - 251: 75, # 'Ï‹' - 252: 19, # 'ÏŒ' - 253: 26, # 'Ï' - 254: 27, # 'ÏŽ' - 255: 253, # None -} - -ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-7', - language='Greek', - char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER, - language_model=GREEK_LANG_MODEL, - typical_positive_ratio=0.982851, - keep_ascii_letters=False, - alphabet='ΆΈΉΊΌΎÎΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπÏςστυφχψωόÏÏŽ') - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py deleted file mode 100644 index 484c652..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py +++ /dev/null @@ -1,4383 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel - - -# 3: Positive -# 2: Likely -# 1: Unlikely -# 0: Negative - -HEBREW_LANG_MODEL = { - 50: { # 'a' - 50: 0, # 'a' - 60: 1, # 'c' - 61: 1, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 2, # 'l' - 54: 2, # 'n' - 49: 0, # 'o' - 51: 2, # 'r' - 43: 1, # 's' - 44: 2, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 1, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 1, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 1, # 'ק' - 7: 0, # 'ר' - 10: 1, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 60: { # 'c' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 0, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 0, # 'n' - 49: 1, # 'o' - 51: 1, # 'r' - 43: 1, # 's' - 44: 2, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 1, # '×' - 8: 0, # 'ב' - 20: 
0, # '×’' - 16: 0, # 'ד' - 3: 1, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 1, # 'מ' - 23: 0, # 'ן' - 12: 1, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 61: { # 'd' - 50: 1, # 'a' - 60: 0, # 'c' - 61: 1, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 1, # 'n' - 49: 2, # 'o' - 51: 1, # 'r' - 43: 1, # 's' - 44: 0, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 1, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 1, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 42: { # 'e' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 2, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 2, # 'l' - 54: 2, # 'n' - 49: 1, # 'o' - 51: 2, # 'r' - 43: 2, # 's' - 44: 2, # 't' - 63: 1, # 'u' - 34: 1, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 1, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 1, # '–' - 52: 2, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 53: { # 'i' - 50: 1, # 'a' - 60: 2, # 'c' - 61: 1, # 'd' - 42: 1, # 'e' - 53: 0, # 'i' - 56: 1, # 'l' - 54: 2, # 'n' - 49: 2, # 'o' - 51: 1, # 'r' - 43: 2, # 's' - 44: 2, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 1, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 56: { # 'l' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 1, # 'd' - 42: 2, # 'e' - 53: 2, # 'i' - 56: 2, # 'l' - 54: 1, # 'n' - 49: 1, # 'o' - 51: 0, # 
'r' - 43: 1, # 's' - 44: 1, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 54: { # 'n' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 1, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 1, # 'n' - 49: 1, # 'o' - 51: 0, # 'r' - 43: 1, # 's' - 44: 2, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 1, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 2, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 49: { # 'o' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 1, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 2, # 'n' - 49: 1, # 'o' - 51: 2, # 'r' - 43: 1, # 's' - 44: 1, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 51: { # 'r' - 50: 2, # 'a' - 60: 1, # 'c' - 61: 1, # 'd' - 42: 2, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 1, # 'n' - 49: 2, # 'o' - 51: 1, # 'r' - 43: 1, # 's' - 44: 1, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # 
'×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 2, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 43: { # 's' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 0, # 'd' - 42: 2, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 1, # 'n' - 49: 1, # 'o' - 51: 1, # 'r' - 43: 1, # 's' - 44: 2, # 't' - 63: 1, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 2, # 'â€' - 58: 0, # '†' - 40: 2, # '…' - }, - 44: { # 't' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 0, # 'd' - 42: 2, # 'e' - 53: 2, # 'i' - 56: 1, # 'l' - 54: 0, # 'n' - 49: 1, # 'o' - 51: 1, # 'r' - 43: 1, # 's' - 44: 1, # 't' - 63: 1, # 'u' - 34: 1, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 2, # '’' - 47: 0, # '“' - 46: 1, # 'â€' - 58: 0, # '†' - 40: 1, # '…' - }, - 63: { # 'u' - 50: 1, # 'a' - 60: 1, # 'c' - 61: 1, # 'd' - 42: 1, # 'e' - 53: 1, # 'i' - 56: 1, # 'l' - 54: 1, # 'n' - 49: 0, # 'o' - 51: 1, # 'r' - 43: 2, # 's' - 44: 1, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 1, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 34: { # '\xa0' - 50: 1, # 'a' - 60: 0, # 'c' - 61: 1, # 'd' - 42: 0, # 'e' - 53: 1, # 'i' - 56: 0, # 'l' - 54: 1, # 'n' - 49: 1, # 'o' - 51: 0, # 'r' - 43: 1, # 's' - 44: 1, # 't' - 63: 0, # 'u' - 34: 2, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 
0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 2, # '×' - 8: 1, # 'ב' - 20: 1, # '×’' - 16: 1, # 'ד' - 3: 1, # '×”' - 2: 1, # 'ו' - 24: 1, # '×–' - 14: 1, # '×—' - 22: 1, # 'ט' - 1: 2, # '×™' - 25: 0, # 'ך' - 15: 1, # '×›' - 4: 1, # 'ל' - 11: 0, # '×' - 6: 2, # 'מ' - 23: 0, # 'ן' - 12: 1, # '× ' - 19: 1, # 'ס' - 13: 1, # '×¢' - 26: 0, # '×£' - 18: 1, # 'פ' - 27: 0, # '×¥' - 21: 1, # 'צ' - 17: 1, # 'ק' - 7: 1, # 'ר' - 10: 1, # 'ש' - 5: 1, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 55: { # '´' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 1, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 1, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 1, # '×”' - 2: 1, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 2, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 1, # 'ל' - 11: 0, # '×' - 6: 1, # 'מ' - 23: 1, # 'ן' - 12: 1, # '× ' - 19: 1, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 1, # 'ר' - 10: 1, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 48: { # '¼' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 1, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 1, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 1, # '×›' - 4: 1, # 'ל' - 11: 0, # '×' - 6: 1, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 39: { # '½' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 1, # '×›' - 4: 1, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 1, # 'צ' - 17: 1, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 57: { # '¾' - 50: 0, # 
'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 0, # 'ב' - 20: 0, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 0, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 0, # '×™' - 25: 0, # 'ך' - 15: 0, # '×›' - 4: 0, # 'ל' - 11: 0, # '×' - 6: 0, # 'מ' - 23: 0, # 'ן' - 12: 0, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 0, # 'ר' - 10: 0, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 30: { # 'Ö°' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 1, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 1, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 2, # '×' - 8: 2, # 'ב' - 20: 2, # '×’' - 16: 2, # 'ד' - 3: 2, # '×”' - 2: 2, # 'ו' - 24: 2, # '×–' - 14: 2, # '×—' - 22: 2, # 'ט' - 1: 2, # '×™' - 25: 2, # 'ך' - 15: 2, # '×›' - 4: 2, # 'ל' - 11: 1, # '×' - 6: 2, # 'מ' - 23: 0, # 'ן' - 12: 2, # '× ' - 19: 2, # 'ס' - 13: 2, # '×¢' - 26: 0, # '×£' - 18: 2, # 'פ' - 27: 0, # '×¥' - 21: 2, # 'צ' - 17: 2, # 'ק' - 7: 2, # 'ר' - 10: 2, # 'ש' - 5: 2, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 59: { # 'Ö±' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 1, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 1, # 'ב' - 20: 1, # '×’' - 16: 0, # 'ד' - 3: 0, # '×”' - 2: 0, # 'ו' - 24: 1, # '×–' - 14: 0, # '×—' - 22: 0, # 'ט' - 1: 1, # '×™' - 25: 0, # 'ך' - 15: 1, # '×›' - 4: 2, # 'ל' - 11: 0, # '×' - 6: 2, # 'מ' - 23: 0, # 'ן' - 12: 1, # '× ' - 19: 0, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 0, # 'פ' - 27: 0, # '×¥' - 21: 0, # 'צ' - 17: 0, # 'ק' - 7: 1, # 'ר' - 10: 1, # 'ש' - 5: 0, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 41: { # 'Ö²' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 0, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 0, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 0, # '×' - 8: 2, # 'ב' - 20: 1, # '×’' - 16: 2, # 'ד' - 3: 1, # '×”' - 2: 1, # 'ו' - 24: 1, # '×–' - 14: 1, # '×—' - 22: 1, # 'ט' - 1: 1, # '×™' - 25: 1, 
# 'ך' - 15: 1, # '×›' - 4: 2, # 'ל' - 11: 0, # '×' - 6: 2, # 'מ' - 23: 0, # 'ן' - 12: 2, # '× ' - 19: 1, # 'ס' - 13: 0, # '×¢' - 26: 0, # '×£' - 18: 1, # 'פ' - 27: 0, # '×¥' - 21: 2, # 'צ' - 17: 1, # 'ק' - 7: 2, # 'ר' - 10: 2, # 'ש' - 5: 1, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 33: { # 'Ö´' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 1, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 1, # 'Ö´' - 37: 0, # 'Öµ' - 36: 1, # 'Ö¶' - 31: 0, # 'Ö·' - 29: 1, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 1, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 1, # '×' - 8: 2, # 'ב' - 20: 2, # '×’' - 16: 2, # 'ד' - 3: 1, # '×”' - 2: 1, # 'ו' - 24: 2, # '×–' - 14: 1, # '×—' - 22: 1, # 'ט' - 1: 3, # '×™' - 25: 1, # 'ך' - 15: 2, # '×›' - 4: 2, # 'ל' - 11: 2, # '×' - 6: 2, # 'מ' - 23: 2, # 'ן' - 12: 2, # '× ' - 19: 2, # 'ס' - 13: 1, # '×¢' - 26: 0, # '×£' - 18: 2, # 'פ' - 27: 1, # '×¥' - 21: 2, # 'צ' - 17: 2, # 'ק' - 7: 2, # 'ר' - 10: 2, # 'ש' - 5: 2, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 37: { # 'Öµ' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 1, # 'Ö¶' - 31: 1, # 'Ö·' - 29: 1, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 2, # '×' - 8: 2, # 'ב' - 20: 1, # '×’' - 16: 2, # 'ד' - 3: 2, # '×”' - 2: 1, # 'ו' - 24: 1, # '×–' - 14: 2, # '×—' - 22: 1, # 'ט' - 1: 3, # '×™' - 25: 2, # 'ך' - 15: 1, # '×›' - 4: 2, # 'ל' - 11: 2, # '×' - 6: 1, # 'מ' - 23: 2, # 'ן' - 12: 2, # '× ' - 19: 1, # 'ס' - 13: 2, # '×¢' - 26: 1, # '×£' - 18: 1, # 'פ' - 27: 1, # '×¥' - 21: 1, # 'צ' - 17: 1, # 'ק' - 7: 2, # 'ר' - 10: 2, # 'ש' - 5: 2, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 36: { # 'Ö¶' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # '¾' - 30: 0, # 'Ö°' - 59: 0, # 'Ö±' - 41: 0, # 'Ö²' - 33: 0, # 'Ö´' - 37: 0, # 'Öµ' - 36: 1, # 'Ö¶' - 31: 1, # 'Ö·' - 29: 1, # 'Ö¸' - 35: 0, # 'Ö¹' - 62: 0, # 'Ö»' - 28: 0, # 'Ö¼' - 38: 0, # '×' - 45: 0, # 'ׂ' - 9: 2, # '×' - 8: 2, # 'ב' - 20: 1, # '×’' - 16: 2, # 'ד' - 3: 2, # '×”' - 2: 1, # 'ו' - 24: 1, # '×–' - 14: 2, # '×—' - 22: 1, # 'ט' - 1: 2, # '×™' - 25: 2, # 'ך' - 15: 1, # '×›' - 4: 2, # 'ל' - 11: 2, # '×' - 6: 2, # 'מ' - 23: 2, # 'ן' - 12: 2, # '× ' - 19: 2, # 'ס' - 13: 1, # '×¢' - 26: 1, # '×£' - 18: 1, # 'פ' - 27: 2, # '×¥' - 21: 1, # 'צ' - 17: 1, # 'ק' - 7: 2, # 'ר' - 10: 2, # 'ש' - 5: 2, # 'ת' - 32: 0, # '–' - 52: 0, # '’' - 47: 0, # '“' - 46: 0, # 'â€' - 58: 0, # '†' - 40: 0, # '…' - }, - 31: { # 'Ö·' - 50: 0, # 'a' - 60: 0, # 'c' - 61: 0, # 'd' - 42: 0, # 'e' - 53: 0, # 'i' - 56: 0, # 'l' - 54: 0, # 'n' - 49: 0, # 'o' - 51: 0, # 'r' - 43: 0, # 's' - 44: 0, # 't' - 63: 0, # 'u' - 34: 0, # '\xa0' - 55: 0, # '´' - 48: 0, # '¼' - 39: 0, # '½' - 57: 0, # 
'¾'
-    # (… the remaining HEBREW_LANG_MODEL rows — one per niqqud mark, per
-    #  Hebrew letter 'א'…'ת' and per trailing punctuation character — score
-    #  each possible following character on the same 0–3 scale …)
-}
-
-# 255: Undefined characters that did not exist in training text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-# 251: Control characters
-
-# Character Mapping Table(s):
-WINDOWS_1255_HEBREW_CHAR_TO_ORDER = {
-    # (… maps every windows-1255 byte 0–255 to a frequency order: control
-    #  bytes to 251/254/255, digits to 252, punctuation to 253, and ASCII or
-    #  Hebrew letters to their per-language frequency ranks …)
-}
-
-WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(charset_name='windows-1255',
-                                                   language='Hebrew',
-                                                   char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER,
-                                                   language_model=HEBREW_LANG_MODEL,
-                                                   typical_positive_ratio=0.984004,
-                                                   keep_ascii_letters=False,
-                                                   alphabet='אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ')
-
diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py
deleted file mode 100644
index bbc5cda..0000000
--- a/venv/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py
+++ /dev/null
@@ -1,4650 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-
-
-# 3: Positive
-# 2: Likely
-# 1: Unlikely
-# 0: Negative
-
-HUNGARIAN_LANG_MODEL = {
-    # (… one row per capital letter 'A'–'Z', scoring every possible following
-    #  character on the same 0–3 scale; the lowercase rows follow, starting
-    #  with …)
-    2: {  # 'a'
-        # (… 2–3 for most plain lowercase successors, 1 for the accented
-        #  vowels, 0 for every capital …)
-        42: 0,  # 'ő'
56: 0, # 'ű' - }, - 18: { # 'b' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 2, # 'k' - 6: 2, # 'l' - 13: 1, # 'm' - 4: 2, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 2, # 's' - 3: 1, # 't' - 21: 3, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 3, # 'ó' - 24: 2, # 'ö' - 31: 2, # 'ú' - 29: 2, # 'ü' - 42: 2, # 'Å‘' - 56: 1, # 'ű' - }, - 26: { # 'c' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 1, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 1, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 2, # 'a' - 18: 1, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 3, # 'h' - 9: 3, # 'i' - 22: 1, # 'j' - 7: 2, # 'k' - 6: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 2, # 'r' - 5: 3, # 's' - 3: 2, # 't' - 21: 2, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 2, # 'á' - 15: 2, # 'é' - 30: 2, # 'í' - 25: 1, # 'ó' - 24: 1, # 'ö' - 31: 1, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 17: { # 'd' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 2, # 'b' - 26: 1, # 'c' - 17: 2, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 3, # 'j' - 7: 2, # 'k' - 6: 1, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 2, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 3, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 2, # 'ú' - 29: 2, # 'ü' - 42: 2, # 'Å‘' - 56: 1, # 'ű' - }, - 1: { # 'e' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 2, # 'a' - 18: 3, # 'b' - 26: 3, # 'c' - 17: 3, # 'd' - 1: 2, # 'e' - 27: 3, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 3, # 'i' - 22: 3, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 2, # 'o' - 23: 3, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 2, # 'u' - 19: 3, # 'v' - 
62: 2, # 'x' - 16: 2, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 1, # 'é' - 30: 1, # 'í' - 25: 1, # 'ó' - 24: 1, # 'ö' - 31: 1, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 27: { # 'f' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 8: 3, # 'o' - 23: 0, # 'p' - 10: 3, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 2, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 0, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 1, # 'í' - 25: 1, # 'ó' - 24: 3, # 'ö' - 31: 1, # 'ú' - 29: 2, # 'ü' - 42: 1, # 'Å‘' - 56: 1, # 'ű' - }, - 12: { # 'g' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 2, # 'c' - 17: 2, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 3, # 'i' - 22: 3, # 'j' - 7: 2, # 'k' - 6: 3, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 3, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 3, # 'ó' - 24: 2, # 'ö' - 31: 2, # 'ú' - 29: 2, # 'ü' - 42: 2, # 'Å‘' - 56: 1, # 'ű' - }, - 20: { # 'h' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 0, # 'd' - 1: 3, # 'e' - 27: 0, # 'f' - 12: 1, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 1, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 8: 3, # 'o' - 23: 0, # 'p' - 10: 1, # 'r' - 5: 2, # 's' - 3: 1, # 't' - 21: 3, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 2, # 'y' - 11: 0, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 3, # 'í' - 25: 2, # 'ó' - 24: 2, # 'ö' - 31: 2, # 'ú' - 29: 1, # 'ü' - 42: 1, # 'Å‘' - 56: 1, # 'ű' - }, - 9: { # 'i' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 
3, # 'c' - 17: 3, # 'd' - 1: 3, # 'e' - 27: 3, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 2, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 2, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 1, # 'x' - 16: 1, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 2, # 'é' - 30: 1, # 'í' - 25: 3, # 'ó' - 24: 1, # 'ö' - 31: 2, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 1, # 'ű' - }, - 22: { # 'j' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 2, # 'b' - 26: 1, # 'c' - 17: 3, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 2, # 'h' - 9: 1, # 'i' - 22: 2, # 'j' - 7: 2, # 'k' - 6: 2, # 'l' - 13: 1, # 'm' - 4: 2, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 2, # 'r' - 5: 2, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 1, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 3, # 'ú' - 29: 2, # 'ü' - 42: 1, # 'Å‘' - 56: 1, # 'ű' - }, - 7: { # 'k' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 1, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 2, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 3, # 'í' - 25: 2, # 'ó' - 24: 3, # 'ö' - 31: 1, # 'ú' - 29: 3, # 'ü' - 42: 1, # 'Å‘' - 56: 1, # 'ű' - }, - 6: { # 'l' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 1, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 1, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 2, # 'b' - 26: 3, # 'c' - 17: 3, # 'd' - 1: 3, # 'e' - 27: 3, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 3, # 'i' - 22: 3, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 2, # 'p' - 10: 2, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 3, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 3, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 2, # 'ú' - 29: 2, # 'ü' - 42: 3, # 'Å‘' - 56: 1, # 'ű' - }, - 13: { # 'm' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 
38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 1, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 2, # 'n' - 8: 3, # 'o' - 23: 3, # 'p' - 10: 2, # 'r' - 5: 2, # 's' - 3: 2, # 't' - 21: 3, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 2, # 'ó' - 24: 2, # 'ö' - 31: 2, # 'ú' - 29: 2, # 'ü' - 42: 1, # 'Å‘' - 56: 2, # 'ű' - }, - 4: { # 'n' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 3, # 'c' - 17: 3, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 2, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 2, # 'p' - 10: 2, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 2, # 'v' - 62: 1, # 'x' - 16: 3, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 2, # 'ó' - 24: 3, # 'ö' - 31: 2, # 'ú' - 29: 3, # 'ü' - 42: 2, # 'Å‘' - 56: 1, # 'ű' - }, - 8: { # 'o' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 1, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 2, # 'a' - 18: 3, # 'b' - 26: 3, # 'c' - 17: 3, # 'd' - 1: 2, # 'e' - 27: 2, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 2, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 1, # 'o' - 23: 3, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 2, # 'u' - 19: 3, # 'v' - 62: 1, # 'x' - 16: 1, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 1, # 'á' - 15: 2, # 'é' - 30: 1, # 'í' - 25: 1, # 'ó' - 24: 1, # 'ö' - 31: 1, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 23: { # 'p' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 1, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 2, # 'k' - 6: 3, # 'l' - 13: 1, # 'm' - 4: 2, # 'n' - 8: 3, # 'o' - 23: 3, # 'p' - 10: 3, # 'r' - 5: 2, # 's' - 3: 2, # 't' - 21: 3, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 
0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 2, # 'ó' - 24: 2, # 'ö' - 31: 1, # 'ú' - 29: 2, # 'ü' - 42: 1, # 'Å‘' - 56: 1, # 'ű' - }, - 10: { # 'r' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 3, # 'c' - 17: 3, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 3, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 3, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 1, # 'x' - 16: 2, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 3, # 'ú' - 29: 3, # 'ü' - 42: 2, # 'Å‘' - 56: 2, # 'ű' - }, - 5: { # 's' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 2, # 'c' - 17: 2, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 2, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 1, # 'j' - 7: 3, # 'k' - 6: 2, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 3, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 3, # 'ú' - 29: 3, # 'ü' - 42: 2, # 'Å‘' - 56: 1, # 'ű' - }, - 3: { # 't' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 3, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 1, # 'g' - 20: 3, # 'h' - 9: 3, # 'i' - 22: 3, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 3, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 3, # 'ú' - 29: 3, # 'ü' - 42: 3, # 'Å‘' - 56: 2, # 'ű' - }, - 21: { # 'u' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 2, # 'b' - 26: 2, # 'c' - 17: 3, # 'd' - 1: 2, # 'e' - 27: 1, # 'f' - 12: 3, # 'g' - 20: 2, # 'h' - 9: 2, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' 
- 13: 3, # 'm' - 4: 3, # 'n' - 8: 1, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 1, # 'u' - 19: 3, # 'v' - 62: 1, # 'x' - 16: 1, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 2, # 'á' - 15: 1, # 'é' - 30: 1, # 'í' - 25: 1, # 'ó' - 24: 0, # 'ö' - 31: 1, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 19: { # 'v' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 2, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 3, # 'i' - 22: 1, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 1, # 'r' - 5: 2, # 's' - 3: 2, # 't' - 21: 2, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 2, # 'ó' - 24: 2, # 'ö' - 31: 1, # 'ú' - 29: 2, # 'ü' - 42: 1, # 'Å‘' - 56: 1, # 'ű' - }, - 62: { # 'x' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 0, # 'd' - 1: 1, # 'e' - 27: 1, # 'f' - 12: 0, # 'g' - 20: 0, # 'h' - 9: 1, # 'i' - 22: 0, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 8: 1, # 'o' - 23: 1, # 'p' - 10: 1, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 1, # 'u' - 19: 0, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 0, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 1, # 'á' - 15: 1, # 'é' - 30: 1, # 'í' - 25: 1, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 16: { # 'y' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 2, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 3, # 'e' - 27: 2, # 'f' - 12: 2, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 2, # 'j' - 7: 2, # 'k' - 6: 2, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 2, # 'p' - 10: 2, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 2, # 'í' - 25: 2, # 'ó' - 24: 3, # 'ö' - 31: 2, # 'ú' - 29: 2, # 'ü' - 42: 1, # 'Å‘' - 56: 2, # 'ű' - }, - 11: { # 'z' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 
0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 3, # 'a' - 18: 2, # 'b' - 26: 1, # 'c' - 17: 3, # 'd' - 1: 3, # 'e' - 27: 1, # 'f' - 12: 2, # 'g' - 20: 2, # 'h' - 9: 3, # 'i' - 22: 1, # 'j' - 7: 3, # 'k' - 6: 2, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 3, # 'o' - 23: 1, # 'p' - 10: 2, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 3, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 3, # 'á' - 15: 3, # 'é' - 30: 3, # 'í' - 25: 3, # 'ó' - 24: 3, # 'ö' - 31: 2, # 'ú' - 29: 3, # 'ü' - 42: 2, # 'Å‘' - 56: 1, # 'ű' - }, - 51: { # 'Ã' - 28: 0, # 'A' - 40: 1, # 'B' - 54: 1, # 'C' - 45: 1, # 'D' - 32: 0, # 'E' - 50: 1, # 'F' - 49: 2, # 'G' - 38: 1, # 'H' - 39: 1, # 'I' - 53: 1, # 'J' - 36: 1, # 'K' - 41: 2, # 'L' - 34: 1, # 'M' - 35: 2, # 'N' - 47: 0, # 'O' - 46: 1, # 'P' - 43: 2, # 'R' - 33: 2, # 'S' - 37: 1, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 0, # 'i' - 22: 1, # 'j' - 7: 1, # 'k' - 6: 2, # 'l' - 13: 2, # 'm' - 4: 0, # 'n' - 8: 0, # 'o' - 23: 1, # 'p' - 10: 1, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 0, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 1, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 44: { # 'É' - 28: 0, # 'A' - 40: 1, # 'B' - 54: 1, # 'C' - 45: 1, # 'D' - 32: 1, # 'E' - 50: 0, # 'F' - 49: 2, # 'G' - 38: 1, # 'H' - 39: 1, # 'I' - 53: 1, # 'J' - 36: 1, # 'K' - 41: 2, # 'L' - 34: 1, # 'M' - 35: 2, # 'N' - 47: 0, # 'O' - 46: 1, # 'P' - 43: 2, # 'R' - 33: 2, # 'S' - 37: 2, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 0, # 'i' - 22: 1, # 'j' - 7: 1, # 'k' - 6: 2, # 'l' - 13: 1, # 'm' - 4: 2, # 'n' - 8: 0, # 'o' - 23: 1, # 'p' - 10: 2, # 'r' - 5: 3, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 0, # 'z' - 51: 0, # 'Ã' - 44: 1, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 61: { # 'Ã' - 28: 0, # 'A' - 40: 1, # 'B' - 54: 1, # 'C' - 45: 1, # 'D' - 32: 0, # 'E' - 50: 1, # 'F' - 49: 1, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 1, # 'J' - 36: 0, # 'K' - 41: 1, # 'L' - 34: 1, # 'M' - 35: 1, # 'N' - 47: 0, # 'O' - 46: 1, # 'P' - 43: 1, # 'R' - 33: 1, # 'S' - 37: 1, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 0, # 'b' - 26: 0, # 'c' - 17: 0, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 2, # 'g' - 20: 0, # 'h' - 9: 0, # 'i' - 22: 0, # 'j' - 7: 0, # 'k' - 6: 0, # 'l' - 13: 1, # 'm' - 4: 0, # 'n' - 8: 0, # 'o' - 23: 0, # 'p' - 10: 1, # 'r' - 5: 0, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 0, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 
0, # 'ű' - }, - 58: { # 'Ó' - 28: 1, # 'A' - 40: 1, # 'B' - 54: 1, # 'C' - 45: 1, # 'D' - 32: 0, # 'E' - 50: 1, # 'F' - 49: 1, # 'G' - 38: 1, # 'H' - 39: 1, # 'I' - 53: 1, # 'J' - 36: 1, # 'K' - 41: 2, # 'L' - 34: 1, # 'M' - 35: 1, # 'N' - 47: 0, # 'O' - 46: 1, # 'P' - 43: 1, # 'R' - 33: 1, # 'S' - 37: 1, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 0, # 'g' - 20: 2, # 'h' - 9: 0, # 'i' - 22: 0, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 0, # 'm' - 4: 1, # 'n' - 8: 0, # 'o' - 23: 1, # 'p' - 10: 1, # 'r' - 5: 1, # 's' - 3: 0, # 't' - 21: 0, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 1, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 59: { # 'Ö' - 28: 0, # 'A' - 40: 1, # 'B' - 54: 1, # 'C' - 45: 1, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 1, # 'G' - 38: 1, # 'H' - 39: 0, # 'I' - 53: 1, # 'J' - 36: 1, # 'K' - 41: 1, # 'L' - 34: 1, # 'M' - 35: 1, # 'N' - 47: 0, # 'O' - 46: 1, # 'P' - 43: 1, # 'R' - 33: 1, # 'S' - 37: 1, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 0, # 'b' - 26: 1, # 'c' - 17: 1, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 0, # 'g' - 20: 0, # 'h' - 9: 0, # 'i' - 22: 0, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 8: 0, # 'o' - 23: 0, # 'p' - 10: 2, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 60: { # 'Ú' - 28: 0, # 'A' - 40: 1, # 'B' - 54: 1, # 'C' - 45: 1, # 'D' - 32: 0, # 'E' - 50: 1, # 'F' - 49: 1, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 1, # 'J' - 36: 1, # 'K' - 41: 1, # 'L' - 34: 1, # 'M' - 35: 1, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 1, # 'R' - 33: 1, # 'S' - 37: 1, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 0, # 'b' - 26: 0, # 'c' - 17: 0, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 2, # 'g' - 20: 0, # 'h' - 9: 0, # 'i' - 22: 2, # 'j' - 7: 0, # 'k' - 6: 0, # 'l' - 13: 0, # 'm' - 4: 1, # 'n' - 8: 0, # 'o' - 23: 0, # 'p' - 10: 1, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 0, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 0, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 63: { # 'Ãœ' - 28: 0, # 'A' - 40: 1, # 'B' - 54: 0, # 'C' - 45: 1, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 1, # 'G' - 38: 1, # 'H' - 39: 0, # 'I' - 53: 1, # 'J' - 36: 1, # 'K' - 41: 1, # 'L' - 34: 1, # 'M' - 35: 1, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 1, # 'R' - 33: 1, # 'S' - 37: 1, # 'T' - 57: 0, # 'U' - 48: 1, # 'V' - 55: 0, # 'Y' - 52: 1, # 'Z' - 2: 0, # 'a' - 18: 1, # 'b' - 26: 0, # 'c' - 17: 1, # 'd' - 1: 0, # 'e' - 27: 0, # 'f' - 12: 1, # 'g' - 20: 0, # 'h' - 9: 0, # 'i' - 22: 0, # 'j' - 7: 0, # 'k' - 6: 1, # 'l' - 13: 0, # 'm' - 4: 1, # 'n' - 8: 0, # 'o' - 23: 0, # 'p' - 10: 1, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 1, # 'v' - 
62: 0, # 'x' - 16: 0, # 'y' - 11: 1, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 14: { # 'á' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 3, # 'b' - 26: 3, # 'c' - 17: 3, # 'd' - 1: 1, # 'e' - 27: 2, # 'f' - 12: 3, # 'g' - 20: 2, # 'h' - 9: 2, # 'i' - 22: 3, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 1, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 2, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 1, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 1, # 'á' - 15: 2, # 'é' - 30: 1, # 'í' - 25: 0, # 'ó' - 24: 1, # 'ö' - 31: 0, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 15: { # 'é' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 3, # 'b' - 26: 2, # 'c' - 17: 3, # 'd' - 1: 1, # 'e' - 27: 1, # 'f' - 12: 3, # 'g' - 20: 3, # 'h' - 9: 2, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 1, # 'o' - 23: 3, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 0, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 1, # 'á' - 15: 1, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 30: { # 'í' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 0, # 'a' - 18: 1, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 0, # 'e' - 27: 1, # 'f' - 12: 3, # 'g' - 20: 0, # 'h' - 9: 0, # 'i' - 22: 1, # 'j' - 7: 1, # 'k' - 6: 2, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 0, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 2, # 's' - 3: 3, # 't' - 21: 0, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 25: { # 'ó' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 2, # 'a' - 18: 3, # 'b' - 26: 
2, # 'c' - 17: 3, # 'd' - 1: 1, # 'e' - 27: 2, # 'f' - 12: 2, # 'g' - 20: 2, # 'h' - 9: 2, # 'i' - 22: 2, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 8: 1, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 1, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 1, # 'á' - 15: 1, # 'é' - 30: 1, # 'í' - 25: 0, # 'ó' - 24: 1, # 'ö' - 31: 1, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 24: { # 'ö' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 0, # 'a' - 18: 3, # 'b' - 26: 1, # 'c' - 17: 2, # 'd' - 1: 0, # 'e' - 27: 1, # 'f' - 12: 2, # 'g' - 20: 1, # 'h' - 9: 0, # 'i' - 22: 1, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 8: 0, # 'o' - 23: 2, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 3, # 't' - 21: 0, # 'u' - 19: 3, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 3, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 31: { # 'ú' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 1, # 'b' - 26: 2, # 'c' - 17: 1, # 'd' - 1: 1, # 'e' - 27: 2, # 'f' - 12: 3, # 'g' - 20: 1, # 'h' - 9: 1, # 'i' - 22: 3, # 'j' - 7: 1, # 'k' - 6: 3, # 'l' - 13: 1, # 'm' - 4: 2, # 'n' - 8: 0, # 'o' - 23: 1, # 'p' - 10: 3, # 'r' - 5: 3, # 's' - 3: 2, # 't' - 21: 1, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 1, # 'á' - 15: 1, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 29: { # 'ü' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 1, # 'b' - 26: 1, # 'c' - 17: 2, # 'd' - 1: 1, # 'e' - 27: 1, # 'f' - 12: 3, # 'g' - 20: 2, # 'h' - 9: 1, # 'i' - 22: 1, # 'j' - 7: 3, # 'k' - 6: 3, # 'l' - 13: 1, # 'm' - 4: 3, # 'n' - 8: 0, # 'o' - 23: 1, # 'p' - 10: 2, # 'r' - 5: 2, # 's' - 3: 2, # 't' - 21: 0, # 'u' - 19: 2, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 1, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 42: { # 'Å‘' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' 
- 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 2, # 'b' - 26: 1, # 'c' - 17: 2, # 'd' - 1: 1, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 1, # 'i' - 22: 1, # 'j' - 7: 2, # 'k' - 6: 3, # 'l' - 13: 1, # 'm' - 4: 2, # 'n' - 8: 1, # 'o' - 23: 1, # 'p' - 10: 2, # 'r' - 5: 2, # 's' - 3: 2, # 't' - 21: 1, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 1, # 'é' - 30: 1, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 1, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, - 56: { # 'ű' - 28: 0, # 'A' - 40: 0, # 'B' - 54: 0, # 'C' - 45: 0, # 'D' - 32: 0, # 'E' - 50: 0, # 'F' - 49: 0, # 'G' - 38: 0, # 'H' - 39: 0, # 'I' - 53: 0, # 'J' - 36: 0, # 'K' - 41: 0, # 'L' - 34: 0, # 'M' - 35: 0, # 'N' - 47: 0, # 'O' - 46: 0, # 'P' - 43: 0, # 'R' - 33: 0, # 'S' - 37: 0, # 'T' - 57: 0, # 'U' - 48: 0, # 'V' - 55: 0, # 'Y' - 52: 0, # 'Z' - 2: 1, # 'a' - 18: 1, # 'b' - 26: 0, # 'c' - 17: 1, # 'd' - 1: 1, # 'e' - 27: 1, # 'f' - 12: 1, # 'g' - 20: 1, # 'h' - 9: 1, # 'i' - 22: 1, # 'j' - 7: 1, # 'k' - 6: 1, # 'l' - 13: 0, # 'm' - 4: 2, # 'n' - 8: 0, # 'o' - 23: 0, # 'p' - 10: 1, # 'r' - 5: 1, # 's' - 3: 1, # 't' - 21: 0, # 'u' - 19: 1, # 'v' - 62: 0, # 'x' - 16: 0, # 'y' - 11: 2, # 'z' - 51: 0, # 'Ã' - 44: 0, # 'É' - 61: 0, # 'Ã' - 58: 0, # 'Ó' - 59: 0, # 'Ö' - 60: 0, # 'Ú' - 63: 0, # 'Ãœ' - 14: 0, # 'á' - 15: 0, # 'é' - 30: 0, # 'í' - 25: 0, # 'ó' - 24: 0, # 'ö' - 31: 0, # 'ú' - 29: 0, # 'ü' - 42: 0, # 'Å‘' - 56: 0, # 'ű' - }, -} - -# 255: Undefined characters that did not exist in training text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 -# 251: Control characters - -# Character Mapping Table(s): -WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' 
- 64: 253, # '@' - 65: 28, # 'A' - 66: 40, # 'B' - 67: 54, # 'C' - 68: 45, # 'D' - 69: 32, # 'E' - 70: 50, # 'F' - 71: 49, # 'G' - 72: 38, # 'H' - 73: 39, # 'I' - 74: 53, # 'J' - 75: 36, # 'K' - 76: 41, # 'L' - 77: 34, # 'M' - 78: 35, # 'N' - 79: 47, # 'O' - 80: 46, # 'P' - 81: 72, # 'Q' - 82: 43, # 'R' - 83: 33, # 'S' - 84: 37, # 'T' - 85: 57, # 'U' - 86: 48, # 'V' - 87: 64, # 'W' - 88: 68, # 'X' - 89: 55, # 'Y' - 90: 52, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 2, # 'a' - 98: 18, # 'b' - 99: 26, # 'c' - 100: 17, # 'd' - 101: 1, # 'e' - 102: 27, # 'f' - 103: 12, # 'g' - 104: 20, # 'h' - 105: 9, # 'i' - 106: 22, # 'j' - 107: 7, # 'k' - 108: 6, # 'l' - 109: 13, # 'm' - 110: 4, # 'n' - 111: 8, # 'o' - 112: 23, # 'p' - 113: 67, # 'q' - 114: 10, # 'r' - 115: 5, # 's' - 116: 3, # 't' - 117: 21, # 'u' - 118: 19, # 'v' - 119: 65, # 'w' - 120: 62, # 'x' - 121: 16, # 'y' - 122: 11, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 161, # '€' - 129: 162, # None - 130: 163, # '‚' - 131: 164, # None - 132: 165, # '„' - 133: 166, # '…' - 134: 167, # '†' - 135: 168, # '‡' - 136: 169, # None - 137: 170, # '‰' - 138: 171, # 'Å ' - 139: 172, # '‹' - 140: 173, # 'Åš' - 141: 174, # 'Ť' - 142: 175, # 'Ž' - 143: 176, # 'Ź' - 144: 177, # None - 145: 178, # '‘' - 146: 179, # '’' - 147: 180, # '“' - 148: 78, # 'â€' - 149: 181, # '•' - 150: 69, # '–' - 151: 182, # '—' - 152: 183, # None - 153: 184, # 'â„¢' - 154: 185, # 'Å¡' - 155: 186, # '›' - 156: 187, # 'Å›' - 157: 188, # 'Å¥' - 158: 189, # 'ž' - 159: 190, # 'ź' - 160: 191, # '\xa0' - 161: 192, # 'ˇ' - 162: 193, # '˘' - 163: 194, # 'Å' - 164: 195, # '¤' - 165: 196, # 'Ä„' - 166: 197, # '¦' - 167: 76, # '§' - 168: 198, # '¨' - 169: 199, # '©' - 170: 200, # 'Åž' - 171: 201, # '«' - 172: 202, # '¬' - 173: 203, # '\xad' - 174: 204, # '®' - 175: 205, # 'Å»' - 176: 81, # '°' - 177: 206, # '±' - 178: 207, # 'Ë›' - 179: 208, # 'Å‚' - 180: 209, # '´' - 181: 210, # 'µ' - 182: 211, # '¶' - 183: 212, # '·' - 184: 213, # '¸' - 185: 214, # 'Ä…' - 186: 215, # 'ÅŸ' - 187: 216, # '»' - 188: 217, # 'Ľ' - 189: 218, # 'Ë' - 190: 219, # 'ľ' - 191: 220, # 'ż' - 192: 221, # 'Å”' - 193: 51, # 'Ã' - 194: 83, # 'Â' - 195: 222, # 'Ä‚' - 196: 80, # 'Ä' - 197: 223, # 'Ĺ' - 198: 224, # 'Ć' - 199: 225, # 'Ç' - 200: 226, # 'ÄŒ' - 201: 44, # 'É' - 202: 227, # 'Ę' - 203: 228, # 'Ë' - 204: 229, # 'Äš' - 205: 61, # 'Ã' - 206: 230, # 'ÃŽ' - 207: 231, # 'ÄŽ' - 208: 232, # 'Ä' - 209: 233, # 'Ń' - 210: 234, # 'Ň' - 211: 58, # 'Ó' - 212: 235, # 'Ô' - 213: 66, # 'Å' - 214: 59, # 'Ö' - 215: 236, # '×' - 216: 237, # 'Ř' - 217: 238, # 'Å®' - 218: 60, # 'Ú' - 219: 70, # 'Å°' - 220: 63, # 'Ãœ' - 221: 239, # 'Ã' - 222: 240, # 'Å¢' - 223: 241, # 'ß' - 224: 84, # 'Å•' - 225: 14, # 'á' - 226: 75, # 'â' - 227: 242, # 'ă' - 228: 71, # 'ä' - 229: 82, # 'ĺ' - 230: 243, # 'ć' - 231: 73, # 'ç' - 232: 244, # 'Ä' - 233: 15, # 'é' - 234: 85, # 'Ä™' - 235: 79, # 'ë' - 236: 86, # 'Ä›' - 237: 30, # 'í' - 238: 77, # 'î' - 239: 87, # 'Ä' - 240: 245, # 'Ä‘' - 241: 246, # 'Å„' - 242: 247, # 'ň' - 243: 25, # 'ó' - 244: 74, # 'ô' - 245: 42, # 'Å‘' - 246: 24, # 'ö' - 247: 248, # '÷' - 248: 249, # 'Å™' - 249: 250, # 'ů' - 250: 31, # 'ú' - 251: 56, # 'ű' - 252: 29, # 'ü' - 253: 251, # 'ý' - 254: 252, # 'Å£' - 255: 253, # 'Ë™' -} - -WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1250', - language='Hungarian', - char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, - 
language_model=HUNGARIAN_LANG_MODEL, - typical_positive_ratio=0.947368, - keep_ascii_letters=True, - alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÃÉÃÓÖÚÜáéíóöúüÅőŰű') - -ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' - 64: 253, # '@' - 65: 28, # 'A' - 66: 40, # 'B' - 67: 54, # 'C' - 68: 45, # 'D' - 69: 32, # 'E' - 70: 50, # 'F' - 71: 49, # 'G' - 72: 38, # 'H' - 73: 39, # 'I' - 74: 53, # 'J' - 75: 36, # 'K' - 76: 41, # 'L' - 77: 34, # 'M' - 78: 35, # 'N' - 79: 47, # 'O' - 80: 46, # 'P' - 81: 71, # 'Q' - 82: 43, # 'R' - 83: 33, # 'S' - 84: 37, # 'T' - 85: 57, # 'U' - 86: 48, # 'V' - 87: 64, # 'W' - 88: 68, # 'X' - 89: 55, # 'Y' - 90: 52, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 2, # 'a' - 98: 18, # 'b' - 99: 26, # 'c' - 100: 17, # 'd' - 101: 1, # 'e' - 102: 27, # 'f' - 103: 12, # 'g' - 104: 20, # 'h' - 105: 9, # 'i' - 106: 22, # 'j' - 107: 7, # 'k' - 108: 6, # 'l' - 109: 13, # 'm' - 110: 4, # 'n' - 111: 8, # 'o' - 112: 23, # 'p' - 113: 67, # 'q' - 114: 10, # 'r' - 115: 5, # 's' - 116: 3, # 't' - 117: 21, # 'u' - 118: 19, # 'v' - 119: 65, # 'w' - 120: 62, # 'x' - 121: 16, # 'y' - 122: 11, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 159, # '\x80' - 129: 160, # '\x81' - 130: 161, # '\x82' - 131: 162, # '\x83' - 132: 163, # '\x84' - 133: 164, # '\x85' - 134: 165, # '\x86' - 135: 166, # '\x87' - 136: 167, # '\x88' - 137: 168, # '\x89' - 138: 169, # '\x8a' - 139: 170, # '\x8b' - 140: 171, # '\x8c' - 141: 172, # '\x8d' - 142: 173, # '\x8e' - 143: 174, # '\x8f' - 144: 175, # '\x90' - 145: 176, # '\x91' - 146: 177, # '\x92' - 147: 178, # '\x93' - 148: 179, # '\x94' - 149: 180, # '\x95' - 150: 181, # '\x96' - 151: 182, # '\x97' - 152: 183, # '\x98' - 153: 184, # '\x99' - 154: 185, # '\x9a' - 155: 186, # '\x9b' - 156: 187, # '\x9c' - 157: 188, # '\x9d' - 158: 189, # '\x9e' - 159: 190, # '\x9f' - 160: 191, # '\xa0' - 161: 192, # 'Ä„' - 162: 193, # '˘' - 163: 194, # 'Å' - 164: 195, # '¤' - 165: 196, # 'Ľ' - 166: 197, # 'Åš' - 167: 75, # '§' - 168: 198, # '¨' - 169: 199, # 'Å ' - 170: 200, # 'Åž' - 171: 201, # 'Ť' - 172: 202, # 'Ź' - 173: 203, # '\xad' - 174: 204, # 'Ž' - 175: 205, # 'Å»' - 176: 79, # '°' - 177: 206, # 'Ä…' - 178: 207, # 'Ë›' - 179: 208, # 'Å‚' - 180: 209, # '´' - 181: 210, # 'ľ' - 182: 211, # 'Å›' - 183: 
212, # 'ˇ' - 184: 213, # '¸' - 185: 214, # 'Å¡' - 186: 215, # 'ÅŸ' - 187: 216, # 'Å¥' - 188: 217, # 'ź' - 189: 218, # 'Ë' - 190: 219, # 'ž' - 191: 220, # 'ż' - 192: 221, # 'Å”' - 193: 51, # 'Ã' - 194: 81, # 'Â' - 195: 222, # 'Ä‚' - 196: 78, # 'Ä' - 197: 223, # 'Ĺ' - 198: 224, # 'Ć' - 199: 225, # 'Ç' - 200: 226, # 'ÄŒ' - 201: 44, # 'É' - 202: 227, # 'Ę' - 203: 228, # 'Ë' - 204: 229, # 'Äš' - 205: 61, # 'Ã' - 206: 230, # 'ÃŽ' - 207: 231, # 'ÄŽ' - 208: 232, # 'Ä' - 209: 233, # 'Ń' - 210: 234, # 'Ň' - 211: 58, # 'Ó' - 212: 235, # 'Ô' - 213: 66, # 'Å' - 214: 59, # 'Ö' - 215: 236, # '×' - 216: 237, # 'Ř' - 217: 238, # 'Å®' - 218: 60, # 'Ú' - 219: 69, # 'Å°' - 220: 63, # 'Ãœ' - 221: 239, # 'Ã' - 222: 240, # 'Å¢' - 223: 241, # 'ß' - 224: 82, # 'Å•' - 225: 14, # 'á' - 226: 74, # 'â' - 227: 242, # 'ă' - 228: 70, # 'ä' - 229: 80, # 'ĺ' - 230: 243, # 'ć' - 231: 72, # 'ç' - 232: 244, # 'Ä' - 233: 15, # 'é' - 234: 83, # 'Ä™' - 235: 77, # 'ë' - 236: 84, # 'Ä›' - 237: 30, # 'í' - 238: 76, # 'î' - 239: 85, # 'Ä' - 240: 245, # 'Ä‘' - 241: 246, # 'Å„' - 242: 247, # 'ň' - 243: 25, # 'ó' - 244: 73, # 'ô' - 245: 42, # 'Å‘' - 246: 24, # 'ö' - 247: 248, # '÷' - 248: 249, # 'Å™' - 249: 250, # 'ů' - 250: 31, # 'ú' - 251: 56, # 'ű' - 252: 29, # 'ü' - 253: 251, # 'ý' - 254: 252, # 'Å£' - 255: 253, # 'Ë™' -} - -ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-2', - language='Hungarian', - char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, - language_model=HUNGARIAN_LANG_MODEL, - typical_positive_ratio=0.947368, - keep_ascii_letters=True, - alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÃÉÃÓÖÚÜáéíóöúüÅőŰű') - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langrussianmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langrussianmodel.py deleted file mode 100644 index 5594452..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/langrussianmodel.py +++ /dev/null @@ -1,5718 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel - - -# 3: Positive -# 2: Likely -# 1: Unlikely -# 0: Negative - -RUSSIAN_LANG_MODEL = { - 37: { # 'Ð' - 37: 0, # 'Ð' - 44: 1, # 'Б' - 33: 1, # 'Ð’' - 46: 1, # 'Г' - 41: 1, # 'Д' - 48: 1, # 'Е' - 56: 1, # 'Ж' - 51: 1, # 'З' - 42: 1, # 'И' - 60: 1, # 'Й' - 36: 1, # 'К' - 49: 1, # 'Л' - 38: 1, # 'Ðœ' - 31: 2, # 'Ð' - 34: 1, # 'О' - 35: 1, # 'П' - 45: 1, # 'Р' - 32: 1, # 'С' - 40: 1, # 'Т' - 52: 1, # 'У' - 53: 1, # 'Ф' - 55: 1, # 'Ð¥' - 58: 1, # 'Ц' - 50: 1, # 'Ч' - 57: 1, # 'Ш' - 63: 1, # 'Щ' - 62: 0, # 'Ы' - 61: 0, # 'Ь' - 47: 0, # 'Э' - 59: 1, # 'Ю' - 43: 1, # 'Я' - 3: 1, # 'а' - 21: 2, # 'б' - 10: 2, # 'в' - 19: 2, # 'г' - 13: 2, # 'д' - 2: 0, # 'е' - 24: 1, # 'ж' - 20: 1, # 'з' - 4: 0, # 'и' - 23: 1, # 'й' - 11: 2, # 'к' - 8: 3, # 'л' - 12: 2, # 'м' - 5: 2, # 'н' - 1: 0, # 'о' - 15: 2, # 'п' - 9: 2, # 'Ñ€' - 7: 2, # 'Ñ' - 6: 2, # 'Ñ‚' - 14: 2, # 'у' - 39: 2, # 'Ñ„' - 26: 2, # 'Ñ…' - 28: 0, # 'ц' - 22: 1, # 'ч' - 25: 2, # 'ш' - 29: 0, # 'щ' - 54: 0, # 'ÑŠ' - 18: 0, # 'Ñ‹' - 17: 0, # 'ÑŒ' - 30: 1, # 'Ñ' - 27: 0, # 'ÑŽ' - 16: 0, # 'Ñ' - }, - 44: { # 'Б' - 37: 1, # 'Ð' - 44: 0, # 'Б' - 33: 1, # 'Ð’' - 46: 1, # 'Г' - 41: 0, # 'Д' - 48: 1, # 'Е' - 56: 0, # 'Ж' - 51: 0, # 'З' - 42: 1, # 'И' - 60: 0, # 'Й' - 36: 0, # 'К' - 49: 1, # 'Л' - 38: 1, # 'Ðœ' - 31: 1, # 'Ð' - 34: 1, # 'О' - 35: 0, # 'П' - 45: 1, # 'Р' - 32: 0, # 'С' - 40: 0, # 'Т' - 52: 1, # 'У' - 53: 0, # 'Ф' - 55: 0, # 'Ð¥' - 58: 0, # 'Ц' - 50: 0, # 'Ч' - 57: 0, # 'Ш' - 63: 0, # 'Щ' - 62: 1, # 'Ы' - 61: 0, # 'Ь' - 47: 
0,  # 'Э'
[remainder of the auto-generated RUSSIAN_LANG_MODEL bigram table removed by this commit: the rest of the current context block plus one block per Cyrillic letter context from 'В' through 'я', each mapping follow-letter orders to frequency classes 0–3]
-}
-
-# 255: Undefined characters that did not exist in training text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-# 251: Control characters
-
-# Character Mapping Table(s):
-IBM866_RUSSIAN_CHAR_TO_ORDER = {
[the 256 entries of the IBM866 byte-to-order map removed by this commit: control bytes, punctuation and digits map to the filler orders 251–255 listed above, while Latin letters, CP866 Cyrillic letters and box-drawing bytes map to letter orders or unused high orders]
-}
-
-IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM866',
-                                              language='Russian',
-                                              char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER,
-                                              language_model=RUSSIAN_LANG_MODEL,
-                                              typical_positive_ratio=0.976601,
-                                              keep_ascii_letters=False,
-                                              alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
-
-WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = {
- 64: 253, # '@' - 65: 142, # 'A' - 66: 143, # 'B' - 67: 144, # 'C' - 68: 145, # 'D' - 69: 146, # 'E' - 70: 147, # 'F' - 71: 148, # 'G' - 72: 149, # 'H' - 73: 150, # 'I' - 74: 151, # 'J' - 75: 152, # 'K' - 76: 74, # 'L' - 77: 153, # 'M' - 78: 75, # 'N' - 79: 154, # 'O' - 80: 155, # 'P' - 81: 156, # 'Q' - 82: 157, # 'R' - 83: 158, # 'S' - 84: 159, # 'T' - 85: 160, # 'U' - 86: 161, # 'V' - 87: 162, # 'W' - 88: 163, # 'X' - 89: 164, # 'Y' - 90: 165, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 71, # 'a' - 98: 172, # 'b' - 99: 66, # 'c' - 100: 173, # 'd' - 101: 65, # 'e' - 102: 174, # 'f' - 103: 76, # 'g' - 104: 175, # 'h' - 105: 64, # 'i' - 106: 176, # 'j' - 107: 177, # 'k' - 108: 77, # 'l' - 109: 72, # 'm' - 110: 178, # 'n' - 111: 69, # 'o' - 112: 67, # 'p' - 113: 179, # 'q' - 114: 78, # 'r' - 115: 73, # 's' - 116: 180, # 't' - 117: 181, # 'u' - 118: 79, # 'v' - 119: 182, # 'w' - 120: 183, # 'x' - 121: 184, # 'y' - 122: 185, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 191, # 'Ђ' - 129: 192, # 'Ѓ' - 130: 193, # '‚' - 131: 194, # 'Ñ“' - 132: 195, # '„' - 133: 196, # '…' - 134: 197, # '†' - 135: 198, # '‡' - 136: 199, # '€' - 137: 200, # '‰' - 138: 201, # 'Љ' - 139: 202, # '‹' - 140: 203, # 'Њ' - 141: 204, # 'ÐŒ' - 142: 205, # 'Ћ' - 143: 206, # 'Ð' - 144: 207, # 'Ñ’' - 145: 208, # '‘' - 146: 209, # '’' - 147: 210, # '“' - 148: 211, # 'â€' - 149: 212, # '•' - 150: 213, # '–' - 151: 214, # '—' - 152: 215, # None - 153: 216, # 'â„¢' - 154: 217, # 'Ñ™' - 155: 218, # '›' - 156: 219, # 'Ñš' - 157: 220, # 'Ñœ' - 158: 221, # 'Ñ›' - 159: 222, # 'ÑŸ' - 160: 223, # '\xa0' - 161: 224, # 'ÐŽ' - 162: 225, # 'Ñž' - 163: 226, # 'Ј' - 164: 227, # '¤' - 165: 228, # 'Ò' - 166: 229, # '¦' - 167: 230, # '§' - 168: 231, # 'Ð' - 169: 232, # '©' - 170: 233, # 'Є' - 171: 234, # '«' - 172: 235, # '¬' - 173: 236, # '\xad' - 174: 237, # '®' - 175: 238, # 'Ї' - 176: 239, # '°' - 177: 240, # '±' - 178: 241, # 'І' - 179: 242, # 'Ñ–' - 180: 243, # 'Ò‘' - 181: 244, # 'µ' - 182: 245, # '¶' - 183: 246, # '·' - 184: 68, # 'Ñ‘' - 185: 247, # 'â„–' - 186: 248, # 'Ñ”' - 187: 249, # '»' - 188: 250, # 'ј' - 189: 251, # 'Ð…' - 190: 252, # 'Ñ•' - 191: 253, # 'Ñ—' - 192: 37, # 'Ð' - 193: 44, # 'Б' - 194: 33, # 'Ð’' - 195: 46, # 'Г' - 196: 41, # 'Д' - 197: 48, # 'Е' - 198: 56, # 'Ж' - 199: 51, # 'З' - 200: 42, # 'И' - 201: 60, # 'Й' - 202: 36, # 'К' - 203: 49, # 'Л' - 204: 38, # 'Ðœ' - 205: 31, # 'Ð' - 206: 34, # 'О' - 207: 35, # 'П' - 208: 45, # 'Р' - 209: 32, # 'С' - 210: 40, # 'Т' - 211: 52, # 'У' - 212: 53, # 'Ф' - 213: 55, # 'Ð¥' - 214: 58, # 'Ц' - 215: 50, # 'Ч' - 216: 57, # 'Ш' - 217: 63, # 'Щ' - 218: 70, # 'Ъ' - 219: 62, # 'Ы' - 220: 61, # 'Ь' - 221: 47, # 'Э' - 222: 59, # 'Ю' - 223: 43, # 'Я' - 224: 3, # 'а' - 225: 21, # 'б' - 226: 10, # 'в' - 227: 19, # 'г' - 228: 13, # 'д' - 229: 2, # 'е' - 230: 24, # 'ж' - 231: 20, # 'з' - 232: 4, # 'и' - 233: 23, # 'й' - 234: 11, # 'к' - 235: 8, # 'л' - 236: 12, # 'м' - 237: 5, # 'н' - 238: 1, # 'о' - 239: 15, # 'п' - 240: 9, # 'Ñ€' - 241: 7, # 'Ñ' - 242: 6, # 'Ñ‚' - 243: 14, # 'у' - 244: 39, # 'Ñ„' - 245: 26, # 'Ñ…' - 246: 28, # 'ц' - 247: 22, # 'ч' - 248: 25, # 'ш' - 249: 29, # 'щ' - 250: 54, # 'ÑŠ' - 251: 18, # 'Ñ‹' - 252: 17, # 'ÑŒ' - 253: 30, # 'Ñ' - 254: 27, # 'ÑŽ' - 255: 16, # 'Ñ' -} - -WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', - language='Russian', - char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER, - 
language_model=RUSSIAN_LANG_MODEL, - typical_positive_ratio=0.976601, - keep_ascii_letters=False, - alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') - -IBM855_RUSSIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' - 64: 253, # '@' - 65: 142, # 'A' - 66: 143, # 'B' - 67: 144, # 'C' - 68: 145, # 'D' - 69: 146, # 'E' - 70: 147, # 'F' - 71: 148, # 'G' - 72: 149, # 'H' - 73: 150, # 'I' - 74: 151, # 'J' - 75: 152, # 'K' - 76: 74, # 'L' - 77: 153, # 'M' - 78: 75, # 'N' - 79: 154, # 'O' - 80: 155, # 'P' - 81: 156, # 'Q' - 82: 157, # 'R' - 83: 158, # 'S' - 84: 159, # 'T' - 85: 160, # 'U' - 86: 161, # 'V' - 87: 162, # 'W' - 88: 163, # 'X' - 89: 164, # 'Y' - 90: 165, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 71, # 'a' - 98: 172, # 'b' - 99: 66, # 'c' - 100: 173, # 'd' - 101: 65, # 'e' - 102: 174, # 'f' - 103: 76, # 'g' - 104: 175, # 'h' - 105: 64, # 'i' - 106: 176, # 'j' - 107: 177, # 'k' - 108: 77, # 'l' - 109: 72, # 'm' - 110: 178, # 'n' - 111: 69, # 'o' - 112: 67, # 'p' - 113: 179, # 'q' - 114: 78, # 'r' - 115: 73, # 's' - 116: 180, # 't' - 117: 181, # 'u' - 118: 79, # 'v' - 119: 182, # 'w' - 120: 183, # 'x' - 121: 184, # 'y' - 122: 185, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 191, # 'Ñ’' - 129: 192, # 'Ђ' - 130: 193, # 'Ñ“' - 131: 194, # 'Ѓ' - 132: 68, # 'Ñ‘' - 133: 195, # 'Ð' - 134: 196, # 'Ñ”' - 135: 197, # 'Є' - 136: 198, # 'Ñ•' - 137: 199, # 'Ð…' - 138: 200, # 'Ñ–' - 139: 201, # 'І' - 140: 202, # 'Ñ—' - 141: 203, # 'Ї' - 142: 204, # 'ј' - 143: 205, # 'Ј' - 144: 206, # 'Ñ™' - 145: 207, # 'Љ' - 146: 208, # 'Ñš' - 147: 209, # 'Њ' - 148: 210, # 'Ñ›' - 149: 211, # 'Ћ' - 150: 212, # 'Ñœ' - 151: 213, # 'ÐŒ' - 152: 214, # 'Ñž' - 153: 215, # 'ÐŽ' - 154: 216, # 'ÑŸ' - 155: 217, # 'Ð' - 156: 27, # 'ÑŽ' - 157: 59, # 'Ю' - 158: 54, # 'ÑŠ' - 159: 70, # 'Ъ' - 160: 3, # 'а' - 161: 37, # 'Ð' - 162: 21, # 'б' - 163: 44, # 'Б' - 164: 28, # 'ц' - 165: 58, # 'Ц' - 166: 13, # 'д' - 167: 41, # 'Д' - 168: 2, # 'е' - 169: 48, # 'Е' - 170: 39, # 'Ñ„' - 171: 53, # 'Ф' - 172: 19, # 'г' - 173: 46, # 'Г' - 174: 218, # '«' - 175: 219, # '»' - 176: 220, # 'â–‘' - 177: 221, # 'â–’' - 178: 222, # 'â–“' - 179: 223, # '│' - 180: 224, # '┤' - 181: 26, # 'Ñ…' - 182: 55, # 'Ð¥' - 183: 4, # 'и' - 184: 42, # 'И' - 185: 225, # 'â•£' - 186: 226, # 
'â•‘' - 187: 227, # 'â•—' - 188: 228, # 'â•' - 189: 23, # 'й' - 190: 60, # 'Й' - 191: 229, # 'â”' - 192: 230, # 'â””' - 193: 231, # 'â”´' - 194: 232, # '┬' - 195: 233, # '├' - 196: 234, # '─' - 197: 235, # '┼' - 198: 11, # 'к' - 199: 36, # 'К' - 200: 236, # 'â•š' - 201: 237, # 'â•”' - 202: 238, # 'â•©' - 203: 239, # '╦' - 204: 240, # 'â• ' - 205: 241, # 'â•' - 206: 242, # '╬' - 207: 243, # '¤' - 208: 8, # 'л' - 209: 49, # 'Л' - 210: 12, # 'м' - 211: 38, # 'Ðœ' - 212: 5, # 'н' - 213: 31, # 'Ð' - 214: 1, # 'о' - 215: 34, # 'О' - 216: 15, # 'п' - 217: 244, # '┘' - 218: 245, # '┌' - 219: 246, # 'â–ˆ' - 220: 247, # 'â–„' - 221: 35, # 'П' - 222: 16, # 'Ñ' - 223: 248, # 'â–€' - 224: 43, # 'Я' - 225: 9, # 'Ñ€' - 226: 45, # 'Р' - 227: 7, # 'Ñ' - 228: 32, # 'С' - 229: 6, # 'Ñ‚' - 230: 40, # 'Т' - 231: 14, # 'у' - 232: 52, # 'У' - 233: 24, # 'ж' - 234: 56, # 'Ж' - 235: 10, # 'в' - 236: 33, # 'Ð’' - 237: 17, # 'ÑŒ' - 238: 61, # 'Ь' - 239: 249, # 'â„–' - 240: 250, # '\xad' - 241: 18, # 'Ñ‹' - 242: 62, # 'Ы' - 243: 20, # 'з' - 244: 51, # 'З' - 245: 25, # 'ш' - 246: 57, # 'Ш' - 247: 30, # 'Ñ' - 248: 47, # 'Э' - 249: 29, # 'щ' - 250: 63, # 'Щ' - 251: 22, # 'ч' - 252: 50, # 'Ч' - 253: 251, # '§' - 254: 252, # 'â– ' - 255: 255, # '\xa0' -} - -IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM855', - language='Russian', - char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER, - language_model=RUSSIAN_LANG_MODEL, - typical_positive_ratio=0.976601, - keep_ascii_letters=False, - alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') - -KOI8_R_RUSSIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' 
- 64: 253, # '@' - 65: 142, # 'A' - 66: 143, # 'B' - 67: 144, # 'C' - 68: 145, # 'D' - 69: 146, # 'E' - 70: 147, # 'F' - 71: 148, # 'G' - 72: 149, # 'H' - 73: 150, # 'I' - 74: 151, # 'J' - 75: 152, # 'K' - 76: 74, # 'L' - 77: 153, # 'M' - 78: 75, # 'N' - 79: 154, # 'O' - 80: 155, # 'P' - 81: 156, # 'Q' - 82: 157, # 'R' - 83: 158, # 'S' - 84: 159, # 'T' - 85: 160, # 'U' - 86: 161, # 'V' - 87: 162, # 'W' - 88: 163, # 'X' - 89: 164, # 'Y' - 90: 165, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 71, # 'a' - 98: 172, # 'b' - 99: 66, # 'c' - 100: 173, # 'd' - 101: 65, # 'e' - 102: 174, # 'f' - 103: 76, # 'g' - 104: 175, # 'h' - 105: 64, # 'i' - 106: 176, # 'j' - 107: 177, # 'k' - 108: 77, # 'l' - 109: 72, # 'm' - 110: 178, # 'n' - 111: 69, # 'o' - 112: 67, # 'p' - 113: 179, # 'q' - 114: 78, # 'r' - 115: 73, # 's' - 116: 180, # 't' - 117: 181, # 'u' - 118: 79, # 'v' - 119: 182, # 'w' - 120: 183, # 'x' - 121: 184, # 'y' - 122: 185, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 191, # '─' - 129: 192, # '│' - 130: 193, # '┌' - 131: 194, # 'â”' - 132: 195, # 'â””' - 133: 196, # '┘' - 134: 197, # '├' - 135: 198, # '┤' - 136: 199, # '┬' - 137: 200, # 'â”´' - 138: 201, # '┼' - 139: 202, # 'â–€' - 140: 203, # 'â–„' - 141: 204, # 'â–ˆ' - 142: 205, # 'â–Œ' - 143: 206, # 'â–' - 144: 207, # 'â–‘' - 145: 208, # 'â–’' - 146: 209, # 'â–“' - 147: 210, # '⌠' - 148: 211, # 'â– ' - 149: 212, # '∙' - 150: 213, # '√' - 151: 214, # '≈' - 152: 215, # '≤' - 153: 216, # '≥' - 154: 217, # '\xa0' - 155: 218, # '⌡' - 156: 219, # '°' - 157: 220, # '²' - 158: 221, # '·' - 159: 222, # '÷' - 160: 223, # 'â•' - 161: 224, # 'â•‘' - 162: 225, # 'â•’' - 163: 68, # 'Ñ‘' - 164: 226, # 'â•“' - 165: 227, # 'â•”' - 166: 228, # 'â••' - 167: 229, # 'â•–' - 168: 230, # 'â•—' - 169: 231, # '╘' - 170: 232, # 'â•™' - 171: 233, # 'â•š' - 172: 234, # 'â•›' - 173: 235, # 'â•œ' - 174: 236, # 'â•' - 175: 237, # 'â•ž' - 176: 238, # 'â•Ÿ' - 177: 239, # 'â• ' - 178: 240, # 'â•¡' - 179: 241, # 'Ð' - 180: 242, # 'â•¢' - 181: 243, # 'â•£' - 182: 244, # '╤' - 183: 245, # 'â•¥' - 184: 246, # '╦' - 185: 247, # '╧' - 186: 248, # '╨' - 187: 249, # 'â•©' - 188: 250, # '╪' - 189: 251, # 'â•«' - 190: 252, # '╬' - 191: 253, # '©' - 192: 27, # 'ÑŽ' - 193: 3, # 'а' - 194: 21, # 'б' - 195: 28, # 'ц' - 196: 13, # 'д' - 197: 2, # 'е' - 198: 39, # 'Ñ„' - 199: 19, # 'г' - 200: 26, # 'Ñ…' - 201: 4, # 'и' - 202: 23, # 'й' - 203: 11, # 'к' - 204: 8, # 'л' - 205: 12, # 'м' - 206: 5, # 'н' - 207: 1, # 'о' - 208: 15, # 'п' - 209: 16, # 'Ñ' - 210: 9, # 'Ñ€' - 211: 7, # 'Ñ' - 212: 6, # 'Ñ‚' - 213: 14, # 'у' - 214: 24, # 'ж' - 215: 10, # 'в' - 216: 17, # 'ÑŒ' - 217: 18, # 'Ñ‹' - 218: 20, # 'з' - 219: 25, # 'ш' - 220: 30, # 'Ñ' - 221: 29, # 'щ' - 222: 22, # 'ч' - 223: 54, # 'ÑŠ' - 224: 59, # 'Ю' - 225: 37, # 'Ð' - 226: 44, # 'Б' - 227: 58, # 'Ц' - 228: 41, # 'Д' - 229: 48, # 'Е' - 230: 53, # 'Ф' - 231: 46, # 'Г' - 232: 55, # 'Ð¥' - 233: 42, # 'И' - 234: 60, # 'Й' - 235: 36, # 'К' - 236: 49, # 'Л' - 237: 38, # 'Ðœ' - 238: 31, # 'Ð' - 239: 34, # 'О' - 240: 35, # 'П' - 241: 43, # 'Я' - 242: 45, # 'Р' - 243: 32, # 'С' - 244: 40, # 'Т' - 245: 52, # 'У' - 246: 56, # 'Ж' - 247: 33, # 'Ð’' - 248: 61, # 'Ь' - 249: 62, # 'Ы' - 250: 51, # 'З' - 251: 57, # 'Ш' - 252: 47, # 'Э' - 253: 63, # 'Щ' - 254: 50, # 'Ч' - 255: 70, # 'Ъ' -} - -KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='KOI8-R', - language='Russian', - 
char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, - language_model=RUSSIAN_LANG_MODEL, - typical_positive_ratio=0.976601, - keep_ascii_letters=False, - alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') - -MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' - 64: 253, # '@' - 65: 142, # 'A' - 66: 143, # 'B' - 67: 144, # 'C' - 68: 145, # 'D' - 69: 146, # 'E' - 70: 147, # 'F' - 71: 148, # 'G' - 72: 149, # 'H' - 73: 150, # 'I' - 74: 151, # 'J' - 75: 152, # 'K' - 76: 74, # 'L' - 77: 153, # 'M' - 78: 75, # 'N' - 79: 154, # 'O' - 80: 155, # 'P' - 81: 156, # 'Q' - 82: 157, # 'R' - 83: 158, # 'S' - 84: 159, # 'T' - 85: 160, # 'U' - 86: 161, # 'V' - 87: 162, # 'W' - 88: 163, # 'X' - 89: 164, # 'Y' - 90: 165, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 71, # 'a' - 98: 172, # 'b' - 99: 66, # 'c' - 100: 173, # 'd' - 101: 65, # 'e' - 102: 174, # 'f' - 103: 76, # 'g' - 104: 175, # 'h' - 105: 64, # 'i' - 106: 176, # 'j' - 107: 177, # 'k' - 108: 77, # 'l' - 109: 72, # 'm' - 110: 178, # 'n' - 111: 69, # 'o' - 112: 67, # 'p' - 113: 179, # 'q' - 114: 78, # 'r' - 115: 73, # 's' - 116: 180, # 't' - 117: 181, # 'u' - 118: 79, # 'v' - 119: 182, # 'w' - 120: 183, # 'x' - 121: 184, # 'y' - 122: 185, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 37, # 'Ð' - 129: 44, # 'Б' - 130: 33, # 'Ð’' - 131: 46, # 'Г' - 132: 41, # 'Д' - 133: 48, # 'Е' - 134: 56, # 'Ж' - 135: 51, # 'З' - 136: 42, # 'И' - 137: 60, # 'Й' - 138: 36, # 'К' - 139: 49, # 'Л' - 140: 38, # 'Ðœ' - 141: 31, # 'Ð' - 142: 34, # 'О' - 143: 35, # 'П' - 144: 45, # 'Р' - 145: 32, # 'С' - 146: 40, # 'Т' - 147: 52, # 'У' - 148: 53, # 'Ф' - 149: 55, # 'Ð¥' - 150: 58, # 'Ц' - 151: 50, # 'Ч' - 152: 57, # 'Ш' - 153: 63, # 'Щ' - 154: 70, # 'Ъ' - 155: 62, # 'Ы' - 156: 61, # 'Ь' - 157: 47, # 'Э' - 158: 59, # 'Ю' - 159: 43, # 'Я' - 160: 191, # '†' - 161: 192, # '°' - 162: 193, # 'Ò' - 163: 194, # '£' - 164: 195, # '§' - 165: 196, # '•' - 166: 197, # '¶' - 167: 198, # 'І' - 168: 199, # '®' - 169: 200, # '©' - 170: 201, # 'â„¢' - 171: 202, # 'Ђ' - 172: 203, # 'Ñ’' - 173: 204, # '≠' - 174: 205, # 'Ѓ' - 175: 206, # 'Ñ“' - 176: 207, # '∞' - 177: 208, # '±' - 178: 209, # '≤' - 179: 210, # '≥' - 180: 211, # 'Ñ–' - 181: 212, # 'µ' - 182: 213, # 'Ò‘' - 183: 214, # 'Ј' - 184: 215, # 'Є' - 
185: 216, # 'Ñ”' - 186: 217, # 'Ї' - 187: 218, # 'Ñ—' - 188: 219, # 'Љ' - 189: 220, # 'Ñ™' - 190: 221, # 'Њ' - 191: 222, # 'Ñš' - 192: 223, # 'ј' - 193: 224, # 'Ð…' - 194: 225, # '¬' - 195: 226, # '√' - 196: 227, # 'Æ’' - 197: 228, # '≈' - 198: 229, # '∆' - 199: 230, # '«' - 200: 231, # '»' - 201: 232, # '…' - 202: 233, # '\xa0' - 203: 234, # 'Ћ' - 204: 235, # 'Ñ›' - 205: 236, # 'ÐŒ' - 206: 237, # 'Ñœ' - 207: 238, # 'Ñ•' - 208: 239, # '–' - 209: 240, # '—' - 210: 241, # '“' - 211: 242, # 'â€' - 212: 243, # '‘' - 213: 244, # '’' - 214: 245, # '÷' - 215: 246, # '„' - 216: 247, # 'ÐŽ' - 217: 248, # 'Ñž' - 218: 249, # 'Ð' - 219: 250, # 'ÑŸ' - 220: 251, # 'â„–' - 221: 252, # 'Ð' - 222: 68, # 'Ñ‘' - 223: 16, # 'Ñ' - 224: 3, # 'а' - 225: 21, # 'б' - 226: 10, # 'в' - 227: 19, # 'г' - 228: 13, # 'д' - 229: 2, # 'е' - 230: 24, # 'ж' - 231: 20, # 'з' - 232: 4, # 'и' - 233: 23, # 'й' - 234: 11, # 'к' - 235: 8, # 'л' - 236: 12, # 'м' - 237: 5, # 'н' - 238: 1, # 'о' - 239: 15, # 'п' - 240: 9, # 'Ñ€' - 241: 7, # 'Ñ' - 242: 6, # 'Ñ‚' - 243: 14, # 'у' - 244: 39, # 'Ñ„' - 245: 26, # 'Ñ…' - 246: 28, # 'ц' - 247: 22, # 'ч' - 248: 25, # 'ш' - 249: 29, # 'щ' - 250: 54, # 'ÑŠ' - 251: 18, # 'Ñ‹' - 252: 17, # 'ÑŒ' - 253: 30, # 'Ñ' - 254: 27, # 'ÑŽ' - 255: 255, # '€' -} - -MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='MacCyrillic', - language='Russian', - char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, - language_model=RUSSIAN_LANG_MODEL, - typical_positive_ratio=0.976601, - keep_ascii_letters=False, - alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') - -ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 254, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 254, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 253, # ' ' - 33: 253, # '!' - 34: 253, # '"' - 35: 253, # '#' - 36: 253, # '$' - 37: 253, # '%' - 38: 253, # '&' - 39: 253, # "'" - 40: 253, # '(' - 41: 253, # ')' - 42: 253, # '*' - 43: 253, # '+' - 44: 253, # ',' - 45: 253, # '-' - 46: 253, # '.' - 47: 253, # '/' - 48: 252, # '0' - 49: 252, # '1' - 50: 252, # '2' - 51: 252, # '3' - 52: 252, # '4' - 53: 252, # '5' - 54: 252, # '6' - 55: 252, # '7' - 56: 252, # '8' - 57: 252, # '9' - 58: 253, # ':' - 59: 253, # ';' - 60: 253, # '<' - 61: 253, # '=' - 62: 253, # '>' - 63: 253, # '?' 
- 64: 253, # '@' - 65: 142, # 'A' - 66: 143, # 'B' - 67: 144, # 'C' - 68: 145, # 'D' - 69: 146, # 'E' - 70: 147, # 'F' - 71: 148, # 'G' - 72: 149, # 'H' - 73: 150, # 'I' - 74: 151, # 'J' - 75: 152, # 'K' - 76: 74, # 'L' - 77: 153, # 'M' - 78: 75, # 'N' - 79: 154, # 'O' - 80: 155, # 'P' - 81: 156, # 'Q' - 82: 157, # 'R' - 83: 158, # 'S' - 84: 159, # 'T' - 85: 160, # 'U' - 86: 161, # 'V' - 87: 162, # 'W' - 88: 163, # 'X' - 89: 164, # 'Y' - 90: 165, # 'Z' - 91: 253, # '[' - 92: 253, # '\\' - 93: 253, # ']' - 94: 253, # '^' - 95: 253, # '_' - 96: 253, # '`' - 97: 71, # 'a' - 98: 172, # 'b' - 99: 66, # 'c' - 100: 173, # 'd' - 101: 65, # 'e' - 102: 174, # 'f' - 103: 76, # 'g' - 104: 175, # 'h' - 105: 64, # 'i' - 106: 176, # 'j' - 107: 177, # 'k' - 108: 77, # 'l' - 109: 72, # 'm' - 110: 178, # 'n' - 111: 69, # 'o' - 112: 67, # 'p' - 113: 179, # 'q' - 114: 78, # 'r' - 115: 73, # 's' - 116: 180, # 't' - 117: 181, # 'u' - 118: 79, # 'v' - 119: 182, # 'w' - 120: 183, # 'x' - 121: 184, # 'y' - 122: 185, # 'z' - 123: 253, # '{' - 124: 253, # '|' - 125: 253, # '}' - 126: 253, # '~' - 127: 253, # '\x7f' - 128: 191, # '\x80' - 129: 192, # '\x81' - 130: 193, # '\x82' - 131: 194, # '\x83' - 132: 195, # '\x84' - 133: 196, # '\x85' - 134: 197, # '\x86' - 135: 198, # '\x87' - 136: 199, # '\x88' - 137: 200, # '\x89' - 138: 201, # '\x8a' - 139: 202, # '\x8b' - 140: 203, # '\x8c' - 141: 204, # '\x8d' - 142: 205, # '\x8e' - 143: 206, # '\x8f' - 144: 207, # '\x90' - 145: 208, # '\x91' - 146: 209, # '\x92' - 147: 210, # '\x93' - 148: 211, # '\x94' - 149: 212, # '\x95' - 150: 213, # '\x96' - 151: 214, # '\x97' - 152: 215, # '\x98' - 153: 216, # '\x99' - 154: 217, # '\x9a' - 155: 218, # '\x9b' - 156: 219, # '\x9c' - 157: 220, # '\x9d' - 158: 221, # '\x9e' - 159: 222, # '\x9f' - 160: 223, # '\xa0' - 161: 224, # 'Ð' - 162: 225, # 'Ђ' - 163: 226, # 'Ѓ' - 164: 227, # 'Є' - 165: 228, # 'Ð…' - 166: 229, # 'І' - 167: 230, # 'Ї' - 168: 231, # 'Ј' - 169: 232, # 'Љ' - 170: 233, # 'Њ' - 171: 234, # 'Ћ' - 172: 235, # 'ÐŒ' - 173: 236, # '\xad' - 174: 237, # 'ÐŽ' - 175: 238, # 'Ð' - 176: 37, # 'Ð' - 177: 44, # 'Б' - 178: 33, # 'Ð’' - 179: 46, # 'Г' - 180: 41, # 'Д' - 181: 48, # 'Е' - 182: 56, # 'Ж' - 183: 51, # 'З' - 184: 42, # 'И' - 185: 60, # 'Й' - 186: 36, # 'К' - 187: 49, # 'Л' - 188: 38, # 'Ðœ' - 189: 31, # 'Ð' - 190: 34, # 'О' - 191: 35, # 'П' - 192: 45, # 'Р' - 193: 32, # 'С' - 194: 40, # 'Т' - 195: 52, # 'У' - 196: 53, # 'Ф' - 197: 55, # 'Ð¥' - 198: 58, # 'Ц' - 199: 50, # 'Ч' - 200: 57, # 'Ш' - 201: 63, # 'Щ' - 202: 70, # 'Ъ' - 203: 62, # 'Ы' - 204: 61, # 'Ь' - 205: 47, # 'Э' - 206: 59, # 'Ю' - 207: 43, # 'Я' - 208: 3, # 'а' - 209: 21, # 'б' - 210: 10, # 'в' - 211: 19, # 'г' - 212: 13, # 'д' - 213: 2, # 'е' - 214: 24, # 'ж' - 215: 20, # 'з' - 216: 4, # 'и' - 217: 23, # 'й' - 218: 11, # 'к' - 219: 8, # 'л' - 220: 12, # 'м' - 221: 5, # 'н' - 222: 1, # 'о' - 223: 15, # 'п' - 224: 9, # 'Ñ€' - 225: 7, # 'Ñ' - 226: 6, # 'Ñ‚' - 227: 14, # 'у' - 228: 39, # 'Ñ„' - 229: 26, # 'Ñ…' - 230: 28, # 'ц' - 231: 22, # 'ч' - 232: 25, # 'ш' - 233: 29, # 'щ' - 234: 54, # 'ÑŠ' - 235: 18, # 'Ñ‹' - 236: 17, # 'ÑŒ' - 237: 30, # 'Ñ' - 238: 27, # 'ÑŽ' - 239: 16, # 'Ñ' - 240: 239, # 'â„–' - 241: 68, # 'Ñ‘' - 242: 240, # 'Ñ’' - 243: 241, # 'Ñ“' - 244: 242, # 'Ñ”' - 245: 243, # 'Ñ•' - 246: 244, # 'Ñ–' - 247: 245, # 'Ñ—' - 248: 246, # 'ј' - 249: 247, # 'Ñ™' - 250: 248, # 'Ñš' - 251: 249, # 'Ñ›' - 252: 250, # 'Ñœ' - 253: 251, # '§' - 254: 252, # 'Ñž' - 255: 255, # 'ÑŸ' -} - -ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', - 
language='Russian', - char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, - language_model=RUSSIAN_LANG_MODEL, - typical_positive_ratio=0.976601, - keep_ascii_letters=False, - alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py deleted file mode 100644 index 9a37db5..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py +++ /dev/null @@ -1,4383 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel - - -# 3: Positive -# 2: Likely -# 1: Unlikely -# 0: Negative - -THAI_LANG_MODEL = { - 5: { # 'à¸' - 5: 2, # 'à¸' - 30: 2, # 'ข' - 24: 2, # 'ค' - 8: 2, # 'ง' - 26: 2, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 3, # 'ฎ' - 57: 2, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 2, # 'ณ' - 20: 2, # 'ด' - 19: 3, # 'ต' - 44: 0, # 'ถ' - 14: 2, # 'ท' - 48: 0, # 'ธ' - 3: 2, # 'น' - 17: 1, # 'บ' - 25: 2, # 'ป' - 39: 1, # 'ผ' - 62: 1, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 1, # 'ภ' - 9: 2, # 'ม' - 16: 1, # 'ย' - 2: 3, # 'ร' - 61: 2, # 'ฤ' - 15: 3, # 'ล' - 12: 3, # 'ว' - 42: 2, # 'ศ' - 46: 3, # 'ษ' - 18: 2, # 'ส' - 21: 2, # 'ห' - 4: 3, # 'อ' - 63: 1, # 'ฯ' - 22: 2, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 3, # 'ำ' - 23: 3, # 'ิ' - 13: 3, # 'ี' - 40: 0, # 'ึ' - 27: 2, # 'ื' - 32: 2, # 'ุ' - 35: 1, # 'ู' - 11: 2, # 'เ' - 28: 2, # 'à¹' - 41: 1, # 'โ' - 29: 1, # 'ใ' - 33: 2, # 'ไ' - 50: 1, # 'ๆ' - 37: 3, # '็' - 6: 3, # '่' - 7: 3, # '้' - 38: 2, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 30: { # 'ข' - 5: 1, # 'à¸' - 30: 0, # 'ข' - 24: 1, # 'ค' - 8: 1, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 2, # 'ณ' - 20: 0, # 'ด' - 19: 2, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 2, # 'น' - 17: 1, # 'บ' - 25: 1, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 2, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 2, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 1, # 'ห' - 4: 3, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 0, # 'ิ' - 13: 2, # 'ี' - 40: 3, # 'ึ' - 27: 1, # 'ื' - 32: 1, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 2, # '่' - 7: 3, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 24: { # 'ค' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 2, # 'ค' - 8: 2, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 2, # 'ณ' - 20: 2, # 'ด' - 19: 2, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 3, # 'น' - 17: 0, # 'บ' - 25: 1, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 2, # 'ม' - 16: 2, # 'ย' - 2: 3, # 'ร' - 61: 0, # 'ฤ' - 15: 3, # 'ล' - 12: 3, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 0, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 2, # 'ะ' - 10: 3, # 'ั' - 1: 2, # 'า' - 36: 3, # 'ำ' - 23: 3, # 'ิ' - 13: 2, # 'ี' - 40: 0, # 'ึ' - 27: 3, # 'ื' - 32: 3, # 'ุ' - 35: 2, # 'ู' - 11: 1, # 'เ' - 28: 0, # 'à¹' - 41: 3, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 3, # '่' - 
7: 3, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 8: { # 'ง' - 5: 3, # 'à¸' - 30: 2, # 'ข' - 24: 3, # 'ค' - 8: 2, # 'ง' - 26: 2, # 'จ' - 52: 1, # 'ฉ' - 34: 2, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 2, # 'ด' - 19: 2, # 'ต' - 44: 1, # 'ถ' - 14: 3, # 'ท' - 48: 1, # 'ธ' - 3: 3, # 'น' - 17: 2, # 'บ' - 25: 2, # 'ป' - 39: 2, # 'ผ' - 62: 1, # 'à¸' - 31: 2, # 'พ' - 54: 0, # 'ฟ' - 45: 1, # 'ภ' - 9: 2, # 'ม' - 16: 1, # 'ย' - 2: 2, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 2, # 'ว' - 42: 2, # 'ศ' - 46: 1, # 'ษ' - 18: 3, # 'ส' - 21: 3, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 1, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 2, # 'ิ' - 13: 1, # 'ี' - 40: 0, # 'ึ' - 27: 1, # 'ื' - 32: 1, # 'ุ' - 35: 0, # 'ู' - 11: 3, # 'เ' - 28: 2, # 'à¹' - 41: 1, # 'โ' - 29: 2, # 'ใ' - 33: 2, # 'ไ' - 50: 3, # 'ๆ' - 37: 0, # '็' - 6: 2, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 26: { # 'จ' - 5: 2, # 'à¸' - 30: 1, # 'ข' - 24: 0, # 'ค' - 8: 2, # 'ง' - 26: 3, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 2, # 'ด' - 19: 1, # 'ต' - 44: 1, # 'ถ' - 14: 2, # 'ท' - 48: 0, # 'ธ' - 3: 3, # 'น' - 17: 1, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 1, # 'ย' - 2: 3, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 2, # 'ส' - 21: 1, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 3, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 3, # 'ำ' - 23: 2, # 'ิ' - 13: 1, # 'ี' - 40: 3, # 'ึ' - 27: 1, # 'ื' - 32: 3, # 'ุ' - 35: 2, # 'ู' - 11: 1, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 1, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 2, # '่' - 7: 2, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 52: { # 'ฉ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 3, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 3, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 1, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 1, # 'ะ' - 10: 1, # 'ั' - 1: 1, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 1, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 1, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 34: { # 'ช' - 5: 1, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 1, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 1, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 3, # 'น' - 17: 2, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 2, # 'ม' - 16: 1, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 2, # 
'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 2, # 'ั' - 1: 3, # 'า' - 36: 1, # 'ำ' - 23: 3, # 'ิ' - 13: 2, # 'ี' - 40: 0, # 'ึ' - 27: 3, # 'ื' - 32: 3, # 'ุ' - 35: 1, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 3, # '่' - 7: 3, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 51: { # 'ซ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 1, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 1, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 0, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 1, # 'ั' - 1: 1, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 2, # 'ี' - 40: 3, # 'ึ' - 27: 2, # 'ื' - 32: 1, # 'ุ' - 35: 1, # 'ู' - 11: 1, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 1, # '่' - 7: 2, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 47: { # 'à¸' - 5: 1, # 'à¸' - 30: 1, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 0, # 'ซ' - 47: 3, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 1, # 'บ' - 25: 1, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 1, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 2, # 'ห' - 4: 1, # 'อ' - 63: 0, # 'ฯ' - 22: 1, # 'ะ' - 10: 2, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 1, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 1, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 0, # 'ไ' - 50: 1, # 'ๆ' - 37: 0, # '็' - 6: 2, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 58: { # 'ฎ' - 5: 2, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 1, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 2, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 57: { # 'à¸' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 
48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 3, # 'ิ' - 13: 1, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 49: { # 'à¸' - 5: 1, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 2, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 2, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 1, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 1, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 0, # 'ิ' - 13: 0, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 53: { # 'ฑ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 2, # 'ิ' - 13: 0, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 55: { # 'ฒ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 3, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 0, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 43: { # 
'ณ' - 5: 1, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 3, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 3, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 1, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 3, # 'ะ' - 10: 0, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 2, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 1, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 1, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 20: { # 'ด' - 5: 2, # 'à¸' - 30: 2, # 'ข' - 24: 2, # 'ค' - 8: 3, # 'ง' - 26: 2, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 1, # 'ด' - 19: 2, # 'ต' - 44: 1, # 'ถ' - 14: 2, # 'ท' - 48: 0, # 'ธ' - 3: 1, # 'น' - 17: 1, # 'บ' - 25: 1, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 1, # 'ภ' - 9: 2, # 'ม' - 16: 3, # 'ย' - 2: 2, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 2, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 2, # 'ส' - 21: 2, # 'ห' - 4: 1, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 3, # 'ั' - 1: 2, # 'า' - 36: 2, # 'ำ' - 23: 3, # 'ิ' - 13: 3, # 'ี' - 40: 1, # 'ึ' - 27: 2, # 'ื' - 32: 3, # 'ุ' - 35: 2, # 'ู' - 11: 2, # 'เ' - 28: 2, # 'à¹' - 41: 1, # 'โ' - 29: 2, # 'ใ' - 33: 2, # 'ไ' - 50: 2, # 'ๆ' - 37: 2, # '็' - 6: 1, # '่' - 7: 3, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 19: { # 'ต' - 5: 2, # 'à¸' - 30: 1, # 'ข' - 24: 1, # 'ค' - 8: 0, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 1, # 'ด' - 19: 1, # 'ต' - 44: 2, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 2, # 'น' - 17: 1, # 'บ' - 25: 1, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 2, # 'ภ' - 9: 1, # 'ม' - 16: 1, # 'ย' - 2: 3, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 3, # 'ส' - 21: 0, # 'ห' - 4: 3, # 'อ' - 63: 1, # 'ฯ' - 22: 2, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 2, # 'ำ' - 23: 3, # 'ิ' - 13: 2, # 'ี' - 40: 1, # 'ึ' - 27: 1, # 'ื' - 32: 3, # 'ุ' - 35: 2, # 'ู' - 11: 1, # 'เ' - 28: 1, # 'à¹' - 41: 1, # 'โ' - 29: 1, # 'ใ' - 33: 1, # 'ไ' - 50: 0, # 'ๆ' - 37: 2, # '็' - 6: 3, # '่' - 7: 3, # '้' - 38: 2, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 44: { # 'ถ' - 5: 1, # 'à¸' - 30: 0, # 'ข' - 24: 1, # 'ค' - 8: 0, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 1, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 1, # 'น' - 17: 2, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 1, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 0, # 'ห' - 4: 1, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 2, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 2, 
# 'ิ' - 13: 1, # 'ี' - 40: 3, # 'ึ' - 27: 2, # 'ื' - 32: 2, # 'ุ' - 35: 3, # 'ู' - 11: 1, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 1, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 2, # '่' - 7: 3, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 14: { # 'ท' - 5: 1, # 'à¸' - 30: 1, # 'ข' - 24: 3, # 'ค' - 8: 1, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 2, # 'ด' - 19: 1, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 3, # 'ธ' - 3: 3, # 'น' - 17: 2, # 'บ' - 25: 2, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 2, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 3, # 'ย' - 2: 3, # 'ร' - 61: 1, # 'ฤ' - 15: 1, # 'ล' - 12: 2, # 'ว' - 42: 3, # 'ศ' - 46: 1, # 'ษ' - 18: 1, # 'ส' - 21: 0, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 2, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 3, # 'ำ' - 23: 2, # 'ิ' - 13: 3, # 'ี' - 40: 2, # 'ึ' - 27: 1, # 'ื' - 32: 3, # 'ุ' - 35: 1, # 'ู' - 11: 0, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 3, # '่' - 7: 3, # '้' - 38: 2, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 48: { # 'ธ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 1, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 1, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 2, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 2, # 'า' - 36: 0, # 'ำ' - 23: 3, # 'ิ' - 13: 3, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 2, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 3: { # 'น' - 5: 3, # 'à¸' - 30: 2, # 'ข' - 24: 3, # 'ค' - 8: 1, # 'ง' - 26: 2, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 1, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 3, # 'ด' - 19: 3, # 'ต' - 44: 2, # 'ถ' - 14: 3, # 'ท' - 48: 3, # 'ธ' - 3: 2, # 'น' - 17: 2, # 'บ' - 25: 2, # 'ป' - 39: 2, # 'ผ' - 62: 0, # 'à¸' - 31: 2, # 'พ' - 54: 1, # 'ฟ' - 45: 1, # 'ภ' - 9: 2, # 'ม' - 16: 2, # 'ย' - 2: 2, # 'ร' - 61: 1, # 'ฤ' - 15: 2, # 'ล' - 12: 3, # 'ว' - 42: 1, # 'ศ' - 46: 0, # 'ษ' - 18: 2, # 'ส' - 21: 2, # 'ห' - 4: 3, # 'อ' - 63: 1, # 'ฯ' - 22: 2, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 3, # 'ำ' - 23: 3, # 'ิ' - 13: 3, # 'ี' - 40: 3, # 'ึ' - 27: 3, # 'ื' - 32: 3, # 'ุ' - 35: 2, # 'ู' - 11: 3, # 'เ' - 28: 2, # 'à¹' - 41: 3, # 'โ' - 29: 3, # 'ใ' - 33: 3, # 'ไ' - 50: 2, # 'ๆ' - 37: 1, # '็' - 6: 3, # '่' - 7: 3, # '้' - 38: 2, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 17: { # 'บ' - 5: 3, # 'à¸' - 30: 2, # 'ข' - 24: 2, # 'ค' - 8: 1, # 'ง' - 26: 1, # 'จ' - 52: 1, # 'ฉ' - 34: 1, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 1, # 'ด' - 19: 2, # 'ต' - 44: 1, # 'ถ' - 14: 3, # 'ท' - 48: 0, # 'ธ' - 3: 3, # 'น' - 17: 3, # 'บ' - 25: 2, # 'ป' - 39: 2, # 'ผ' - 62: 0, # 'à¸' - 
31: 1, # 'พ' - 54: 1, # 'ฟ' - 45: 1, # 'ภ' - 9: 1, # 'ม' - 16: 0, # 'ย' - 2: 3, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 3, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 2, # 'ส' - 21: 2, # 'ห' - 4: 2, # 'อ' - 63: 1, # 'ฯ' - 22: 0, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 2, # 'ำ' - 23: 2, # 'ิ' - 13: 2, # 'ี' - 40: 0, # 'ึ' - 27: 2, # 'ื' - 32: 3, # 'ุ' - 35: 2, # 'ู' - 11: 2, # 'เ' - 28: 2, # 'à¹' - 41: 1, # 'โ' - 29: 2, # 'ใ' - 33: 2, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 2, # '่' - 7: 2, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 25: { # 'ป' - 5: 2, # 'à¸' - 30: 0, # 'ข' - 24: 1, # 'ค' - 8: 0, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 1, # 'ฎ' - 57: 3, # 'à¸' - 49: 1, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 1, # 'ด' - 19: 1, # 'ต' - 44: 1, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 2, # 'น' - 17: 0, # 'บ' - 25: 1, # 'ป' - 39: 1, # 'ผ' - 62: 1, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 0, # 'ย' - 2: 3, # 'ร' - 61: 0, # 'ฤ' - 15: 3, # 'ล' - 12: 1, # 'ว' - 42: 0, # 'ศ' - 46: 1, # 'ษ' - 18: 2, # 'ส' - 21: 1, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 1, # 'ะ' - 10: 3, # 'ั' - 1: 1, # 'า' - 36: 0, # 'ำ' - 23: 2, # 'ิ' - 13: 3, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 1, # 'ุ' - 35: 0, # 'ู' - 11: 1, # 'เ' - 28: 2, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 2, # 'ไ' - 50: 0, # 'ๆ' - 37: 3, # '็' - 6: 1, # '่' - 7: 2, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 39: { # 'ผ' - 5: 1, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 1, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 2, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 2, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 3, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 1, # 'ะ' - 10: 1, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 2, # 'ิ' - 13: 0, # 'ี' - 40: 0, # 'ึ' - 27: 1, # 'ื' - 32: 0, # 'ุ' - 35: 3, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 3, # '่' - 7: 1, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 62: { # 'à¸' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 0, # 'ท' - 48: 0, # 'ธ' - 3: 1, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 1, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 0, # 'ิ' - 13: 1, # 'ี' - 40: 2, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 2, # '่' - 7: 1, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 31: { # 'พ' - 5: 1, # 'à¸' - 30: 1, # 'ข' - 24: 1, # 'ค' - 8: 1, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' 
- 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 1, # 'ณ' - 20: 1, # 'ด' - 19: 1, # 'ต' - 44: 0, # 'ถ' - 14: 2, # 'ท' - 48: 1, # 'ธ' - 3: 3, # 'น' - 17: 2, # 'บ' - 25: 0, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 2, # 'ย' - 2: 3, # 'ร' - 61: 2, # 'ฤ' - 15: 2, # 'ล' - 12: 2, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 1, # 'ห' - 4: 2, # 'อ' - 63: 1, # 'ฯ' - 22: 0, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 3, # 'ิ' - 13: 2, # 'ี' - 40: 1, # 'ึ' - 27: 3, # 'ื' - 32: 1, # 'ุ' - 35: 2, # 'ู' - 11: 1, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 1, # 'ใ' - 33: 1, # 'ไ' - 50: 0, # 'ๆ' - 37: 1, # '็' - 6: 0, # '่' - 7: 1, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 54: { # 'ฟ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 1, # 'ต' - 44: 0, # 'ถ' - 14: 1, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 2, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 1, # 'ส' - 21: 0, # 'ห' - 4: 1, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 2, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 1, # 'ี' - 40: 0, # 'ึ' - 27: 1, # 'ื' - 32: 1, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 1, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 2, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 45: { # 'ภ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 1, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 0, # 'ต' - 44: 0, # 'ถ' - 14: 3, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 0, # 'ม' - 16: 0, # 'ย' - 2: 1, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 0, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 1, # 'ิ' - 13: 0, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 2, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 9: { # 'ม' - 5: 2, # 'à¸' - 30: 2, # 'ข' - 24: 2, # 'ค' - 8: 2, # 'ง' - 26: 2, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 1, # 'ณ' - 20: 2, # 'ด' - 19: 2, # 'ต' - 44: 1, # 'ถ' - 14: 2, # 'ท' - 48: 1, # 'ธ' - 3: 3, # 'น' - 17: 2, # 'บ' - 25: 2, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 3, # 'พ' - 54: 0, # 'ฟ' - 45: 1, # 'ภ' - 9: 2, # 'ม' - 16: 1, # 'ย' - 2: 2, # 'ร' - 61: 2, # 'ฤ' - 15: 2, # 'ล' - 12: 2, # 'ว' - 42: 1, # 'ศ' - 46: 1, # 'ษ' - 18: 3, # 'ส' - 21: 3, # 'ห' - 4: 3, # 'อ' - 63: 0, # 'ฯ' - 22: 1, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 3, # 'ิ' - 13: 3, # 'ี' - 40: 0, # 'ึ' - 27: 3, # 'ื' - 32: 3, # 'ุ' - 35: 3, # 'ู' - 11: 2, # 
'เ' - 28: 2, # 'à¹' - 41: 2, # 'โ' - 29: 2, # 'ใ' - 33: 2, # 'ไ' - 50: 1, # 'ๆ' - 37: 1, # '็' - 6: 3, # '่' - 7: 2, # '้' - 38: 1, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 16: { # 'ย' - 5: 3, # 'à¸' - 30: 1, # 'ข' - 24: 2, # 'ค' - 8: 3, # 'ง' - 26: 2, # 'จ' - 52: 0, # 'ฉ' - 34: 2, # 'ช' - 51: 0, # 'ซ' - 47: 2, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 2, # 'ด' - 19: 2, # 'ต' - 44: 1, # 'ถ' - 14: 2, # 'ท' - 48: 1, # 'ธ' - 3: 3, # 'น' - 17: 3, # 'บ' - 25: 1, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 1, # 'พ' - 54: 0, # 'ฟ' - 45: 1, # 'ภ' - 9: 2, # 'ม' - 16: 0, # 'ย' - 2: 2, # 'ร' - 61: 0, # 'ฤ' - 15: 1, # 'ล' - 12: 3, # 'ว' - 42: 1, # 'ศ' - 46: 0, # 'ษ' - 18: 2, # 'ส' - 21: 1, # 'ห' - 4: 2, # 'อ' - 63: 0, # 'ฯ' - 22: 2, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 2, # 'ิ' - 13: 3, # 'ี' - 40: 1, # 'ึ' - 27: 2, # 'ื' - 32: 2, # 'ุ' - 35: 3, # 'ู' - 11: 2, # 'เ' - 28: 1, # 'à¹' - 41: 1, # 'โ' - 29: 2, # 'ใ' - 33: 2, # 'ไ' - 50: 2, # 'ๆ' - 37: 1, # '็' - 6: 3, # '่' - 7: 2, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 2: { # 'ร' - 5: 3, # 'à¸' - 30: 2, # 'ข' - 24: 2, # 'ค' - 8: 3, # 'ง' - 26: 2, # 'จ' - 52: 0, # 'ฉ' - 34: 2, # 'ช' - 51: 1, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 3, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 3, # 'ณ' - 20: 2, # 'ด' - 19: 2, # 'ต' - 44: 3, # 'ถ' - 14: 3, # 'ท' - 48: 1, # 'ธ' - 3: 2, # 'น' - 17: 2, # 'บ' - 25: 3, # 'ป' - 39: 2, # 'ผ' - 62: 1, # 'à¸' - 31: 2, # 'พ' - 54: 1, # 'ฟ' - 45: 1, # 'ภ' - 9: 3, # 'ม' - 16: 2, # 'ย' - 2: 3, # 'ร' - 61: 0, # 'ฤ' - 15: 2, # 'ล' - 12: 3, # 'ว' - 42: 2, # 'ศ' - 46: 2, # 'ษ' - 18: 2, # 'ส' - 21: 2, # 'ห' - 4: 3, # 'อ' - 63: 1, # 'ฯ' - 22: 3, # 'ะ' - 10: 3, # 'ั' - 1: 3, # 'า' - 36: 0, # 'ำ' - 23: 3, # 'ิ' - 13: 3, # 'ี' - 40: 2, # 'ึ' - 27: 3, # 'ื' - 32: 3, # 'ุ' - 35: 3, # 'ู' - 11: 3, # 'เ' - 28: 3, # 'à¹' - 41: 1, # 'โ' - 29: 2, # 'ใ' - 33: 1, # 'ไ' - 50: 0, # 'ๆ' - 37: 3, # '็' - 6: 3, # '่' - 7: 3, # '้' - 38: 3, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 61: { # 'ฤ' - 5: 0, # 'à¸' - 30: 0, # 'ข' - 24: 0, # 'ค' - 8: 0, # 'ง' - 26: 0, # 'จ' - 52: 0, # 'ฉ' - 34: 0, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 0, # 'ด' - 19: 2, # 'ต' - 44: 0, # 'ถ' - 14: 2, # 'ท' - 48: 0, # 'ธ' - 3: 0, # 'น' - 17: 0, # 'บ' - 25: 0, # 'ป' - 39: 0, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 0, # 'ภ' - 9: 1, # 'ม' - 16: 0, # 'ย' - 2: 0, # 'ร' - 61: 0, # 'ฤ' - 15: 0, # 'ล' - 12: 0, # 'ว' - 42: 0, # 'ศ' - 46: 2, # 'ษ' - 18: 0, # 'ส' - 21: 0, # 'ห' - 4: 0, # 'อ' - 63: 0, # 'ฯ' - 22: 0, # 'ะ' - 10: 0, # 'ั' - 1: 0, # 'า' - 36: 0, # 'ำ' - 23: 0, # 'ิ' - 13: 0, # 'ี' - 40: 0, # 'ึ' - 27: 0, # 'ื' - 32: 0, # 'ุ' - 35: 0, # 'ู' - 11: 0, # 'เ' - 28: 0, # 'à¹' - 41: 0, # 'โ' - 29: 0, # 'ใ' - 33: 0, # 'ไ' - 50: 0, # 'ๆ' - 37: 0, # '็' - 6: 0, # '่' - 7: 0, # '้' - 38: 0, # '์' - 56: 0, # '๑' - 59: 0, # '๒' - 60: 0, # '๕' - }, - 15: { # 'ล' - 5: 2, # 'à¸' - 30: 3, # 'ข' - 24: 1, # 'ค' - 8: 3, # 'ง' - 26: 1, # 'จ' - 52: 0, # 'ฉ' - 34: 1, # 'ช' - 51: 0, # 'ซ' - 47: 0, # 'à¸' - 58: 0, # 'ฎ' - 57: 0, # 'à¸' - 49: 0, # 'à¸' - 53: 0, # 'ฑ' - 55: 0, # 'ฒ' - 43: 0, # 'ณ' - 20: 2, # 'ด' - 19: 2, # 'ต' - 44: 1, # 'ถ' - 14: 2, # 'ท' - 48: 0, # 'ธ' - 3: 1, # 'น' - 17: 2, # 'บ' - 25: 2, # 'ป' - 39: 1, # 'ผ' - 62: 0, # 'à¸' - 31: 0, # 'พ' - 54: 0, # 'ฟ' - 45: 1, # 'ภ' - 9: 1, # 'ม' - 16: 3, # 'ย' - 2: 1, # 'ร' - 61: 
[… remaining THAI_LANG_MODEL character-pair likelihood entries (ratings 0-3) …]
-}
-
-# 255: Undefined characters that did not exist in training text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-# 251: Control characters
-
-# Character Mapping Table(s):
-TIS_620_THAI_CHAR_TO_ORDER = {
[… 256-entry map from TIS-620 byte values to frequency order …]
-}
-
-TIS_620_THAI_MODEL = SingleByteCharSetModel(charset_name='TIS-620',
-                                            language='Thai',
-                                            char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER,
-                                            language_model=THAI_LANG_MODEL,
-                                            typical_positive_ratio=0.926386,
-                                            keep_ascii_letters=False,
-                                            alphabet='กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛')
-
diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py b/venv/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py
deleted file mode 100644
index 43f4230..0000000
--- a/venv/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py
+++ /dev/null
@@ -1,4383 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
-
-
-# 3: Positive
-# 2: Likely
-# 1: Unlikely
-# 0: Negative
-
-TURKISH_LANG_MODEL = {
[… TURKISH_LANG_MODEL character-pair likelihood entries (ratings 0-3); the table continues …]
3, # 'e' - 18: 2, # 'f' - 27: 3, # 'g' - 25: 1, # 'h' - 3: 3, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 1, # 'o' - 26: 3, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 2, # 'v' - 57: 1, # 'w' - 58: 1, # 'x' - 11: 3, # 'y' - 22: 1, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 1, # 'Ãœ' - 59: 0, # 'â' - 33: 2, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 3, # 'ü' - 30: 0, # 'ÄŸ' - 41: 1, # 'Ä°' - 6: 2, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 24: { # 'j' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 1, # 'J' - 16: 2, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 1, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 1, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 1, # 'b' - 28: 1, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 2, # 'f' - 27: 1, # 'g' - 25: 1, # 'h' - 3: 2, # 'i' - 24: 1, # 'j' - 10: 2, # 'k' - 5: 2, # 'l' - 13: 3, # 'm' - 4: 2, # 'n' - 15: 0, # 'o' - 26: 1, # 'p' - 7: 2, # 'r' - 8: 3, # 's' - 9: 2, # 't' - 14: 3, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 2, # 'x' - 11: 1, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 1, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 1, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 10: { # 'k' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 3, # 'T' - 51: 0, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 2, # 'b' - 28: 0, # 'c' - 12: 2, # 'd' - 2: 3, # 'e' - 18: 1, # 'f' - 27: 2, # 'g' - 25: 2, # 'h' - 3: 3, # 'i' - 24: 2, # 'j' - 10: 2, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 3, # 'p' - 7: 2, # 'r' - 8: 2, # 's' - 9: 2, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 1, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 3, # 'ç' - 61: 0, # 'î' - 34: 1, # 'ö' - 17: 3, # 'ü' - 30: 1, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 5: { # 'l' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 3, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 1, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 0, # 'a' - 21: 3, # 'b' - 28: 0, # 'c' - 12: 3, # 'd' - 2: 1, # 'e' - 18: 3, # 'f' - 27: 3, # 'g' - 25: 2, # 'h' - 3: 3, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 1, # 'l' - 13: 1, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 2, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 2, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 1, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 2, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 13: { # 'm' - 23: 1, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 3, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 
60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 3, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 3, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 1, # 'Y' - 56: 0, # 'Z' - 1: 2, # 'a' - 21: 3, # 'b' - 28: 0, # 'c' - 12: 3, # 'd' - 2: 2, # 'e' - 18: 3, # 'f' - 27: 3, # 'g' - 25: 3, # 'h' - 3: 3, # 'i' - 24: 3, # 'j' - 10: 3, # 'k' - 5: 0, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 15: 1, # 'o' - 26: 2, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 2, # 'u' - 32: 2, # 'v' - 57: 1, # 'w' - 58: 0, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 3, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 4: { # 'n' - 23: 1, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 1, # 'H' - 53: 0, # 'I' - 60: 2, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 3, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 2, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 3, # 'a' - 21: 2, # 'b' - 28: 1, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 1, # 'f' - 27: 2, # 'g' - 25: 3, # 'h' - 3: 2, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 1, # 'o' - 26: 3, # 'p' - 7: 2, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 2, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 1, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 2, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 1, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 15: { # 'o' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 2, # 'F' - 36: 1, # 'G' - 45: 1, # 'H' - 53: 1, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 2, # 'L' - 20: 0, # 'M' - 46: 2, # 'N' - 42: 1, # 'O' - 48: 2, # 'P' - 44: 1, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 3, # 'a' - 21: 0, # 'b' - 28: 2, # 'c' - 12: 0, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 1, # 'i' - 24: 2, # 'j' - 10: 1, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 2, # 'n' - 15: 2, # 'o' - 26: 0, # 'p' - 7: 1, # 'r' - 8: 0, # 's' - 9: 0, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 2, # 'x' - 11: 0, # 'y' - 22: 2, # 'z' - 63: 0, # '·' - 54: 1, # 'Ç' - 50: 2, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 3, # 'ç' - 61: 0, # 'î' - 34: 1, # 'ö' - 17: 0, # 'ü' - 30: 2, # 'ÄŸ' - 41: 2, # 'Ä°' - 6: 3, # 'ı' - 40: 2, # 'Åž' - 19: 2, # 'ÅŸ' - }, - 26: { # 'p' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 3, # 'a' - 21: 1, # 'b' - 28: 0, # 'c' - 12: 1, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 1, # 'g' - 25: 1, # 'h' - 3: 2, # 'i' - 24: 3, # 'j' - 10: 1, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 2, # 'n' - 15: 0, # 'o' - 26: 2, # 'p' - 7: 2, # 'r' - 8: 1, # 's' - 9: 1, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 1, # 'x' - 11: 1, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 3, 
# 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 1, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 7: { # 'r' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 1, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 2, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 2, # 'T' - 51: 1, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 1, # 'b' - 28: 0, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 2, # 'g' - 25: 3, # 'h' - 3: 2, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 2, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 1, # 'x' - 11: 2, # 'y' - 22: 0, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 2, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 3, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 2, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 8: { # 's' - 23: 1, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 1, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 3, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 2, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 2, # 'b' - 28: 1, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 2, # 'g' - 25: 2, # 'h' - 3: 2, # 'i' - 24: 3, # 'j' - 10: 3, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 3, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 1, # 'x' - 11: 2, # 'y' - 22: 1, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 2, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 2, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 9: { # 't' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 1, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 2, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 3, # 'b' - 28: 0, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 2, # 'f' - 27: 2, # 'g' - 25: 2, # 'h' - 3: 2, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 2, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 3, # 'v' - 57: 0, # 'w' - 58: 2, # 'x' - 11: 2, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 3, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 2, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 14: { # 'u' - 23: 3, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 3, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 1, # 'H' - 53: 0, # 'I' - 60: 1, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 3, # 'M' - 46: 2, # 'N' - 42: 0, # 'O' - 48: 1, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 3, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 1, # 'Y' - 56: 2, # 'Z' - 1: 2, # 'a' - 21: 3, # 'b' - 28: 0, # 'c' - 12: 3, # 'd' - 2: 2, # 'e' - 18: 2, # 'f' - 27: 3, # 'g' - 25: 3, # 'h' - 3: 3, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 0, 
# 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 3, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 2, # 'v' - 57: 2, # 'w' - 58: 0, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 3, # 'ü' - 30: 1, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 32: { # 'v' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 3, # 'a' - 21: 0, # 'b' - 28: 0, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 0, # 'i' - 24: 1, # 'j' - 10: 1, # 'k' - 5: 3, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 1, # 'p' - 7: 1, # 'r' - 8: 2, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 1, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 2, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 0, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 1, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 57: { # 'w' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 0, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 1, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 1, # 'a' - 21: 0, # 'b' - 28: 0, # 'c' - 12: 0, # 'd' - 2: 2, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 1, # 'h' - 3: 0, # 'i' - 24: 0, # 'j' - 10: 1, # 'k' - 5: 0, # 'l' - 13: 0, # 'm' - 4: 1, # 'n' - 15: 0, # 'o' - 26: 0, # 'p' - 7: 0, # 'r' - 8: 1, # 's' - 9: 0, # 't' - 14: 1, # 'u' - 32: 0, # 'v' - 57: 2, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 0, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 1, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 0, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 58: { # 'x' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 1, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 1, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 0, # 'a' - 21: 1, # 'b' - 28: 0, # 'c' - 12: 2, # 'd' - 2: 1, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 2, # 'i' - 24: 2, # 'j' - 10: 1, # 'k' - 5: 0, # 'l' - 13: 0, # 'm' - 4: 2, # 'n' - 15: 0, # 'o' - 26: 0, # 'p' - 7: 1, # 'r' - 8: 2, # 's' - 9: 1, # 't' - 14: 0, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 2, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 1, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 2, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 11: { # 'y' - 23: 1, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 1, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 
44: 0, # 'R' - 35: 0, # 'S' - 31: 1, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 1, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 1, # 'b' - 28: 0, # 'c' - 12: 2, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 2, # 'g' - 25: 2, # 'h' - 3: 2, # 'i' - 24: 1, # 'j' - 10: 2, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 1, # 'p' - 7: 2, # 'r' - 8: 1, # 's' - 9: 2, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 1, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 3, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 2, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 22: { # 'z' - 23: 2, # 'A' - 37: 2, # 'B' - 47: 1, # 'C' - 39: 2, # 'D' - 29: 3, # 'E' - 52: 1, # 'F' - 36: 2, # 'G' - 45: 2, # 'H' - 53: 1, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 3, # 'M' - 46: 2, # 'N' - 42: 2, # 'O' - 48: 2, # 'P' - 44: 1, # 'R' - 35: 1, # 'S' - 31: 3, # 'T' - 51: 2, # 'U' - 38: 2, # 'V' - 62: 0, # 'W' - 43: 2, # 'Y' - 56: 1, # 'Z' - 1: 1, # 'a' - 21: 2, # 'b' - 28: 1, # 'c' - 12: 2, # 'd' - 2: 2, # 'e' - 18: 3, # 'f' - 27: 2, # 'g' - 25: 2, # 'h' - 3: 3, # 'i' - 24: 2, # 'j' - 10: 3, # 'k' - 5: 0, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 15: 2, # 'o' - 26: 2, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 0, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 3, # 'y' - 22: 2, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 2, # 'Ãœ' - 59: 1, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 2, # 'ö' - 17: 2, # 'ü' - 30: 2, # 'ÄŸ' - 41: 1, # 'Ä°' - 6: 3, # 'ı' - 40: 1, # 'Åž' - 19: 2, # 'ÅŸ' - }, - 63: { # '·' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 0, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 0, # 'a' - 21: 0, # 'b' - 28: 0, # 'c' - 12: 0, # 'd' - 2: 1, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 0, # 'i' - 24: 0, # 'j' - 10: 0, # 'k' - 5: 0, # 'l' - 13: 2, # 'm' - 4: 0, # 'n' - 15: 0, # 'o' - 26: 0, # 'p' - 7: 0, # 'r' - 8: 0, # 's' - 9: 0, # 't' - 14: 2, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 0, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 0, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 54: { # 'Ç' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 1, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 1, # 'G' - 45: 1, # 'H' - 53: 1, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 0, # 'M' - 46: 0, # 'N' - 42: 1, # 'O' - 48: 1, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 1, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 2, # 'Y' - 56: 0, # 'Z' - 1: 0, # 'a' - 21: 1, # 'b' - 28: 0, # 'c' - 12: 1, # 'd' - 2: 0, # 'e' - 18: 0, # 'f' - 27: 1, # 'g' - 25: 0, # 'h' - 3: 3, # 'i' - 24: 0, # 'j' - 10: 1, # 'k' - 5: 0, # 'l' - 13: 0, # 'm' - 4: 2, # 'n' - 15: 1, # 'o' - 26: 0, # 'p' - 7: 2, # 'r' - 8: 0, # 's' - 9: 1, # 't' - 14: 0, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 2, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 1, # 'ö' - 17: 0, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 2, # 'ı' - 40: 
0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 50: { # 'Ö' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 1, # 'D' - 29: 2, # 'E' - 52: 0, # 'F' - 36: 1, # 'G' - 45: 2, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 1, # 'N' - 42: 2, # 'O' - 48: 2, # 'P' - 44: 1, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 1, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 2, # 'Y' - 56: 0, # 'Z' - 1: 0, # 'a' - 21: 2, # 'b' - 28: 1, # 'c' - 12: 2, # 'd' - 2: 0, # 'e' - 18: 1, # 'f' - 27: 1, # 'g' - 25: 1, # 'h' - 3: 2, # 'i' - 24: 0, # 'j' - 10: 2, # 'k' - 5: 0, # 'l' - 13: 0, # 'm' - 4: 3, # 'n' - 15: 2, # 'o' - 26: 2, # 'p' - 7: 3, # 'r' - 8: 1, # 's' - 9: 2, # 't' - 14: 0, # 'u' - 32: 1, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 1, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 2, # 'ö' - 17: 2, # 'ü' - 30: 1, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 2, # 'ı' - 40: 0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 55: { # 'Ãœ' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 2, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 1, # 'K' - 49: 0, # 'L' - 20: 0, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 1, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 2, # 'a' - 21: 0, # 'b' - 28: 2, # 'c' - 12: 0, # 'd' - 2: 2, # 'e' - 18: 0, # 'f' - 27: 1, # 'g' - 25: 0, # 'h' - 3: 0, # 'i' - 24: 0, # 'j' - 10: 0, # 'k' - 5: 1, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 15: 0, # 'o' - 26: 0, # 'p' - 7: 0, # 'r' - 8: 0, # 's' - 9: 1, # 't' - 14: 2, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 1, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 1, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 1, # 'ö' - 17: 0, # 'ü' - 30: 1, # 'ÄŸ' - 41: 1, # 'Ä°' - 6: 0, # 'ı' - 40: 0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 59: { # 'â' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 1, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 1, # 'K' - 49: 0, # 'L' - 20: 0, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 2, # 'a' - 21: 0, # 'b' - 28: 0, # 'c' - 12: 0, # 'd' - 2: 2, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 0, # 'i' - 24: 0, # 'j' - 10: 0, # 'k' - 5: 0, # 'l' - 13: 2, # 'm' - 4: 0, # 'n' - 15: 1, # 'o' - 26: 0, # 'p' - 7: 0, # 'r' - 8: 0, # 's' - 9: 0, # 't' - 14: 2, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 1, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 0, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 1, # 'ı' - 40: 1, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 33: { # 'ç' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 3, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 2, # 'T' - 51: 0, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 0, # 'Z' - 1: 0, # 'a' - 21: 3, # 'b' - 28: 0, # 'c' - 12: 2, # 'd' - 2: 0, # 'e' - 18: 2, # 'f' - 27: 1, # 'g' - 25: 3, # 'h' - 3: 3, # 'i' - 24: 0, # 'j' - 10: 3, # 'k' - 5: 0, # 'l' - 13: 0, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 1, # 'p' - 7: 3, # 'r' - 8: 2, # 's' - 9: 
3, # 't' - 14: 0, # 'u' - 32: 2, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 2, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 1, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 61: { # 'î' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 0, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 0, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 2, # 'a' - 21: 0, # 'b' - 28: 0, # 'c' - 12: 0, # 'd' - 2: 2, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 0, # 'i' - 24: 1, # 'j' - 10: 0, # 'k' - 5: 0, # 'l' - 13: 1, # 'm' - 4: 1, # 'n' - 15: 0, # 'o' - 26: 0, # 'p' - 7: 0, # 'r' - 8: 0, # 's' - 9: 0, # 't' - 14: 1, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 1, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 1, # 'î' - 34: 0, # 'ö' - 17: 0, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 1, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 34: { # 'ö' - 23: 0, # 'A' - 37: 1, # 'B' - 47: 1, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 2, # 'F' - 36: 1, # 'G' - 45: 1, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 1, # 'L' - 20: 0, # 'M' - 46: 1, # 'N' - 42: 1, # 'O' - 48: 2, # 'P' - 44: 1, # 'R' - 35: 1, # 'S' - 31: 1, # 'T' - 51: 1, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 1, # 'b' - 28: 2, # 'c' - 12: 1, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 2, # 'g' - 25: 2, # 'h' - 3: 1, # 'i' - 24: 2, # 'j' - 10: 1, # 'k' - 5: 2, # 'l' - 13: 3, # 'm' - 4: 2, # 'n' - 15: 2, # 'o' - 26: 0, # 'p' - 7: 0, # 'r' - 8: 3, # 's' - 9: 1, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 1, # 'y' - 22: 2, # 'z' - 63: 0, # '·' - 54: 1, # 'Ç' - 50: 2, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 2, # 'ç' - 61: 0, # 'î' - 34: 2, # 'ö' - 17: 0, # 'ü' - 30: 2, # 'ÄŸ' - 41: 1, # 'Ä°' - 6: 1, # 'ı' - 40: 2, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 17: { # 'ü' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 0, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 1, # 'J' - 16: 1, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 0, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 1, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 0, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 0, # 'b' - 28: 0, # 'c' - 12: 1, # 'd' - 2: 3, # 'e' - 18: 1, # 'f' - 27: 2, # 'g' - 25: 0, # 'h' - 3: 1, # 'i' - 24: 1, # 'j' - 10: 2, # 'k' - 5: 3, # 'l' - 13: 2, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 2, # 'p' - 7: 2, # 'r' - 8: 3, # 's' - 9: 2, # 't' - 14: 3, # 'u' - 32: 1, # 'v' - 57: 1, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 1, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 2, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 2, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 30: { # 'ÄŸ' - 23: 0, # 'A' - 37: 2, # 'B' - 47: 1, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 2, # 'F' - 36: 1, # 'G' - 45: 0, # 'H' - 53: 1, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 1, # 'M' - 46: 2, # 'N' - 42: 2, # 'O' - 48: 1, # 'P' - 44: 1, # 'R' - 35: 0, # 'S' - 31: 1, # 'T' - 51: 0, # 'U' - 38: 2, # 'V' - 62: 0, # 'W' - 43: 2, # 
'Y' - 56: 0, # 'Z' - 1: 3, # 'a' - 21: 0, # 'b' - 28: 2, # 'c' - 12: 0, # 'd' - 2: 2, # 'e' - 18: 0, # 'f' - 27: 0, # 'g' - 25: 0, # 'h' - 3: 0, # 'i' - 24: 3, # 'j' - 10: 1, # 'k' - 5: 2, # 'l' - 13: 3, # 'm' - 4: 0, # 'n' - 15: 1, # 'o' - 26: 0, # 'p' - 7: 1, # 'r' - 8: 0, # 's' - 9: 0, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 2, # 'z' - 63: 0, # '·' - 54: 2, # 'Ç' - 50: 2, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 1, # 'ç' - 61: 0, # 'î' - 34: 2, # 'ö' - 17: 0, # 'ü' - 30: 1, # 'ÄŸ' - 41: 2, # 'Ä°' - 6: 2, # 'ı' - 40: 2, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 41: { # 'Ä°' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 1, # 'D' - 29: 1, # 'E' - 52: 0, # 'F' - 36: 2, # 'G' - 45: 2, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 1, # 'N' - 42: 1, # 'O' - 48: 2, # 'P' - 44: 0, # 'R' - 35: 1, # 'S' - 31: 1, # 'T' - 51: 1, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 2, # 'Y' - 56: 0, # 'Z' - 1: 1, # 'a' - 21: 2, # 'b' - 28: 1, # 'c' - 12: 2, # 'd' - 2: 1, # 'e' - 18: 0, # 'f' - 27: 3, # 'g' - 25: 2, # 'h' - 3: 2, # 'i' - 24: 2, # 'j' - 10: 2, # 'k' - 5: 0, # 'l' - 13: 1, # 'm' - 4: 3, # 'n' - 15: 1, # 'o' - 26: 1, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 2, # 't' - 14: 0, # 'u' - 32: 0, # 'v' - 57: 1, # 'w' - 58: 0, # 'x' - 11: 2, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 1, # 'Ãœ' - 59: 1, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 1, # 'ö' - 17: 1, # 'ü' - 30: 2, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 1, # 'ÅŸ' - }, - 6: { # 'ı' - 23: 2, # 'A' - 37: 0, # 'B' - 47: 0, # 'C' - 39: 0, # 'D' - 29: 0, # 'E' - 52: 0, # 'F' - 36: 1, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 2, # 'J' - 16: 3, # 'K' - 49: 0, # 'L' - 20: 3, # 'M' - 46: 1, # 'N' - 42: 0, # 'O' - 48: 0, # 'P' - 44: 0, # 'R' - 35: 0, # 'S' - 31: 2, # 'T' - 51: 0, # 'U' - 38: 0, # 'V' - 62: 0, # 'W' - 43: 2, # 'Y' - 56: 1, # 'Z' - 1: 3, # 'a' - 21: 2, # 'b' - 28: 1, # 'c' - 12: 3, # 'd' - 2: 3, # 'e' - 18: 3, # 'f' - 27: 3, # 'g' - 25: 2, # 'h' - 3: 3, # 'i' - 24: 3, # 'j' - 10: 3, # 'k' - 5: 3, # 'l' - 13: 3, # 'm' - 4: 3, # 'n' - 15: 0, # 'o' - 26: 3, # 'p' - 7: 3, # 'r' - 8: 3, # 's' - 9: 3, # 't' - 14: 3, # 'u' - 32: 3, # 'v' - 57: 1, # 'w' - 58: 1, # 'x' - 11: 3, # 'y' - 22: 0, # 'z' - 63: 1, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 2, # 'ç' - 61: 0, # 'î' - 34: 0, # 'ö' - 17: 3, # 'ü' - 30: 0, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 3, # 'ı' - 40: 0, # 'Åž' - 19: 0, # 'ÅŸ' - }, - 40: { # 'Åž' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 1, # 'D' - 29: 1, # 'E' - 52: 0, # 'F' - 36: 1, # 'G' - 45: 2, # 'H' - 53: 1, # 'I' - 60: 0, # 'J' - 16: 0, # 'K' - 49: 0, # 'L' - 20: 2, # 'M' - 46: 1, # 'N' - 42: 1, # 'O' - 48: 2, # 'P' - 44: 2, # 'R' - 35: 1, # 'S' - 31: 1, # 'T' - 51: 0, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 2, # 'Y' - 56: 1, # 'Z' - 1: 0, # 'a' - 21: 2, # 'b' - 28: 0, # 'c' - 12: 2, # 'd' - 2: 0, # 'e' - 18: 3, # 'f' - 27: 0, # 'g' - 25: 2, # 'h' - 3: 3, # 'i' - 24: 2, # 'j' - 10: 1, # 'k' - 5: 0, # 'l' - 13: 1, # 'm' - 4: 3, # 'n' - 15: 2, # 'o' - 26: 0, # 'p' - 7: 3, # 'r' - 8: 2, # 's' - 9: 2, # 't' - 14: 1, # 'u' - 32: 3, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 2, # 'y' - 22: 0, # 'z' - 63: 0, # '·' - 54: 0, # 'Ç' - 50: 0, # 'Ö' - 55: 1, # 'Ãœ' - 59: 0, # 'â' - 33: 0, # 'ç' - 61: 0, # 'î' - 34: 2, # 'ö' - 17: 1, # 'ü' - 30: 2, # 'ÄŸ' - 41: 0, # 'Ä°' - 6: 2, # 'ı' - 40: 1, # 'Åž' - 19: 2, # 'ÅŸ' - }, - 19: { # 'ÅŸ' - 23: 0, # 'A' - 37: 0, # 'B' - 47: 1, # 'C' - 39: 0, 
# 'D' - 29: 0, # 'E' - 52: 2, # 'F' - 36: 1, # 'G' - 45: 0, # 'H' - 53: 0, # 'I' - 60: 0, # 'J' - 16: 3, # 'K' - 49: 2, # 'L' - 20: 0, # 'M' - 46: 1, # 'N' - 42: 1, # 'O' - 48: 1, # 'P' - 44: 1, # 'R' - 35: 1, # 'S' - 31: 0, # 'T' - 51: 1, # 'U' - 38: 1, # 'V' - 62: 0, # 'W' - 43: 1, # 'Y' - 56: 0, # 'Z' - 1: 3, # 'a' - 21: 1, # 'b' - 28: 2, # 'c' - 12: 0, # 'd' - 2: 3, # 'e' - 18: 0, # 'f' - 27: 2, # 'g' - 25: 1, # 'h' - 3: 1, # 'i' - 24: 0, # 'j' - 10: 2, # 'k' - 5: 2, # 'l' - 13: 3, # 'm' - 4: 0, # 'n' - 15: 0, # 'o' - 26: 1, # 'p' - 7: 3, # 'r' - 8: 0, # 's' - 9: 0, # 't' - 14: 3, # 'u' - 32: 0, # 'v' - 57: 0, # 'w' - 58: 0, # 'x' - 11: 0, # 'y' - 22: 2, # 'z' - 63: 0, # '·' - 54: 1, # 'Ç' - 50: 2, # 'Ö' - 55: 0, # 'Ãœ' - 59: 0, # 'â' - 33: 1, # 'ç' - 61: 1, # 'î' - 34: 2, # 'ö' - 17: 0, # 'ü' - 30: 1, # 'ÄŸ' - 41: 1, # 'Ä°' - 6: 1, # 'ı' - 40: 1, # 'Åž' - 19: 1, # 'ÅŸ' - }, -} - -# 255: Undefined characters that did not exist in training text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 -# 251: Control characters - -# Character Mapping Table(s): -ISO_8859_9_TURKISH_CHAR_TO_ORDER = { - 0: 255, # '\x00' - 1: 255, # '\x01' - 2: 255, # '\x02' - 3: 255, # '\x03' - 4: 255, # '\x04' - 5: 255, # '\x05' - 6: 255, # '\x06' - 7: 255, # '\x07' - 8: 255, # '\x08' - 9: 255, # '\t' - 10: 255, # '\n' - 11: 255, # '\x0b' - 12: 255, # '\x0c' - 13: 255, # '\r' - 14: 255, # '\x0e' - 15: 255, # '\x0f' - 16: 255, # '\x10' - 17: 255, # '\x11' - 18: 255, # '\x12' - 19: 255, # '\x13' - 20: 255, # '\x14' - 21: 255, # '\x15' - 22: 255, # '\x16' - 23: 255, # '\x17' - 24: 255, # '\x18' - 25: 255, # '\x19' - 26: 255, # '\x1a' - 27: 255, # '\x1b' - 28: 255, # '\x1c' - 29: 255, # '\x1d' - 30: 255, # '\x1e' - 31: 255, # '\x1f' - 32: 255, # ' ' - 33: 255, # '!' - 34: 255, # '"' - 35: 255, # '#' - 36: 255, # '$' - 37: 255, # '%' - 38: 255, # '&' - 39: 255, # "'" - 40: 255, # '(' - 41: 255, # ')' - 42: 255, # '*' - 43: 255, # '+' - 44: 255, # ',' - 45: 255, # '-' - 46: 255, # '.' - 47: 255, # '/' - 48: 255, # '0' - 49: 255, # '1' - 50: 255, # '2' - 51: 255, # '3' - 52: 255, # '4' - 53: 255, # '5' - 54: 255, # '6' - 55: 255, # '7' - 56: 255, # '8' - 57: 255, # '9' - 58: 255, # ':' - 59: 255, # ';' - 60: 255, # '<' - 61: 255, # '=' - 62: 255, # '>' - 63: 255, # '?' 
- 64: 255, # '@' - 65: 23, # 'A' - 66: 37, # 'B' - 67: 47, # 'C' - 68: 39, # 'D' - 69: 29, # 'E' - 70: 52, # 'F' - 71: 36, # 'G' - 72: 45, # 'H' - 73: 53, # 'I' - 74: 60, # 'J' - 75: 16, # 'K' - 76: 49, # 'L' - 77: 20, # 'M' - 78: 46, # 'N' - 79: 42, # 'O' - 80: 48, # 'P' - 81: 69, # 'Q' - 82: 44, # 'R' - 83: 35, # 'S' - 84: 31, # 'T' - 85: 51, # 'U' - 86: 38, # 'V' - 87: 62, # 'W' - 88: 65, # 'X' - 89: 43, # 'Y' - 90: 56, # 'Z' - 91: 255, # '[' - 92: 255, # '\\' - 93: 255, # ']' - 94: 255, # '^' - 95: 255, # '_' - 96: 255, # '`' - 97: 1, # 'a' - 98: 21, # 'b' - 99: 28, # 'c' - 100: 12, # 'd' - 101: 2, # 'e' - 102: 18, # 'f' - 103: 27, # 'g' - 104: 25, # 'h' - 105: 3, # 'i' - 106: 24, # 'j' - 107: 10, # 'k' - 108: 5, # 'l' - 109: 13, # 'm' - 110: 4, # 'n' - 111: 15, # 'o' - 112: 26, # 'p' - 113: 64, # 'q' - 114: 7, # 'r' - 115: 8, # 's' - 116: 9, # 't' - 117: 14, # 'u' - 118: 32, # 'v' - 119: 57, # 'w' - 120: 58, # 'x' - 121: 11, # 'y' - 122: 22, # 'z' - 123: 255, # '{' - 124: 255, # '|' - 125: 255, # '}' - 126: 255, # '~' - 127: 255, # '\x7f' - 128: 180, # '\x80' - 129: 179, # '\x81' - 130: 178, # '\x82' - 131: 177, # '\x83' - 132: 176, # '\x84' - 133: 175, # '\x85' - 134: 174, # '\x86' - 135: 173, # '\x87' - 136: 172, # '\x88' - 137: 171, # '\x89' - 138: 170, # '\x8a' - 139: 169, # '\x8b' - 140: 168, # '\x8c' - 141: 167, # '\x8d' - 142: 166, # '\x8e' - 143: 165, # '\x8f' - 144: 164, # '\x90' - 145: 163, # '\x91' - 146: 162, # '\x92' - 147: 161, # '\x93' - 148: 160, # '\x94' - 149: 159, # '\x95' - 150: 101, # '\x96' - 151: 158, # '\x97' - 152: 157, # '\x98' - 153: 156, # '\x99' - 154: 155, # '\x9a' - 155: 154, # '\x9b' - 156: 153, # '\x9c' - 157: 152, # '\x9d' - 158: 151, # '\x9e' - 159: 106, # '\x9f' - 160: 150, # '\xa0' - 161: 149, # '¡' - 162: 148, # '¢' - 163: 147, # '£' - 164: 146, # '¤' - 165: 145, # 'Â¥' - 166: 144, # '¦' - 167: 100, # '§' - 168: 143, # '¨' - 169: 142, # '©' - 170: 141, # 'ª' - 171: 140, # '«' - 172: 139, # '¬' - 173: 138, # '\xad' - 174: 137, # '®' - 175: 136, # '¯' - 176: 94, # '°' - 177: 80, # '±' - 178: 93, # '²' - 179: 135, # '³' - 180: 105, # '´' - 181: 134, # 'µ' - 182: 133, # '¶' - 183: 63, # '·' - 184: 132, # '¸' - 185: 131, # '¹' - 186: 130, # 'º' - 187: 129, # '»' - 188: 128, # '¼' - 189: 127, # '½' - 190: 126, # '¾' - 191: 125, # '¿' - 192: 124, # 'À' - 193: 104, # 'Ã' - 194: 73, # 'Â' - 195: 99, # 'Ã' - 196: 79, # 'Ä' - 197: 85, # 'Ã…' - 198: 123, # 'Æ' - 199: 54, # 'Ç' - 200: 122, # 'È' - 201: 98, # 'É' - 202: 92, # 'Ê' - 203: 121, # 'Ë' - 204: 120, # 'ÃŒ' - 205: 91, # 'Ã' - 206: 103, # 'ÃŽ' - 207: 119, # 'Ã' - 208: 68, # 'Äž' - 209: 118, # 'Ñ' - 210: 117, # 'Ã’' - 211: 97, # 'Ó' - 212: 116, # 'Ô' - 213: 115, # 'Õ' - 214: 50, # 'Ö' - 215: 90, # '×' - 216: 114, # 'Ø' - 217: 113, # 'Ù' - 218: 112, # 'Ú' - 219: 111, # 'Û' - 220: 55, # 'Ãœ' - 221: 41, # 'Ä°' - 222: 40, # 'Åž' - 223: 86, # 'ß' - 224: 89, # 'à' - 225: 70, # 'á' - 226: 59, # 'â' - 227: 78, # 'ã' - 228: 71, # 'ä' - 229: 82, # 'Ã¥' - 230: 88, # 'æ' - 231: 33, # 'ç' - 232: 77, # 'è' - 233: 66, # 'é' - 234: 84, # 'ê' - 235: 83, # 'ë' - 236: 110, # 'ì' - 237: 75, # 'í' - 238: 61, # 'î' - 239: 96, # 'ï' - 240: 30, # 'ÄŸ' - 241: 67, # 'ñ' - 242: 109, # 'ò' - 243: 74, # 'ó' - 244: 87, # 'ô' - 245: 102, # 'õ' - 246: 34, # 'ö' - 247: 95, # '÷' - 248: 81, # 'ø' - 249: 108, # 'ù' - 250: 76, # 'ú' - 251: 72, # 'û' - 252: 17, # 'ü' - 253: 6, # 'ı' - 254: 19, # 'ÅŸ' - 255: 107, # 'ÿ' -} - -ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-9', - language='Turkish', - 
char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER, - language_model=TURKISH_LANG_MODEL, - typical_positive_ratio=0.97029, - keep_ascii_letters=True, - alphabet='ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş') - diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/latin1prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/latin1prober.py deleted file mode 100644 index 7d1e8c2..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/latin1prober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -FREQ_CAT_NUM = 4 - -UDF = 0 # undefined -OTH = 1 # other -ASC = 2 # ascii capital letter -ASS = 3 # ascii small letter -ACV = 4 # accent capital vowel -ACO = 5 # accent capital other -ASV = 6 # accent small vowel -ASO = 7 # accent small other -CLASS_NUM = 8 # total classes - -Latin1_CharToClass = ( - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F - OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 - ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F - OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 - ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F - OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 - OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F - UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 - OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF - ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 - ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF - ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 - ACV, ACV, ACV, 
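The ISO_8859_9_TURKISH_MODEL deleted above pairs a char-to-order map (orders below 64 are the frequent letters; 250-255 flag digits, symbols, line breaks and control bytes, per the comments preceding the map) with a 0-3 bigram frequency table. The sketch below shows how such a pair of dict tables can score a byte string; it is only an illustration, not chardet's actual SingleByteCharSetProber logic, and the 64-letter sample size plus the comparison against typical_positive_ratio are assumptions carried over from the model fields.

# Simplified single-byte model scoring (illustration only).
def score_bigrams(byte_str, char_to_order_map, language_model, sample_size=64):
    """Fraction of adjacent letter pairs the language model rates 3 ('very likely')."""
    likely = total = 0
    prev_order = None
    for byte in byte_str:
        order = char_to_order_map.get(byte, 255)
        # Only pairs of frequent letters (order < sample_size) take part in scoring.
        if order < sample_size and prev_order is not None and prev_order < sample_size:
            total += 1
            if language_model.get(prev_order, {}).get(order, 0) == 3:
                likely += 1
        prev_order = order
    return likely / total if total else 0.0

# ratio = score_bigrams(text_bytes, ISO_8859_9_TURKISH_CHAR_TO_ORDER, TURKISH_LANG_MODEL)
# Comparing that ratio against typical_positive_ratio (0.97029 above) is roughly how a
# single-byte prober turns the tables into a confidence value.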
ACV, ACV, ACO, ACO, ACO, # D8 - DF - ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 - ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF - ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 - ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF -) - -# 0 : illegal -# 1 : very unlikely -# 2 : normal -# 3 : very likely -Latin1ClassModel = ( -# UDF OTH ASC ASS ACV ACO ASV ASO - 0, 0, 0, 0, 0, 0, 0, 0, # UDF - 0, 3, 3, 3, 3, 3, 3, 3, # OTH - 0, 3, 3, 3, 3, 3, 3, 3, # ASC - 0, 3, 3, 3, 1, 1, 3, 3, # ASS - 0, 3, 3, 3, 1, 2, 1, 2, # ACV - 0, 3, 3, 3, 3, 3, 3, 3, # ACO - 0, 3, 1, 3, 1, 1, 1, 3, # ASV - 0, 3, 1, 3, 1, 1, 3, 3, # ASO -) - - -class Latin1Prober(CharSetProber): - def __init__(self): - super(Latin1Prober, self).__init__() - self._last_char_class = None - self._freq_counter = None - self.reset() - - def reset(self): - self._last_char_class = OTH - self._freq_counter = [0] * FREQ_CAT_NUM - CharSetProber.reset(self) - - @property - def charset_name(self): - return "ISO-8859-1" - - @property - def language(self): - return "" - - def feed(self, byte_str): - byte_str = self.filter_with_english_letters(byte_str) - for c in byte_str: - char_class = Latin1_CharToClass[c] - freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) - + char_class] - if freq == 0: - self._state = ProbingState.NOT_ME - break - self._freq_counter[freq] += 1 - self._last_char_class = char_class - - return self.state - - def get_confidence(self): - if self.state == ProbingState.NOT_ME: - return 0.01 - - total = sum(self._freq_counter) - if total < 0.01: - confidence = 0.0 - else: - confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) - / total) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. - confidence = confidence * 0.73 - return confidence diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/mbcharsetprober.py deleted file mode 100644 index 6256ecf..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/mbcharsetprober.py +++ /dev/null @@ -1,91 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState - - -class MultiByteCharSetProber(CharSetProber): - """ - MultiByteCharSetProber - """ - - def __init__(self, lang_filter=None): - super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) - self.distribution_analyzer = None - self.coding_sm = None - self._last_char = [0, 0] - - def reset(self): - super(MultiByteCharSetProber, self).reset() - if self.coding_sm: - self.coding_sm.reset() - if self.distribution_analyzer: - self.distribution_analyzer.reset() - self._last_char = [0, 0] - - @property - def charset_name(self): - raise NotImplementedError - - @property - def language(self): - raise NotImplementedError - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.distribution_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - return self.distribution_analyzer.get_confidence() diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py deleted file mode 100644 index 530abe7..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py +++ /dev/null @@ -1,54 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
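MultiByteCharSetProber.feed, deleted above, walks each byte through a coding state machine and hands byte pairs (bridging chunk boundaries via _last_char) to a distribution analyzer; probing stops early once the state machine rejects the input or the confidence clears SHORTCUT_THRESHOLD. A short sketch of driving one of its concrete subclasses chunk by chunk, assuming the standalone chardet package (the vendored copy removed in this diff has the same modules) and a hypothetical input file:

from chardet.sjisprober import SJISProber
from chardet.enums import ProbingState

prober = SJISProber()
with open("sample_sjis.txt", "rb") as handle:      # hypothetical input file
    for chunk in iter(lambda: handle.read(4096), b""):
        # feed() returns the current ProbingState; stop on FOUND_IT or NOT_ME.
        if prober.feed(chunk) != ProbingState.DETECTING:
            break
print(prober.charset_name, prober.get_confidence())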
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .utf8prober import UTF8Prober -from .sjisprober import SJISProber -from .eucjpprober import EUCJPProber -from .gb2312prober import GB2312Prober -from .euckrprober import EUCKRProber -from .cp949prober import CP949Prober -from .big5prober import Big5Prober -from .euctwprober import EUCTWProber - - -class MBCSGroupProber(CharSetGroupProber): - def __init__(self, lang_filter=None): - super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) - self.probers = [ - UTF8Prober(), - SJISProber(), - EUCJPProber(), - GB2312Prober(), - EUCKRProber(), - CP949Prober(), - Big5Prober(), - EUCTWProber() - ] - self.reset() diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/mbcssm.py b/venv/Lib/site-packages/pip/_vendor/chardet/mbcssm.py deleted file mode 100644 index 8360d0f..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/mbcssm.py +++ /dev/null @@ -1,572 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
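MBCSGroupProber above simply fans the input out to the eight probers it lists. Each of those probers is parameterised by one of the *_SM_MODEL dicts defined in mbcssm.py below: class_table maps a byte to a class, class_factor is the row width, state_table is the flattened (state, class) transition table, and char_len_table gives the expected sequence length per class. A minimal sketch of that table lookup, assuming the conventional MachineState values used by chardet's CodingStateMachine (defined in codingstatemachine.py, which is not part of this hunk):

START, ERROR, ITS_ME = 0, 1, 2    # assumed MachineState values, for illustration only

def run_state_machine(sm_model, byte_str):
    """Drive one *_SM_MODEL dict over a byte string and report the outcome."""
    state = START
    for byte in byte_str:
        byte_class = sm_model["class_table"][byte]
        state = sm_model["state_table"][state * sm_model["class_factor"] + byte_class]
        if state == ERROR:
            return "NOT_ME"       # illegal byte sequence for this encoding
        if state == ITS_ME:
            return "FOUND_IT"     # unambiguous signature for this encoding
    return "DETECTING"

# run_state_machine(BIG5_SM_MODEL, b"\xa4\x40") walks the Big5 tables: the legal
# two-byte sequence ends back in START, so the result is "DETECTING".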
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -# BIG5 - -BIG5_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 4,4,4,4,4,4,4,4, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 4,3,3,3,3,3,3,3, # a0 - a7 - 3,3,3,3,3,3,3,3, # a8 - af - 3,3,3,3,3,3,3,3, # b0 - b7 - 3,3,3,3,3,3,3,3, # b8 - bf - 3,3,3,3,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -BIG5_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 -) - -BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) - -BIG5_SM_MODEL = {'class_table': BIG5_CLS, - 'class_factor': 5, - 'state_table': BIG5_ST, - 'char_len_table': BIG5_CHAR_LEN_TABLE, - 'name': 'Big5'} - -# CP949 - -CP949_CLS = ( - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f - 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f - 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f - 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f - 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f - 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f - 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f - 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f - 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af - 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf - 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff -) - -CP949_ST = ( -#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME - 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 -) - -CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) - -CP949_SM_MODEL = {'class_table': CP949_CLS, - 'class_factor': 10, - 'state_table': CP949_ST, - 'char_len_table': CP949_CHAR_LEN_TABLE, - 'name': 'CP949'} - -# EUC-JP - -EUCJP_CLS = ( - 4,4,4,4,4,4,4,4, # 00 - 07 - 4,4,4,4,4,4,5,5, # 08 - 0f - 4,4,4,4,4,4,4,4, # 10 - 17 - 4,4,4,5,4,4,4,4, # 18 - 1f - 4,4,4,4,4,4,4,4, # 20 - 27 - 4,4,4,4,4,4,4,4, # 28 - 2f - 4,4,4,4,4,4,4,4, # 30 - 37 - 4,4,4,4,4,4,4,4, # 38 - 3f - 4,4,4,4,4,4,4,4, # 40 - 47 - 4,4,4,4,4,4,4,4, # 48 - 4f - 4,4,4,4,4,4,4,4, # 50 - 57 - 4,4,4,4,4,4,4,4, # 58 - 5f - 4,4,4,4,4,4,4,4, # 60 - 67 - 4,4,4,4,4,4,4,4, # 68 - 6f - 4,4,4,4,4,4,4,4, # 70 - 77 - 4,4,4,4,4,4,4,4, # 78 - 7f - 5,5,5,5,5,5,5,5, # 80 - 87 - 5,5,5,5,5,5,1,3, # 88 - 8f - 5,5,5,5,5,5,5,5, # 90 - 97 - 5,5,5,5,5,5,5,5, # 98 - 9f - 5,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,0,5 # f8 - ff -) - -EUCJP_ST = ( - 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f - 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 -) - -EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) - -EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, - 'class_factor': 6, - 'state_table': EUCJP_ST, - 'char_len_table': EUCJP_CHAR_LEN_TABLE, - 'name': 'EUC-JP'} - -# EUC-KR - -EUCKR_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,3,3,3, # 
a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,3,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 2,2,2,2,2,2,2,2, # e0 - e7 - 2,2,2,2,2,2,2,2, # e8 - ef - 2,2,2,2,2,2,2,2, # f0 - f7 - 2,2,2,2,2,2,2,0 # f8 - ff -) - -EUCKR_ST = ( - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f -) - -EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) - -EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, - 'class_factor': 4, - 'state_table': EUCKR_ST, - 'char_len_table': EUCKR_CHAR_LEN_TABLE, - 'name': 'EUC-KR'} - -# EUC-TW - -EUCTW_CLS = ( - 2,2,2,2,2,2,2,2, # 00 - 07 - 2,2,2,2,2,2,0,0, # 08 - 0f - 2,2,2,2,2,2,2,2, # 10 - 17 - 2,2,2,0,2,2,2,2, # 18 - 1f - 2,2,2,2,2,2,2,2, # 20 - 27 - 2,2,2,2,2,2,2,2, # 28 - 2f - 2,2,2,2,2,2,2,2, # 30 - 37 - 2,2,2,2,2,2,2,2, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,2, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,6,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,3,4,4,4,4,4,4, # a0 - a7 - 5,5,1,1,1,1,1,1, # a8 - af - 1,1,1,1,1,1,1,1, # b0 - b7 - 1,1,1,1,1,1,1,1, # b8 - bf - 1,1,3,1,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -EUCTW_ST = ( - MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 - MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 - MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) - -EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, - 'class_factor': 7, - 'state_table': EUCTW_ST, - 'char_len_table': EUCTW_CHAR_LEN_TABLE, - 'name': 'x-euc-tw'} - -# GB2312 - -GB2312_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 3,3,3,3,3,3,3,3, # 30 - 37 - 3,3,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,4, # 78 - 7f - 5,6,6,6,6,6,6,6, # 80 - 87 - 6,6,6,6,6,6,6,6, # 88 - 8f - 6,6,6,6,6,6,6,6, # 90 - 97 - 6,6,6,6,6,6,6,6, # 98 - 9f - 6,6,6,6,6,6,6,6, # a0 - a7 - 
6,6,6,6,6,6,6,6, # a8 - af - 6,6,6,6,6,6,6,6, # b0 - b7 - 6,6,6,6,6,6,6,6, # b8 - bf - 6,6,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 6,6,6,6,6,6,6,6, # e0 - e7 - 6,6,6,6,6,6,6,6, # e8 - ef - 6,6,6,6,6,6,6,6, # f0 - f7 - 6,6,6,6,6,6,6,0 # f8 - ff -) - -GB2312_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 - 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -# To be accurate, the length of class 6 can be either 2 or 4. -# But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validating -# each code range there as well. So it is safe to set it to be -# 2 here. -GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) - -GB2312_SM_MODEL = {'class_table': GB2312_CLS, - 'class_factor': 7, - 'state_table': GB2312_ST, - 'char_len_table': GB2312_CHAR_LEN_TABLE, - 'name': 'GB2312'} - -# Shift_JIS - -SJIS_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,2,2,3, # 80 - 87 - 3,3,3,3,3,3,3,3, # 88 - 8f - 3,3,3,3,3,3,3,3, # 90 - 97 - 3,3,3,3,3,3,3,3, # 98 - 9f - #0xa0 is illegal in sjis encoding, but some pages does - #contain such byte. We need to be more error forgiven. 
- 2,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,4,4,4, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,0,0,0) # f8 - ff - - -SJIS_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 -) - -SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) - -SJIS_SM_MODEL = {'class_table': SJIS_CLS, - 'class_factor': 6, - 'state_table': SJIS_ST, - 'char_len_table': SJIS_CHAR_LEN_TABLE, - 'name': 'Shift_JIS'} - -# UCS2-BE - -UCS2BE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2BE_ST = ( - 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 - 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f - 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 - 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f - 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) - -UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, - 'class_factor': 6, - 'state_table': UCS2BE_ST, - 'char_len_table': UCS2BE_CHAR_LEN_TABLE, - 'name': 'UTF-16BE'} - -# UCS2-LE - -UCS2LE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 
0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2LE_ST = ( - 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f - 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 - 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) - -UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, - 'class_factor': 6, - 'state_table': UCS2LE_ST, - 'char_len_table': UCS2LE_CHAR_LEN_TABLE, - 'name': 'UTF-16LE'} - -# UTF-8 - -UTF8_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 2,2,2,2,3,3,3,3, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 5,5,5,5,5,5,5,5, # a0 - a7 - 5,5,5,5,5,5,5,5, # a8 - af - 5,5,5,5,5,5,5,5, # b0 - b7 - 5,5,5,5,5,5,5,5, # b8 - bf - 0,0,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 7,8,8,8,8,8,8,8, # e0 - e7 - 8,8,8,8,8,9,8,8, # e8 - ef - 10,11,11,11,11,11,11,11, # f0 - f7 - 12,13,13,13,14,15,0,0 # f8 - ff -) - -UTF8_ST = ( - MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 - 9, 11, 8, 7, 6, 5, 4, 3,#08-0f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f - MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f - MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f - MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f - MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af - MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf -) - -UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) - -UTF8_SM_MODEL = {'class_table': UTF8_CLS, - 'class_factor': 16, - 'state_table': UTF8_ST, - 'char_len_table': UTF8_CHAR_LEN_TABLE, - 'name': 'UTF-8'} diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__init__.py b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 08ca211..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc deleted file mode 100644 index 223768e..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/languages.py b/venv/Lib/site-packages/pip/_vendor/chardet/metadata/languages.py deleted file mode 100644 
index 3237d5a..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/metadata/languages.py +++ /dev/null @@ -1,310 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" -Metadata about languages used by our model training code for our -SingleByteCharSetProbers. Could be used for other things in the future. - -This code is based on the language metadata from the uchardet project. -""" -from __future__ import absolute_import, print_function - -from string import ascii_letters - - -# TODO: Add Ukranian (KOI8-U) - -class Language(object): - """Metadata about a language useful for training models - - :ivar name: The human name for the language, in English. - :type name: str - :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise, - or use another catalog as a last resort. - :type iso_code: str - :ivar use_ascii: Whether or not ASCII letters should be included in trained - models. - :type use_ascii: bool - :ivar charsets: The charsets we want to support and create data for. - :type charsets: list of str - :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is - `True`, you only need to add those not in the ASCII set. - :type alphabet: str - :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling - Wikipedia for training data. - :type wiki_start_pages: list of str - """ - def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None, - alphabet=None, wiki_start_pages=None): - super(Language, self).__init__() - self.name = name - self.iso_code = iso_code - self.use_ascii = use_ascii - self.charsets = charsets - if self.use_ascii: - if alphabet: - alphabet += ascii_letters - else: - alphabet = ascii_letters - elif not alphabet: - raise ValueError('Must supply alphabet if use_ascii is False') - self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None - self.wiki_start_pages = wiki_start_pages - - def __repr__(self): - return '{}({})'.format(self.__class__.__name__, - ', '.join('{}={!r}'.format(k, v) - for k, v in self.__dict__.items() - if not k.startswith('_'))) - - -LANGUAGES = {'Arabic': Language(name='Arabic', - iso_code='ar', - use_ascii=False, - # We only support encodings that use isolated - # forms, because the current recommendation is - # that the rendering system handles presentation - # forms. This means we purposefully skip IBM864. 
- charsets=['ISO-8859-6', 'WINDOWS-1256', - 'CP720', 'CP864'], - alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـÙقكلمنهوىيًٌÙÙŽÙÙÙ‘', - wiki_start_pages=[u'الصÙحة_الرئيسية']), - 'Belarusian': Language(name='Belarusian', - iso_code='be', - use_ascii=False, - charsets=['ISO-8859-5', 'WINDOWS-1251', - 'IBM866', 'MacCyrillic'], - alphabet=(u'ÐБВГДЕÐЖЗІЙКЛМÐОПРСТУЎФХЦЧШЫЬЭЮЯ' - u'абвгдеёжзійклмнопрÑтуўфхцчшыьÑÑŽÑʼ'), - wiki_start_pages=[u'ГалоўнаÑ_Ñтаронка']), - 'Bulgarian': Language(name='Bulgarian', - iso_code='bg', - use_ascii=False, - charsets=['ISO-8859-5', 'WINDOWS-1251', - 'IBM855'], - alphabet=(u'ÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЬЮЯ' - u'абвгдежзийклмнопрÑтуфхцчшщъьюÑ'), - wiki_start_pages=[u'Ðачална_Ñтраница']), - 'Czech': Language(name='Czech', - iso_code='cz', - use_ascii=True, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=u'áÄÄéěíňóřšťúůýžÃČĎÉĚÃŇÓŘŠŤÚŮÃŽ', - wiki_start_pages=[u'Hlavní_strana']), - 'Danish': Language(name='Danish', - iso_code='da', - use_ascii=True, - charsets=['ISO-8859-1', 'ISO-8859-15', - 'WINDOWS-1252'], - alphabet=u'æøåÆØÅ', - wiki_start_pages=[u'Forside']), - 'German': Language(name='German', - iso_code='de', - use_ascii=True, - charsets=['ISO-8859-1', 'WINDOWS-1252'], - alphabet=u'äöüßÄÖÜ', - wiki_start_pages=[u'Wikipedia:Hauptseite']), - 'Greek': Language(name='Greek', - iso_code='el', - use_ascii=False, - charsets=['ISO-8859-7', 'WINDOWS-1253'], - alphabet=(u'αβγδεζηθικλμνξοπÏσςτυφχψωάέήίόÏÏŽ' - u'ΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎÎ'), - wiki_start_pages=[u'ΠÏλη:ΚÏÏια']), - 'English': Language(name='English', - iso_code='en', - use_ascii=True, - charsets=['ISO-8859-1', 'WINDOWS-1252'], - wiki_start_pages=[u'Main_Page']), - 'Esperanto': Language(name='Esperanto', - iso_code='eo', - # Q, W, X, and Y not used at all - use_ascii=False, - charsets=['ISO-8859-3'], - alphabet=(u'abcĉdefgÄhÄ¥ijĵklmnoprsÅtuÅ­vz' - u'ABCĈDEFGÄœHĤIJÄ´KLMNOPRSÅœTUŬVZ'), - wiki_start_pages=[u'Vikipedio:ĈefpaÄo']), - 'Spanish': Language(name='Spanish', - iso_code='es', - use_ascii=True, - charsets=['ISO-8859-1', 'ISO-8859-15', - 'WINDOWS-1252'], - alphabet=u'ñáéíóúüÑÃÉÃÓÚÜ', - wiki_start_pages=[u'Wikipedia:Portada']), - 'Estonian': Language(name='Estonian', - iso_code='et', - use_ascii=False, - charsets=['ISO-8859-4', 'ISO-8859-13', - 'WINDOWS-1257'], - # C, F, Å , Q, W, X, Y, Z, Ž are only for - # loanwords - alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ' - u'abdeghijklmnoprstuvõäöü'), - wiki_start_pages=[u'Esileht']), - 'Finnish': Language(name='Finnish', - iso_code='fi', - use_ascii=True, - charsets=['ISO-8859-1', 'ISO-8859-15', - 'WINDOWS-1252'], - alphabet=u'ÅÄÖŠŽåäöšž', - wiki_start_pages=[u'Wikipedia:Etusivu']), - 'French': Language(name='French', - iso_code='fr', - use_ascii=True, - charsets=['ISO-8859-1', 'ISO-8859-15', - 'WINDOWS-1252'], - alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÃÙÛÊ', - wiki_start_pages=[u'Wikipédia:Accueil_principal', - u'BÅ“uf (animal)']), - 'Hebrew': Language(name='Hebrew', - iso_code='he', - use_ascii=False, - charsets=['ISO-8859-8', 'WINDOWS-1255'], - alphabet=u'×בגדהוזחטיךכל×מןנסעףפץצקרשתװױײ', - wiki_start_pages=[u'עמוד_ר×שי']), - 'Croatian': Language(name='Croatian', - iso_code='hr', - # Q, W, X, Y are only used for foreign words. - use_ascii=False, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=(u'abcÄćdÄ‘efghijklmnoprsÅ¡tuvzž' - u'ABCČĆDÄEFGHIJKLMNOPRSÅ TUVZŽ'), - wiki_start_pages=[u'Glavna_stranica']), - 'Hungarian': Language(name='Hungarian', - iso_code='hu', - # Q, W, X, Y are only used for foreign words. 
- use_ascii=False, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű' - u'ABCDEFGHIJKLMNOPRSTUVZÃÉÃÓÖÅÚÜŰ'), - wiki_start_pages=[u'KezdÅ‘lap']), - 'Italian': Language(name='Italian', - iso_code='it', - use_ascii=True, - charsets=['ISO-8859-1', 'ISO-8859-15', - 'WINDOWS-1252'], - alphabet=u'ÀÈÉÌÒÓÙàèéìòóù', - wiki_start_pages=[u'Pagina_principale']), - 'Lithuanian': Language(name='Lithuanian', - iso_code='lt', - use_ascii=False, - charsets=['ISO-8859-13', 'WINDOWS-1257', - 'ISO-8859-4'], - # Q, W, and X not used at all - alphabet=(u'AÄ„BCÄŒDEĘĖFGHIÄ®YJKLMNOPRSÅ TUŲŪVZŽ' - u'aÄ…bcÄdeęėfghiįyjklmnoprsÅ¡tuųūvzž'), - wiki_start_pages=[u'Pagrindinis_puslapis']), - 'Latvian': Language(name='Latvian', - iso_code='lv', - use_ascii=False, - charsets=['ISO-8859-13', 'WINDOWS-1257', - 'ISO-8859-4'], - # Q, W, X, Y are only for loanwords - alphabet=(u'AÄ€BCÄŒDEÄ’FGÄ¢HIĪJKĶLÄ»MNÅ…OPRSÅ TUŪVZŽ' - u'aÄbcÄdeÄ“fgÄ£hiÄ«jkÄ·lļmnņoprsÅ¡tuÅ«vzž'), - wiki_start_pages=[u'SÄkumlapa']), - 'Macedonian': Language(name='Macedonian', - iso_code='mk', - use_ascii=False, - charsets=['ISO-8859-5', 'WINDOWS-1251', - 'MacCyrillic', 'IBM855'], - alphabet=(u'ÐБВГДЃЕЖЗЅИЈКЛЉМÐЊОПРСТЌУФХЦЧÐШ' - u'абвгдѓежзѕијклљмнњопрÑтќуфхцчџш'), - wiki_start_pages=[u'Главна_Ñтраница']), - 'Dutch': Language(name='Dutch', - iso_code='nl', - use_ascii=True, - charsets=['ISO-8859-1', 'WINDOWS-1252'], - wiki_start_pages=[u'Hoofdpagina']), - 'Polish': Language(name='Polish', - iso_code='pl', - # Q and X are only used for foreign words. - use_ascii=False, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=(u'AÄ„BCĆDEĘFGHIJKLÅMNŃOÓPRSÅšTUWYZŹŻ' - u'aÄ…bcćdeÄ™fghijklÅ‚mnÅ„oóprsÅ›tuwyzźż'), - wiki_start_pages=[u'Wikipedia:Strona_główna']), - 'Portuguese': Language(name='Portuguese', - iso_code='pt', - use_ascii=True, - charsets=['ISO-8859-1', 'ISO-8859-15', - 'WINDOWS-1252'], - alphabet=u'ÃÂÃÀÇÉÊÃÓÔÕÚáâãàçéêíóôõú', - wiki_start_pages=[u'Wikipédia:Página_principal']), - 'Romanian': Language(name='Romanian', - iso_code='ro', - use_ascii=True, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=u'ăâîșțĂÂÎȘȚ', - wiki_start_pages=[u'Pagina_principală']), - 'Russian': Language(name='Russian', - iso_code='ru', - use_ascii=False, - charsets=['ISO-8859-5', 'WINDOWS-1251', - 'KOI8-R', 'MacCyrillic', 'IBM866', - 'IBM855'], - alphabet=(u'абвгдеёжзийклмнопрÑтуфхцчшщъыьÑÑŽÑ' - u'ÐБВГДЕÐЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯ'), - wiki_start_pages=[u'ЗаглавнаÑ_Ñтраница']), - 'Slovak': Language(name='Slovak', - iso_code='sk', - use_ascii=True, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=u'áäÄÄéíĺľňóôŕšťúýžÃÄČĎÉÃĹĽŇÓÔŔŠŤÚÃŽ', - wiki_start_pages=[u'Hlavná_stránka']), - 'Slovene': Language(name='Slovene', - iso_code='sl', - # Q, W, X, Y are only used for foreign words. - use_ascii=False, - charsets=['ISO-8859-2', 'WINDOWS-1250'], - alphabet=(u'abcÄdefghijklmnoprsÅ¡tuvzž' - u'ABCÄŒDEFGHIJKLMNOPRSÅ TUVZŽ'), - wiki_start_pages=[u'Glavna_stran']), - # Serbian can be written in both Latin and Cyrillic, but there's no - # simple way to get the Latin alphabet pages from Wikipedia through - # the API, so for now we just support Cyrillic. 
- 'Serbian': Language(name='Serbian', - iso_code='sr', - alphabet=(u'ÐБВГДЂЕЖЗИЈКЛЉМÐЊОПРСТЋУФХЦЧÐШ' - u'абвгдђежзијклљмнњопрÑтћуфхцчџш'), - charsets=['ISO-8859-5', 'WINDOWS-1251', - 'MacCyrillic', 'IBM855'], - wiki_start_pages=[u'Главна_Ñтрана']), - 'Thai': Language(name='Thai', - iso_code='th', - use_ascii=False, - charsets=['ISO-8859-11', 'TIS-620', 'CP874'], - alphabet=u'à¸à¸‚ฃคฅฆงจฉชซฌà¸à¸Žà¸à¸à¸‘ฒณดตถทธนบปผà¸à¸žà¸Ÿà¸ à¸¡à¸¢à¸£à¸¤à¸¥à¸¦à¸§à¸¨à¸©à¸ªà¸«à¸¬à¸­à¸®à¸¯à¸°à¸±à¸²à¸³à¸´à¸µà¸¶à¸·à¸ºà¸¸à¸¹à¸¿à¹€à¹à¹‚ใไๅๆ็่้๊๋์à¹à¹Žà¹à¹à¹‘๒๓๔๕๖๗๘๙๚๛', - wiki_start_pages=[u'หน้าหลัà¸']), - 'Turkish': Language(name='Turkish', - iso_code='tr', - # Q, W, and X are not used by Turkish - use_ascii=False, - charsets=['ISO-8859-3', 'ISO-8859-9', - 'WINDOWS-1254'], - alphabet=(u'abcçdefgÄŸhıijklmnoöprsÅŸtuüvyzâîû' - u'ABCÇDEFGÄžHIÄ°JKLMNOÖPRSÅžTUÃœVYZÂÎÛ'), - wiki_start_pages=[u'Ana_Sayfa']), - 'Vietnamese': Language(name='Vietnamese', - iso_code='vi', - use_ascii=False, - # Windows-1258 is the only common 8-bit - # Vietnamese encoding supported by Python. - # From Wikipedia: - # For systems that lack support for Unicode, - # dozens of 8-bit Vietnamese code pages are - # available.[1] The most common are VISCII - # (TCVN 5712:1993), VPS, and Windows-1258.[3] - # Where ASCII is required, such as when - # ensuring readability in plain text e-mail, - # Vietnamese letters are often encoded - # according to Vietnamese Quoted-Readable - # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4] - # though usage of either variable-width - # scheme has declined dramatically following - # the adoption of Unicode on the World Wide - # Web. - charsets=['WINDOWS-1258'], - alphabet=(u'aăâbcdÄ‘eêghiklmnoôơpqrstuÆ°vxy' - u'AĂÂBCDÄEÊGHIKLMNOÔƠPQRSTUƯVXY'), - wiki_start_pages=[u'Chữ_Quốc_ngữ']), - } diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py deleted file mode 100644 index 46ba835..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from collections import namedtuple - -from .charsetprober import CharSetProber -from .enums import CharacterCategory, ProbingState, SequenceLikelihood - - -SingleByteCharSetModel = namedtuple('SingleByteCharSetModel', - ['charset_name', - 'language', - 'char_to_order_map', - 'language_model', - 'typical_positive_ratio', - 'keep_ascii_letters', - 'alphabet']) - - -class SingleByteCharSetProber(CharSetProber): - SAMPLE_SIZE = 64 - SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 - POSITIVE_SHORTCUT_THRESHOLD = 0.95 - NEGATIVE_SHORTCUT_THRESHOLD = 0.05 - - def __init__(self, model, reversed=False, name_prober=None): - super(SingleByteCharSetProber, self).__init__() - self._model = model - # TRUE if we need to reverse every pair in the model lookup - self._reversed = reversed - # Optional auxiliary prober for name decision - self._name_prober = name_prober - self._last_order = None - self._seq_counters = None - self._total_seqs = None - self._total_char = None - self._freq_char = None - self.reset() - - def reset(self): - super(SingleByteCharSetProber, self).reset() - # char order of last character - self._last_order = 255 - self._seq_counters = [0] * SequenceLikelihood.get_num_categories() - self._total_seqs = 0 - self._total_char = 0 - # characters that fall in our sampling range - self._freq_char = 0 - - @property - def charset_name(self): - if self._name_prober: - return self._name_prober.charset_name - else: - return self._model.charset_name - - @property - def language(self): - if self._name_prober: - return self._name_prober.language - else: - return self._model.language - - def feed(self, byte_str): - # TODO: Make filter_international_words keep things in self.alphabet - if not self._model.keep_ascii_letters: - byte_str = self.filter_international_words(byte_str) - if not byte_str: - return self.state - char_to_order_map = self._model.char_to_order_map - language_model = self._model.language_model - for char in byte_str: - order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) - # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but - # CharacterCategory.SYMBOL is actually 253, so we use CONTROL - # to make it closer to the original intent. The only difference - # is whether or not we count digits and control characters for - # _total_char purposes. - if order < CharacterCategory.CONTROL: - self._total_char += 1 - # TODO: Follow uchardet's lead and discount confidence for frequent - # control characters. 
- # See https://github.com/BYVoid/uchardet/commit/55b4f23971db61 - if order < self.SAMPLE_SIZE: - self._freq_char += 1 - if self._last_order < self.SAMPLE_SIZE: - self._total_seqs += 1 - if not self._reversed: - lm_cat = language_model[self._last_order][order] - else: - lm_cat = language_model[order][self._last_order] - self._seq_counters[lm_cat] += 1 - self._last_order = order - - charset_name = self._model.charset_name - if self.state == ProbingState.DETECTING: - if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: - confidence = self.get_confidence() - if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, we have a winner', - charset_name, confidence) - self._state = ProbingState.FOUND_IT - elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', charset_name, - confidence, - self.NEGATIVE_SHORTCUT_THRESHOLD) - self._state = ProbingState.NOT_ME - - return self.state - - def get_confidence(self): - r = 0.01 - if self._total_seqs > 0: - r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / - self._total_seqs / self._model.typical_positive_ratio) - r = r * self._freq_char / self._total_char - if r >= 1.0: - r = 0.99 - return r diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py deleted file mode 100644 index bdeef4e..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py +++ /dev/null @@ -1,83 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .hebrewprober import HebrewProber -from .langbulgarianmodel import (ISO_8859_5_BULGARIAN_MODEL, - WINDOWS_1251_BULGARIAN_MODEL) -from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL -from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL -# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL, -# WINDOWS_1250_HUNGARIAN_MODEL) -from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL, - ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL, - MACCYRILLIC_RUSSIAN_MODEL, - WINDOWS_1251_RUSSIAN_MODEL) -from .langthaimodel import TIS_620_THAI_MODEL -from .langturkishmodel import ISO_8859_9_TURKISH_MODEL -from .sbcharsetprober import SingleByteCharSetProber - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - super(SBCSGroupProber, self).__init__() - hebrew_prober = HebrewProber() - logical_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, - False, hebrew_prober) - # TODO: See if using ISO-8859-8 Hebrew model works better here, since - # it's actually the visual one - visual_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, - True, hebrew_prober) - hebrew_prober.set_model_probers(logical_hebrew_prober, - visual_hebrew_prober) - # TODO: ORDER MATTERS HERE. I changed the order vs what was in master - # and several tests failed that did not before. Some thought - # should be put into the ordering, and we should consider making - # order not matter here, because that is very counter-intuitive. - self.probers = [ - SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL), - SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL), - SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL), - SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL), - SingleByteCharSetProber(IBM866_RUSSIAN_MODEL), - SingleByteCharSetProber(IBM855_RUSSIAN_MODEL), - SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL), - SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL), - SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL), - SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL), - # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) - # after we retrain model. - # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL), - # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL), - SingleByteCharSetProber(TIS_620_THAI_MODEL), - SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL), - hebrew_prober, - logical_hebrew_prober, - visual_hebrew_prober, - ] - self.reset() diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/sjisprober.py b/venv/Lib/site-packages/pip/_vendor/chardet/sjisprober.py deleted file mode 100644 index 9e29623..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/sjisprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import SJISDistributionAnalysis -from .jpcntx import SJISContextAnalysis -from .mbcssm import SJIS_SM_MODEL -from .enums import ProbingState, MachineState - - -class SJISProber(MultiByteCharSetProber): - def __init__(self): - super(SJISProber, self).__init__() - self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) - self.distribution_analyzer = SJISDistributionAnalysis() - self.context_analyzer = SJISContextAnalysis() - self.reset() - - def reset(self): - super(SJISProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return self.context_analyzer.charset_name - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char[2 - char_len:], - char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 - - char_len], char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/universaldetector.py b/venv/Lib/site-packages/pip/_vendor/chardet/universaldetector.py deleted file mode 100644 index 055a8ac..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/universaldetector.py +++ /dev/null @@ -1,286 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. 
All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### -""" -Module containing the UniversalDetector detector class, which is the primary -class a user of ``chardet`` should use. - -:author: Mark Pilgrim (initial port to Python) -:author: Shy Shalom (original C code) -:author: Dan Blanchard (major refactoring for 3.0) -:author: Ian Cordasco -""" - - -import codecs -import logging -import re - -from .charsetgroupprober import CharSetGroupProber -from .enums import InputState, LanguageFilter, ProbingState -from .escprober import EscCharSetProber -from .latin1prober import Latin1Prober -from .mbcsgroupprober import MBCSGroupProber -from .sbcsgroupprober import SBCSGroupProber - - -class UniversalDetector(object): - """ - The ``UniversalDetector`` class underlies the ``chardet.detect`` function - and coordinates all of the different charset probers. - - To get a ``dict`` containing an encoding and its confidence, you can simply - run: - - .. code:: - - u = UniversalDetector() - u.feed(some_bytes) - u.close() - detected = u.result - - """ - - MINIMUM_THRESHOLD = 0.20 - HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') - ESC_DETECTOR = re.compile(b'(\033|~{)') - WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') - ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', - 'iso-8859-2': 'Windows-1250', - 'iso-8859-5': 'Windows-1251', - 'iso-8859-6': 'Windows-1256', - 'iso-8859-7': 'Windows-1253', - 'iso-8859-8': 'Windows-1255', - 'iso-8859-9': 'Windows-1254', - 'iso-8859-13': 'Windows-1257'} - - def __init__(self, lang_filter=LanguageFilter.ALL): - self._esc_charset_prober = None - self._charset_probers = [] - self.result = None - self.done = None - self._got_data = None - self._input_state = None - self._last_char = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - self._has_win_bytes = None - self.reset() - - def reset(self): - """ - Reset the UniversalDetector and all of its probers back to their - initial states. This is called by ``__init__``, so you only need to - call this directly in between analyses of different documents. - """ - self.result = {'encoding': None, 'confidence': 0.0, 'language': None} - self.done = False - self._got_data = False - self._has_win_bytes = False - self._input_state = InputState.PURE_ASCII - self._last_char = b'' - if self._esc_charset_prober: - self._esc_charset_prober.reset() - for prober in self._charset_probers: - prober.reset() - - def feed(self, byte_str): - """ - Takes a chunk of a document and feeds it through all of the relevant - charset probers. 
- - After calling ``feed``, you can check the value of the ``done`` - attribute to see if you need to continue feeding the - ``UniversalDetector`` more data, or if it has made a prediction - (in the ``result`` attribute). - - .. note:: - You should always call ``close`` when you're done feeding in your - document if ``done`` is not already ``True``. - """ - if self.done: - return - - if not len(byte_str): - return - - if not isinstance(byte_str, bytearray): - byte_str = bytearray(byte_str) - - # First check for known BOMs, since these are guaranteed to be correct - if not self._got_data: - # If the data starts with BOM, we know it is UTF - if byte_str.startswith(codecs.BOM_UTF8): - # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8-SIG", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_UTF32_LE, - codecs.BOM_UTF32_BE)): - # FF FE 00 00 UTF-32, little-endian BOM - # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\xFE\xFF\x00\x00'): - # FE FF 00 00 UCS-4, unusual octet order BOM (3412) - self.result = {'encoding': "X-ISO-10646-UCS-4-3412", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\x00\x00\xFF\xFE'): - # 00 00 FF FE UCS-4, unusual octet order BOM (2143) - self.result = {'encoding': "X-ISO-10646-UCS-4-2143", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): - # FF FE UTF-16, little endian BOM - # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", - 'confidence': 1.0, - 'language': ''} - - self._got_data = True - if self.result['encoding'] is not None: - self.done = True - return - - # If none of those matched and we've only see ASCII so far, check - # for high bytes and escape sequences - if self._input_state == InputState.PURE_ASCII: - if self.HIGH_BYTE_DETECTOR.search(byte_str): - self._input_state = InputState.HIGH_BYTE - elif self._input_state == InputState.PURE_ASCII and \ - self.ESC_DETECTOR.search(self._last_char + byte_str): - self._input_state = InputState.ESC_ASCII - - self._last_char = byte_str[-1:] - - # If we've seen escape sequences, use the EscCharSetProber, which - # uses a simple state machine to check for known escape sequences in - # HZ and ISO-2022 encodings, since those are the only encodings that - # use such sequences. - if self._input_state == InputState.ESC_ASCII: - if not self._esc_charset_prober: - self._esc_charset_prober = EscCharSetProber(self.lang_filter) - if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': - self._esc_charset_prober.charset_name, - 'confidence': - self._esc_charset_prober.get_confidence(), - 'language': - self._esc_charset_prober.language} - self.done = True - # If we've seen high bytes (i.e., those with values greater than 127), - # we need to do more complicated checks using all our multi-byte and - # single-byte probers that are left. The single-byte probers - # use character bigram distributions to determine the encoding, whereas - # the multi-byte probers use a combination of character unigram and - # bigram distributions. 
- elif self._input_state == InputState.HIGH_BYTE: - if not self._charset_probers: - self._charset_probers = [MBCSGroupProber(self.lang_filter)] - # If we're checking non-CJK encodings, use single-byte prober - if self.lang_filter & LanguageFilter.NON_CJK: - self._charset_probers.append(SBCSGroupProber()) - self._charset_probers.append(Latin1Prober()) - for prober in self._charset_probers: - if prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': prober.charset_name, - 'confidence': prober.get_confidence(), - 'language': prober.language} - self.done = True - break - if self.WIN_BYTE_DETECTOR.search(byte_str): - self._has_win_bytes = True - - def close(self): - """ - Stop analyzing the current document and come up with a final - prediction. - - :returns: The ``result`` attribute, a ``dict`` with the keys - `encoding`, `confidence`, and `language`. - """ - # Don't bother with checks if we're already done - if self.done: - return self.result - self.done = True - - if not self._got_data: - self.logger.debug('no data received!') - - # Default to ASCII if it is all we've seen so far - elif self._input_state == InputState.PURE_ASCII: - self.result = {'encoding': 'ascii', - 'confidence': 1.0, - 'language': ''} - - # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD - elif self._input_state == InputState.HIGH_BYTE: - prober_confidence = None - max_prober_confidence = 0.0 - max_prober = None - for prober in self._charset_probers: - if not prober: - continue - prober_confidence = prober.get_confidence() - if prober_confidence > max_prober_confidence: - max_prober_confidence = prober_confidence - max_prober = prober - if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): - charset_name = max_prober.charset_name - lower_charset_name = max_prober.charset_name.lower() - confidence = max_prober.get_confidence() - # Use Windows encoding name instead of ISO-8859 if we saw any - # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if self._has_win_bytes: - charset_name = self.ISO_WIN_MAP.get(lower_charset_name, - charset_name) - self.result = {'encoding': charset_name, - 'confidence': confidence, - 'language': max_prober.language} - - # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() <= logging.DEBUG: - if self.result['encoding'] is None: - self.logger.debug('no probers hit minimum threshold') - for group_prober in self._charset_probers: - if not group_prober: - continue - if isinstance(group_prober, CharSetGroupProber): - for prober in group_prober.probers: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - else: - self.logger.debug('%s %s confidence = %s', - group_prober.charset_name, - group_prober.language, - group_prober.get_confidence()) - return self.result diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/utf8prober.py b/venv/Lib/site-packages/pip/_vendor/chardet/utf8prober.py deleted file mode 100644 index 6c3196c..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/utf8prober.py +++ /dev/null @@ -1,82 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState -from .codingstatemachine import CodingStateMachine -from .mbcssm import UTF8_SM_MODEL - - - -class UTF8Prober(CharSetProber): - ONE_CHAR_PROB = 0.5 - - def __init__(self): - super(UTF8Prober, self).__init__() - self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) - self._num_mb_chars = None - self.reset() - - def reset(self): - super(UTF8Prober, self).reset() - self.coding_sm.reset() - self._num_mb_chars = 0 - - @property - def charset_name(self): - return "utf-8" - - @property - def language(self): - return "" - - def feed(self, byte_str): - for c in byte_str: - coding_state = self.coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - if self.coding_sm.get_current_charlen() >= 2: - self._num_mb_chars += 1 - - if self.state == ProbingState.DETECTING: - if self.get_confidence() > self.SHORTCUT_THRESHOLD: - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - unlike = 0.99 - if self._num_mb_chars < 6: - unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars - return 1.0 - unlike - else: - return unlike diff --git a/venv/Lib/site-packages/pip/_vendor/chardet/version.py b/venv/Lib/site-packages/pip/_vendor/chardet/version.py deleted file mode 100644 index 70369b9..0000000 --- a/venv/Lib/site-packages/pip/_vendor/chardet/version.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - -__version__ = "4.0.0" -VERSION = __version__.split('.') diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__init__.py b/venv/Lib/site-packages/pip/_vendor/colorama/__init__.py deleted file mode 100644 index b149ed7..0000000 --- a/venv/Lib/site-packages/pip/_vendor/colorama/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-from .initialise import init, deinit, reinit, colorama_text -from .ansi import Fore, Back, Style, Cursor -from .ansitowin32 import AnsiToWin32 - -__version__ = '0.4.4' diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 6a7c9fc..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc deleted file mode 100644 index 07deccb..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc deleted file mode 100644 index 0bc4f21..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc deleted file mode 100644 index 957c3f2..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc deleted file mode 100644 index d57b37f..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc deleted file mode 100644 index 41eebf1..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/ansi.py b/venv/Lib/site-packages/pip/_vendor/colorama/ansi.py deleted file mode 100644 index 11ec695..0000000 --- a/venv/Lib/site-packages/pip/_vendor/colorama/ansi.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -''' -This module generates ANSI character codes to printing colors to terminals. -See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' - -CSI = '\033[' -OSC = '\033]' -BEL = '\a' - - -def code_to_chars(code): - return CSI + str(code) + 'm' - -def set_title(title): - return OSC + '2;' + title + BEL - -def clear_screen(mode=2): - return CSI + str(mode) + 'J' - -def clear_line(mode=2): - return CSI + str(mode) + 'K' - - -class AnsiCodes(object): - def __init__(self): - # the subclasses declare class attributes which are numbers. 
- # Upon instantiation we define instance attributes, which are the same - # as the class attributes but wrapped with the ANSI escape sequence - for name in dir(self): - if not name.startswith('_'): - value = getattr(self, name) - setattr(self, name, code_to_chars(value)) - - -class AnsiCursor(object): - def UP(self, n=1): - return CSI + str(n) + 'A' - def DOWN(self, n=1): - return CSI + str(n) + 'B' - def FORWARD(self, n=1): - return CSI + str(n) + 'C' - def BACK(self, n=1): - return CSI + str(n) + 'D' - def POS(self, x=1, y=1): - return CSI + str(y) + ';' + str(x) + 'H' - - -class AnsiFore(AnsiCodes): - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 - MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 90 - LIGHTRED_EX = 91 - LIGHTGREEN_EX = 92 - LIGHTYELLOW_EX = 93 - LIGHTBLUE_EX = 94 - LIGHTMAGENTA_EX = 95 - LIGHTCYAN_EX = 96 - LIGHTWHITE_EX = 97 - - -class AnsiBack(AnsiCodes): - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 - MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 100 - LIGHTRED_EX = 101 - LIGHTGREEN_EX = 102 - LIGHTYELLOW_EX = 103 - LIGHTBLUE_EX = 104 - LIGHTMAGENTA_EX = 105 - LIGHTCYAN_EX = 106 - LIGHTWHITE_EX = 107 - - -class AnsiStyle(AnsiCodes): - BRIGHT = 1 - DIM = 2 - NORMAL = 22 - RESET_ALL = 0 - -Fore = AnsiFore() -Back = AnsiBack() -Style = AnsiStyle() -Cursor = AnsiCursor() diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py b/venv/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py deleted file mode 100644 index 6039a05..0000000 --- a/venv/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import re -import sys -import os - -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL -from .winterm import WinTerm, WinColor, WinStyle -from .win32 import windll, winapi_test - - -winterm = None -if windll is not None: - winterm = WinTerm() - - -class StreamWrapper(object): - ''' - Wraps a stream (such as stdout), acting as a transparent proxy for all - attribute access apart from method 'write()', which is delegated to our - Converter instance. - ''' - def __init__(self, wrapped, converter): - # double-underscore everything to prevent clashes with names of - # attributes on the wrapped stream object. 
- self.__wrapped = wrapped - self.__convertor = converter - - def __getattr__(self, name): - return getattr(self.__wrapped, name) - - def __enter__(self, *args, **kwargs): - # special method lookup bypasses __getattr__/__getattribute__, see - # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit - # thus, contextlib magic methods are not proxied via __getattr__ - return self.__wrapped.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.__wrapped.__exit__(*args, **kwargs) - - def write(self, text): - self.__convertor.write(text) - - def isatty(self): - stream = self.__wrapped - if 'PYCHARM_HOSTED' in os.environ: - if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): - return True - try: - stream_isatty = stream.isatty - except AttributeError: - return False - else: - return stream_isatty() - - @property - def closed(self): - stream = self.__wrapped - try: - return stream.closed - except AttributeError: - return True - - -class AnsiToWin32(object): - ''' - Implements a 'write()' method which, on Windows, will strip ANSI character - sequences from the text, and if outputting to a tty, will convert them into - win32 function calls. - ''' - ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command - - def __init__(self, wrapped, convert=None, strip=None, autoreset=False): - # The wrapped stream (normally sys.stdout or sys.stderr) - self.wrapped = wrapped - - # should we reset colors to defaults after every .write() - self.autoreset = autoreset - - # create the proxy wrapping our output stream - self.stream = StreamWrapper(wrapped, self) - - on_windows = os.name == 'nt' - # We test if the WinAPI works, because even if we are on Windows - # we may be using a terminal that doesn't support the WinAPI - # (e.g. Cygwin Terminal). In this case it's up to the terminal - # to support the ANSI codes. - conversion_supported = on_windows and winapi_test() - - # should we strip ANSI sequences from our output? - if strip is None: - strip = conversion_supported or (not self.stream.closed and not self.stream.isatty()) - self.strip = strip - - # should we should convert ANSI sequences into win32 calls? - if convert is None: - convert = conversion_supported and not self.stream.closed and self.stream.isatty() - self.convert = convert - - # dict of ansi codes to win32 functions and parameters - self.win32_calls = self.get_win32_calls() - - # are we wrapping stderr? - self.on_stderr = self.wrapped is sys.stderr - - def should_wrap(self): - ''' - True if this class is actually needed. If false, then the output - stream will not be affected, nor will win32 calls be issued, so - wrapping stdout is not actually required. 
This will generally be - False on non-Windows platforms, unless optional functionality like - autoreset has been requested using kwargs to init() - ''' - return self.convert or self.strip or self.autoreset - - def get_win32_calls(self): - if self.convert and winterm: - return { - AnsiStyle.RESET_ALL: (winterm.reset_all, ), - AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), - AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), - AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), - AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), - AnsiFore.RED: (winterm.fore, WinColor.RED), - AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), - AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), - AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), - AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), - AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), - AnsiFore.WHITE: (winterm.fore, WinColor.GREY), - AnsiFore.RESET: (winterm.fore, ), - AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), - AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), - AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), - AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), - AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), - AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), - AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), - AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), - AnsiBack.BLACK: (winterm.back, WinColor.BLACK), - AnsiBack.RED: (winterm.back, WinColor.RED), - AnsiBack.GREEN: (winterm.back, WinColor.GREEN), - AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), - AnsiBack.BLUE: (winterm.back, WinColor.BLUE), - AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), - AnsiBack.CYAN: (winterm.back, WinColor.CYAN), - AnsiBack.WHITE: (winterm.back, WinColor.GREY), - AnsiBack.RESET: (winterm.back, ), - AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), - AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), - AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), - AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), - AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), - AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), - AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), - AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), - } - return dict() - - def write(self, text): - if self.strip or self.convert: - self.write_and_convert(text) - else: - self.wrapped.write(text) - self.wrapped.flush() - if self.autoreset: - self.reset_all() - - - def reset_all(self): - if self.convert: - self.call_win32('m', (0,)) - elif not self.strip and not self.stream.closed: - self.wrapped.write(Style.RESET_ALL) - - - def write_and_convert(self, text): - ''' - Write the given text to our wrapped stream, stripping any ANSI - sequences from the text, and optionally converting them into win32 - calls. 
- ''' - cursor = 0 - text = self.convert_osc(text) - for match in self.ANSI_CSI_RE.finditer(text): - start, end = match.span() - self.write_plain_text(text, cursor, start) - self.convert_ansi(*match.groups()) - cursor = end - self.write_plain_text(text, cursor, len(text)) - - - def write_plain_text(self, text, start, end): - if start < end: - self.wrapped.write(text[start:end]) - self.wrapped.flush() - - - def convert_ansi(self, paramstring, command): - if self.convert: - params = self.extract_params(command, paramstring) - self.call_win32(command, params) - - - def extract_params(self, command, paramstring): - if command in 'Hf': - params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) - while len(params) < 2: - # defaults: - params = params + (1,) - else: - params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) - if len(params) == 0: - # defaults: - if command in 'JKm': - params = (0,) - elif command in 'ABCD': - params = (1,) - - return params - - - def call_win32(self, command, params): - if command == 'm': - for param in params: - if param in self.win32_calls: - func_args = self.win32_calls[param] - func = func_args[0] - args = func_args[1:] - kwargs = dict(on_stderr=self.on_stderr) - func(*args, **kwargs) - elif command in 'J': - winterm.erase_screen(params[0], on_stderr=self.on_stderr) - elif command in 'K': - winterm.erase_line(params[0], on_stderr=self.on_stderr) - elif command in 'Hf': # cursor position - absolute - winterm.set_cursor_position(params, on_stderr=self.on_stderr) - elif command in 'ABCD': # cursor position - relative - n = params[0] - # A - up, B - down, C - forward, D - back - x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] - winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) - - - def convert_osc(self, text): - for match in self.ANSI_OSC_RE.finditer(text): - start, end = match.span() - text = text[:start] + text[end:] - paramstring, command = match.groups() - if command == BEL: - if paramstring.count(";") == 1: - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) - return text diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/initialise.py b/venv/Lib/site-packages/pip/_vendor/colorama/initialise.py deleted file mode 100644 index 430d066..0000000 --- a/venv/Lib/site-packages/pip/_vendor/colorama/initialise.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import atexit -import contextlib -import sys - -from .ansitowin32 import AnsiToWin32 - - -orig_stdout = None -orig_stderr = None - -wrapped_stdout = None -wrapped_stderr = None - -atexit_done = False - - -def reset_all(): - if AnsiToWin32 is not None: # Issue #74: objects might become None at exit - AnsiToWin32(orig_stdout).reset_all() - - -def init(autoreset=False, convert=None, strip=None, wrap=True): - - if not wrap and any([autoreset, convert, strip]): - raise ValueError('wrap=False conflicts with any other arg=True') - - global wrapped_stdout, wrapped_stderr - global orig_stdout, orig_stderr - - orig_stdout = sys.stdout - orig_stderr = sys.stderr - - if sys.stdout is None: - wrapped_stdout = None - else: - sys.stdout = wrapped_stdout = \ - wrap_stream(orig_stdout, convert, strip, autoreset, wrap) - if sys.stderr is None: - wrapped_stderr = None - else: - sys.stderr = wrapped_stderr = \ - wrap_stream(orig_stderr, convert, strip, autoreset, wrap) - - global atexit_done - if not atexit_done: - atexit.register(reset_all) - atexit_done = True - - -def deinit(): - if orig_stdout is not None: - sys.stdout = orig_stdout - if orig_stderr is not None: - sys.stderr = orig_stderr - - -@contextlib.contextmanager -def colorama_text(*args, **kwargs): - init(*args, **kwargs) - try: - yield - finally: - deinit() - - -def reinit(): - if wrapped_stdout is not None: - sys.stdout = wrapped_stdout - if wrapped_stderr is not None: - sys.stderr = wrapped_stderr - - -def wrap_stream(stream, convert, strip, autoreset, wrap): - if wrap: - wrapper = AnsiToWin32(stream, - convert=convert, strip=strip, autoreset=autoreset) - if wrapper.should_wrap(): - stream = wrapper.stream - return stream diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/win32.py b/venv/Lib/site-packages/pip/_vendor/colorama/win32.py deleted file mode 100644 index c2d8360..0000000 --- a/venv/Lib/site-packages/pip/_vendor/colorama/win32.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
- -# from winbase.h -STDOUT = -11 -STDERR = -12 - -try: - import ctypes - from ctypes import LibraryLoader - windll = LibraryLoader(ctypes.WinDLL) - from ctypes import wintypes -except (AttributeError, ImportError): - windll = None - SetConsoleTextAttribute = lambda *_: None - winapi_test = lambda *_: None -else: - from ctypes import byref, Structure, c_char, POINTER - - COORD = wintypes._COORD - - class CONSOLE_SCREEN_BUFFER_INFO(Structure): - """struct in wincon.h.""" - _fields_ = [ - ("dwSize", COORD), - ("dwCursorPosition", COORD), - ("wAttributes", wintypes.WORD), - ("srWindow", wintypes.SMALL_RECT), - ("dwMaximumWindowSize", COORD), - ] - def __str__(self): - return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( - self.dwSize.Y, self.dwSize.X - , self.dwCursorPosition.Y, self.dwCursorPosition.X - , self.wAttributes - , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right - , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X - ) - - _GetStdHandle = windll.kernel32.GetStdHandle - _GetStdHandle.argtypes = [ - wintypes.DWORD, - ] - _GetStdHandle.restype = wintypes.HANDLE - - _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo - _GetConsoleScreenBufferInfo.argtypes = [ - wintypes.HANDLE, - POINTER(CONSOLE_SCREEN_BUFFER_INFO), - ] - _GetConsoleScreenBufferInfo.restype = wintypes.BOOL - - _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute - _SetConsoleTextAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - ] - _SetConsoleTextAttribute.restype = wintypes.BOOL - - _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition - _SetConsoleCursorPosition.argtypes = [ - wintypes.HANDLE, - COORD, - ] - _SetConsoleCursorPosition.restype = wintypes.BOOL - - _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA - _FillConsoleOutputCharacterA.argtypes = [ - wintypes.HANDLE, - c_char, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputCharacterA.restype = wintypes.BOOL - - _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute - _FillConsoleOutputAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputAttribute.restype = wintypes.BOOL - - _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW - _SetConsoleTitleW.argtypes = [ - wintypes.LPCWSTR - ] - _SetConsoleTitleW.restype = wintypes.BOOL - - def _winapi_test(handle): - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return bool(success) - - def winapi_test(): - return any(_winapi_test(h) for h in - (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) - - def GetConsoleScreenBufferInfo(stream_id=STDOUT): - handle = _GetStdHandle(stream_id) - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return csbi - - def SetConsoleTextAttribute(stream_id, attrs): - handle = _GetStdHandle(stream_id) - return _SetConsoleTextAttribute(handle, attrs) - - def SetConsoleCursorPosition(stream_id, position, adjust=True): - position = COORD(*position) - # If the position is out of range, do nothing. - if position.Y <= 0 or position.X <= 0: - return - # Adjust for Windows' SetConsoleCursorPosition: - # 1. being 0-based, while ANSI is 1-based. - # 2. expecting (x,y), while ANSI uses (y,x). 
- adjusted_position = COORD(position.Y - 1, position.X - 1) - if adjust: - # Adjust for viewport's scroll position - sr = GetConsoleScreenBufferInfo(STDOUT).srWindow - adjusted_position.Y += sr.Top - adjusted_position.X += sr.Left - # Resume normal processing - handle = _GetStdHandle(stream_id) - return _SetConsoleCursorPosition(handle, adjusted_position) - - def FillConsoleOutputCharacter(stream_id, char, length, start): - handle = _GetStdHandle(stream_id) - char = c_char(char.encode()) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - success = _FillConsoleOutputCharacterA( - handle, char, length, start, byref(num_written)) - return num_written.value - - def FillConsoleOutputAttribute(stream_id, attr, length, start): - ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' - handle = _GetStdHandle(stream_id) - attribute = wintypes.WORD(attr) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - return _FillConsoleOutputAttribute( - handle, attribute, length, start, byref(num_written)) - - def SetConsoleTitle(title): - return _SetConsoleTitleW(title) diff --git a/venv/Lib/site-packages/pip/_vendor/colorama/winterm.py b/venv/Lib/site-packages/pip/_vendor/colorama/winterm.py deleted file mode 100644 index 0fdb4ec..0000000 --- a/venv/Lib/site-packages/pip/_vendor/colorama/winterm.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from . import win32 - - -# from wincon.h -class WinColor(object): - BLACK = 0 - BLUE = 1 - GREEN = 2 - CYAN = 3 - RED = 4 - MAGENTA = 5 - YELLOW = 6 - GREY = 7 - -# from wincon.h -class WinStyle(object): - NORMAL = 0x00 # dim text, dim background - BRIGHT = 0x08 # bright text, dim background - BRIGHT_BACKGROUND = 0x80 # dim text, bright background - -class WinTerm(object): - - def __init__(self): - self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes - self.set_attrs(self._default) - self._default_fore = self._fore - self._default_back = self._back - self._default_style = self._style - # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. - # So that LIGHT_EX colors and BRIGHT style do not clobber each other, - # we track them separately, since LIGHT_EX is overwritten by Fore/Back - # and BRIGHT is overwritten by Style codes. 
- self._light = 0 - - def get_attrs(self): - return self._fore + self._back * 16 + (self._style | self._light) - - def set_attrs(self, value): - self._fore = value & 7 - self._back = (value >> 4) & 7 - self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) - - def reset_all(self, on_stderr=None): - self.set_attrs(self._default) - self.set_console(attrs=self._default) - self._light = 0 - - def fore(self, fore=None, light=False, on_stderr=False): - if fore is None: - fore = self._default_fore - self._fore = fore - # Emulate LIGHT_EX with BRIGHT Style - if light: - self._light |= WinStyle.BRIGHT - else: - self._light &= ~WinStyle.BRIGHT - self.set_console(on_stderr=on_stderr) - - def back(self, back=None, light=False, on_stderr=False): - if back is None: - back = self._default_back - self._back = back - # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style - if light: - self._light |= WinStyle.BRIGHT_BACKGROUND - else: - self._light &= ~WinStyle.BRIGHT_BACKGROUND - self.set_console(on_stderr=on_stderr) - - def style(self, style=None, on_stderr=False): - if style is None: - style = self._default_style - self._style = style - self.set_console(on_stderr=on_stderr) - - def set_console(self, attrs=None, on_stderr=False): - if attrs is None: - attrs = self.get_attrs() - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleTextAttribute(handle, attrs) - - def get_position(self, handle): - position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition - # Because Windows coordinates are 0-based, - # and win32.SetConsoleCursorPosition expects 1-based. - position.X += 1 - position.Y += 1 - return position - - def set_cursor_position(self, position=None, on_stderr=False): - if position is None: - # I'm not currently tracking the position, so there is no default. - # position = self.get_position() - return - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleCursorPosition(handle, position) - - def cursor_adjust(self, x, y, on_stderr=False): - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - position = self.get_position(handle) - adjusted_position = (position.Y + y, position.X + x) - win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) - - def erase_screen(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the screen. - # 1 should clear from the cursor to the beginning of the screen. 
- # 2 should clear the entire screen, and move cursor to (1,1) - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - # get the number of character cells in the current buffer - cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y - # get number of character cells before current cursor position - cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = cells_in_screen - cells_before_cursor - elif mode == 1: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_before_cursor - elif mode == 2: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_in_screen - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - if mode == 2: - # put the cursor where needed - win32.SetConsoleCursorPosition(handle, (1, 1)) - - def erase_line(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the line. - # 1 should clear from the cursor to the beginning of the line. - # 2 should clear the entire line. - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X - elif mode == 1: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwCursorPosition.X - elif mode == 2: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwSize.X - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - - def set_title(self, title): - win32.SetConsoleTitle(title) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__init__.py b/venv/Lib/site-packages/pip/_vendor/distlib/__init__.py deleted file mode 100644 index 1154948..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012-2019 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -import logging - -__version__ = '0.3.3' - -class DistlibException(Exception): - pass - -try: - from logging import NullHandler -except ImportError: # pragma: no cover - class NullHandler(logging.Handler): - def handle(self, record): pass - def emit(self, record): pass - def createLock(self): self.lock = None - -logger = logging.getLogger(__name__) -logger.addHandler(NullHandler()) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 1c31649..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc deleted file mode 100644 index 60c5152..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-39.pyc deleted file mode 100644 index 8119cda..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-39.pyc deleted file mode 100644 index 8106a86..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc deleted file mode 100644 index c8b36eb..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc deleted file mode 100644 index 5839932..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc deleted file mode 100644 index 5145376..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc deleted file mode 100644 index 97c86fe..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc deleted file mode 100644 index 134bd5f..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc 
b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc deleted file mode 100644 index 063d232..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-39.pyc deleted file mode 100644 index 4b812df..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-39.pyc deleted file mode 100644 index 2d25510..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index b2cd3e8..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py deleted file mode 100644 index f7dbf4c..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Modules copied from Python 3 standard libraries, for internal use only. - -Individual classes and functions are found in d2._backport.misc. Intended -usage is to always import things missing from 3.1 from that module: the -built-in/stdlib objects will be used if found. 
-""" diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index bea6e05..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc deleted file mode 100644 index 921196c..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc deleted file mode 100644 index 493b710..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc deleted file mode 100644 index c180f62..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc deleted file mode 100644 index 3e8d090..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/misc.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/misc.py deleted file mode 100644 index cfb318d..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/misc.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -"""Backports for individual classes and functions.""" - -import os -import sys - -__all__ = ['cache_from_source', 'callable', 'fsencode'] - - -try: - from imp import cache_from_source -except ImportError: - def cache_from_source(py_file, debug=__debug__): - ext = debug and 'c' or 'o' - return py_file + ext - - -try: - callable = callable -except NameError: - from collections import Callable - - def callable(obj): - return isinstance(obj, Callable) - - -try: - fsencode = os.fsencode -except AttributeError: - def fsencode(filename): - if isinstance(filename, bytes): - return filename - elif isinstance(filename, str): - return filename.encode(sys.getfilesystemencoding()) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py deleted file mode 100644 index 10ed362..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py +++ /dev/null @@ -1,764 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -"""Utility functions for copying and archiving files and directory trees. 
- -XXX The functions here don't copy the resource fork or other metadata on Mac. - -""" - -import os -import sys -import stat -from os.path import abspath -import fnmatch -try: - from collections.abc import Callable -except ImportError: - from collections import Callable -import errno -from . import tarfile - -try: - import bz2 - _BZ2_SUPPORTED = True -except ImportError: - _BZ2_SUPPORTED = False - -try: - from pwd import getpwnam -except ImportError: - getpwnam = None - -try: - from grp import getgrnam -except ImportError: - getgrnam = None - -__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", - "copytree", "move", "rmtree", "Error", "SpecialFileError", - "ExecError", "make_archive", "get_archive_formats", - "register_archive_format", "unregister_archive_format", - "get_unpack_formats", "register_unpack_format", - "unregister_unpack_format", "unpack_archive", "ignore_patterns"] - -class Error(EnvironmentError): - pass - -class SpecialFileError(EnvironmentError): - """Raised when trying to do a kind of operation (e.g. copying) which is - not supported on a special file (e.g. a named pipe)""" - -class ExecError(EnvironmentError): - """Raised when a command could not be executed""" - -class ReadError(EnvironmentError): - """Raised when an archive cannot be read""" - -class RegistryError(Exception): - """Raised when a registry operation with the archiving - and unpacking registries fails""" - - -try: - WindowsError -except NameError: - WindowsError = None - -def copyfileobj(fsrc, fdst, length=16*1024): - """copy data from file-like object fsrc to file-like object fdst""" - while 1: - buf = fsrc.read(length) - if not buf: - break - fdst.write(buf) - -def _samefile(src, dst): - # Macintosh, Unix. - if hasattr(os.path, 'samefile'): - try: - return os.path.samefile(src, dst) - except OSError: - return False - - # All other platforms: check for same pathname. - return (os.path.normcase(os.path.abspath(src)) == - os.path.normcase(os.path.abspath(dst))) - -def copyfile(src, dst): - """Copy data from src to dst""" - if _samefile(src, dst): - raise Error("`%s` and `%s` are the same file" % (src, dst)) - - for fn in [src, dst]: - try: - st = os.stat(fn) - except OSError: - # File most likely does not exist - pass - else: - # XXX What about other special files? (sockets, devices...) - if stat.S_ISFIFO(st.st_mode): - raise SpecialFileError("`%s` is a named pipe" % fn) - - with open(src, 'rb') as fsrc: - with open(dst, 'wb') as fdst: - copyfileobj(fsrc, fdst) - -def copymode(src, dst): - """Copy mode bits from src to dst""" - if hasattr(os, 'chmod'): - st = os.stat(src) - mode = stat.S_IMODE(st.st_mode) - os.chmod(dst, mode) - -def copystat(src, dst): - """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" - st = os.stat(src) - mode = stat.S_IMODE(st.st_mode) - if hasattr(os, 'utime'): - os.utime(dst, (st.st_atime, st.st_mtime)) - if hasattr(os, 'chmod'): - os.chmod(dst, mode) - if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): - try: - os.chflags(dst, st.st_flags) - except OSError as why: - if (not hasattr(errno, 'EOPNOTSUPP') or - why.errno != errno.EOPNOTSUPP): - raise - -def copy(src, dst): - """Copy data and mode bits ("cp src dst"). - - The destination may be a directory. - - """ - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - copyfile(src, dst) - copymode(src, dst) - -def copy2(src, dst): - """Copy data and all stat info ("cp -p src dst"). - - The destination may be a directory. 
- - """ - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - copyfile(src, dst) - copystat(src, dst) - -def ignore_patterns(*patterns): - """Function that can be used as copytree() ignore parameter. - - Patterns is a sequence of glob-style patterns - that are used to exclude files""" - def _ignore_patterns(path, names): - ignored_names = [] - for pattern in patterns: - ignored_names.extend(fnmatch.filter(names, pattern)) - return set(ignored_names) - return _ignore_patterns - -def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, - ignore_dangling_symlinks=False): - """Recursively copy a directory tree. - - The destination directory must not already exist. - If exception(s) occur, an Error is raised with a list of reasons. - - If the optional symlinks flag is true, symbolic links in the - source tree result in symbolic links in the destination tree; if - it is false, the contents of the files pointed to by symbolic - links are copied. If the file pointed by the symlink doesn't - exist, an exception will be added in the list of errors raised in - an Error exception at the end of the copy process. - - You can set the optional ignore_dangling_symlinks flag to true if you - want to silence this exception. Notice that this has no effect on - platforms that don't support os.symlink. - - The optional ignore argument is a callable. If given, it - is called with the `src` parameter, which is the directory - being visited by copytree(), and `names` which is the list of - `src` contents, as returned by os.listdir(): - - callable(src, names) -> ignored_names - - Since copytree() is called recursively, the callable will be - called once for each directory that is copied. It returns a - list of names relative to the `src` directory that should - not be copied. - - The optional copy_function argument is a callable that will be used - to copy each file. It will be called with the source path and the - destination path as arguments. By default, copy2() is used, but any - function that supports the same signature (like copy()) can be used. - - """ - names = os.listdir(src) - if ignore is not None: - ignored_names = ignore(src, names) - else: - ignored_names = set() - - os.makedirs(dst) - errors = [] - for name in names: - if name in ignored_names: - continue - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) - try: - if os.path.islink(srcname): - linkto = os.readlink(srcname) - if symlinks: - os.symlink(linkto, dstname) - else: - # ignore dangling symlink if the flag is on - if not os.path.exists(linkto) and ignore_dangling_symlinks: - continue - # otherwise let the copy occurs. copy2 will raise an error - copy_function(srcname, dstname) - elif os.path.isdir(srcname): - copytree(srcname, dstname, symlinks, ignore, copy_function) - else: - # Will raise a SpecialFileError for unsupported file types - copy_function(srcname, dstname) - # catch the Error from the recursive copytree so that we can - # continue with other files - except Error as err: - errors.extend(err.args[0]) - except EnvironmentError as why: - errors.append((srcname, dstname, str(why))) - try: - copystat(src, dst) - except OSError as why: - if WindowsError is not None and isinstance(why, WindowsError): - # Copying file access times may fail on Windows - pass - else: - errors.extend((src, dst, str(why))) - if errors: - raise Error(errors) - -def rmtree(path, ignore_errors=False, onerror=None): - """Recursively delete a directory tree. 
- - If ignore_errors is set, errors are ignored; otherwise, if onerror - is set, it is called to handle the error with arguments (func, - path, exc_info) where func is os.listdir, os.remove, or os.rmdir; - path is the argument to that function that caused it to fail; and - exc_info is a tuple returned by sys.exc_info(). If ignore_errors - is false and onerror is None, an exception is raised. - - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - try: - if os.path.islink(path): - # symlinks to directories are forbidden, see bug #1669 - raise OSError("Cannot call rmtree on a symbolic link") - except OSError: - onerror(os.path.islink, path, sys.exc_info()) - # can't continue even if onerror hook returns - return - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - - -def _basename(path): - # A basename() variant which first strips the trailing slash, if present. - # Thus we always get the last component of the path, even for directories. - return os.path.basename(path.rstrip(os.path.sep)) - -def move(src, dst): - """Recursively move a file or directory to another location. This is - similar to the Unix "mv" command. - - If the destination is a directory or a symlink to a directory, the source - is moved inside the directory. The destination path must not already - exist. - - If the destination already exists but is not a directory, it may be - overwritten depending on os.rename() semantics. - - If the destination is on our current filesystem, then rename() is used. - Otherwise, src is copied to the destination and then removed. - A lot more could be done here... A look at a mv.c shows a lot of - the issues this implementation glosses over. - - """ - real_dst = dst - if os.path.isdir(dst): - if _samefile(src, dst): - # We might be on a case insensitive filesystem, - # perform the rename anyway. - os.rename(src, dst) - return - - real_dst = os.path.join(dst, _basename(src)) - if os.path.exists(real_dst): - raise Error("Destination path '%s' already exists" % real_dst) - try: - os.rename(src, real_dst) - except OSError: - if os.path.isdir(src): - if _destinsrc(src, dst): - raise Error("Cannot move a directory '%s' into itself '%s'." 
% (src, dst)) - copytree(src, real_dst, symlinks=True) - rmtree(src) - else: - copy2(src, real_dst) - os.unlink(src) - -def _destinsrc(src, dst): - src = abspath(src) - dst = abspath(dst) - if not src.endswith(os.path.sep): - src += os.path.sep - if not dst.endswith(os.path.sep): - dst += os.path.sep - return dst.startswith(src) - -def _get_gid(name): - """Returns a gid, given a group name.""" - if getgrnam is None or name is None: - return None - try: - result = getgrnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _get_uid(name): - """Returns an uid, given a user name.""" - if getpwnam is None or name is None: - return None - try: - result = getpwnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None, logger=None): - """Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", or None. - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_name' + ".tar", possibly plus - the appropriate compression extension (".gz", or ".bz2"). - - Returns the output filename. - """ - tar_compression = {'gzip': 'gz', None: ''} - compress_ext = {'gzip': '.gz'} - - if _BZ2_SUPPORTED: - tar_compression['bzip2'] = 'bz2' - compress_ext['bzip2'] = '.bz2' - - # flags for compression program, each element of list will be an argument - if compress is not None and compress not in compress_ext: - raise ValueError("bad value for 'compress', or compression format not " - "supported : {0}".format(compress)) - - archive_name = base_name + '.tar' + compress_ext.get(compress, '') - archive_dir = os.path.dirname(archive_name) - - if not os.path.exists(archive_dir): - if logger is not None: - logger.info("creating %s", archive_dir) - if not dry_run: - os.makedirs(archive_dir) - - # creating the tarball - if logger is not None: - logger.info('Creating tar archive') - - uid = _get_uid(owner) - gid = _get_gid(group) - - def _set_uid_gid(tarinfo): - if gid is not None: - tarinfo.gid = gid - tarinfo.gname = group - if uid is not None: - tarinfo.uid = uid - tarinfo.uname = owner - return tarinfo - - if not dry_run: - tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) - try: - tar.add(base_dir, filter=_set_uid_gid) - finally: - tar.close() - - return archive_name - -def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): - # XXX see if we want to keep an external call here - if verbose: - zipoptions = "-r" - else: - zipoptions = "-rq" - from distutils.errors import DistutilsExecError - from distutils.spawn import spawn - try: - spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) - except DistutilsExecError: - # XXX really should distinguish between "couldn't find - # external 'zip' command" and "zip failed". - raise ExecError("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename - -def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): - """Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". 
Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises ExecError. Returns the name of the output zip - file. - """ - zip_filename = base_name + ".zip" - archive_dir = os.path.dirname(base_name) - - if not os.path.exists(archive_dir): - if logger is not None: - logger.info("creating %s", archive_dir) - if not dry_run: - os.makedirs(archive_dir) - - # If zipfile module is not available, try spawning an external 'zip' - # command. - try: - import zipfile - except ImportError: - zipfile = None - - if zipfile is None: - _call_external_zip(base_dir, zip_filename, verbose, dry_run) - else: - if logger is not None: - logger.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) - - if not dry_run: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) - - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - if os.path.isfile(path): - zip.write(path, path) - if logger is not None: - logger.info("adding '%s'", path) - zip.close() - - return zip_filename - -_ARCHIVE_FORMATS = { - 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), - 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), - 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (_make_zipfile, [], "ZIP file"), - } - -if _BZ2_SUPPORTED: - _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], - "bzip2'ed tar-file") - -def get_archive_formats(): - """Returns a list of supported formats for archiving and unarchiving. - - Each element of the returned sequence is a tuple (name, description) - """ - formats = [(name, registry[2]) for name, registry in - _ARCHIVE_FORMATS.items()] - formats.sort() - return formats - -def register_archive_format(name, function, extra_args=None, description=''): - """Registers an archive format. - - name is the name of the format. function is the callable that will be - used to create archives. If provided, extra_args is a sequence of - (name, value) tuples that will be passed as arguments to the callable. - description can be provided to describe the format, and will be returned - by the get_archive_formats() function. - """ - if extra_args is None: - extra_args = [] - if not isinstance(function, Callable): - raise TypeError('The %s object is not callable' % function) - if not isinstance(extra_args, (tuple, list)): - raise TypeError('extra_args needs to be a sequence') - for element in extra_args: - if not isinstance(element, (tuple, list)) or len(element) !=2: - raise TypeError('extra_args elements are : (arg_name, value)') - - _ARCHIVE_FORMATS[name] = (function, extra_args, description) - -def unregister_archive_format(name): - del _ARCHIVE_FORMATS[name] - -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None, logger=None): - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "bztar" - or "gztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 
'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. - """ - save_cwd = os.getcwd() - if root_dir is not None: - if logger is not None: - logger.debug("changing into '%s'", root_dir) - base_name = os.path.abspath(base_name) - if not dry_run: - os.chdir(root_dir) - - if base_dir is None: - base_dir = os.curdir - - kwargs = {'dry_run': dry_run, 'logger': logger} - - try: - format_info = _ARCHIVE_FORMATS[format] - except KeyError: - raise ValueError("unknown archive format '%s'" % format) - - func = format_info[0] - for arg, val in format_info[1]: - kwargs[arg] = val - - if format != 'zip': - kwargs['owner'] = owner - kwargs['group'] = group - - try: - filename = func(base_name, base_dir, **kwargs) - finally: - if root_dir is not None: - if logger is not None: - logger.debug("changing back to '%s'", save_cwd) - os.chdir(save_cwd) - - return filename - - -def get_unpack_formats(): - """Returns a list of supported formats for unpacking. - - Each element of the returned sequence is a tuple - (name, extensions, description) - """ - formats = [(name, info[0], info[3]) for name, info in - _UNPACK_FORMATS.items()] - formats.sort() - return formats - -def _check_unpack_options(extensions, function, extra_args): - """Checks what gets registered as an unpacker.""" - # first make sure no other unpacker is registered for this extension - existing_extensions = {} - for name, info in _UNPACK_FORMATS.items(): - for ext in info[0]: - existing_extensions[ext] = name - - for extension in extensions: - if extension in existing_extensions: - msg = '%s is already registered for "%s"' - raise RegistryError(msg % (extension, - existing_extensions[extension])) - - if not isinstance(function, Callable): - raise TypeError('The registered function must be a callable') - - -def register_unpack_format(name, extensions, function, extra_args=None, - description=''): - """Registers an unpack format. - - `name` is the name of the format. `extensions` is a list of extensions - corresponding to the format. - - `function` is the callable that will be - used to unpack archives. The callable will receive archives to unpack. - If it's unable to handle an archive, it needs to raise a ReadError - exception. - - If provided, `extra_args` is a sequence of - (name, value) tuples that will be passed as arguments to the callable. - description can be provided to describe the format, and will be returned - by the get_unpack_formats() function. - """ - if extra_args is None: - extra_args = [] - _check_unpack_options(extensions, function, extra_args) - _UNPACK_FORMATS[name] = extensions, function, extra_args, description - -def unregister_unpack_format(name): - """Removes the pack format from the registry.""" - del _UNPACK_FORMATS[name] - -def _ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def _unpack_zipfile(filename, extract_dir): - """Unpack zip `filename` to `extract_dir` - """ - try: - import zipfile - except ImportError: - raise ReadError('zlib not supported, cannot unpack this archive.') - - if not zipfile.is_zipfile(filename): - raise ReadError("%s is not a zip file" % filename) - - zip = zipfile.ZipFile(filename) - try: - for info in zip.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. 
in them - if name.startswith('/') or '..' in name: - continue - - target = os.path.join(extract_dir, *name.split('/')) - if not target: - continue - - _ensure_directory(target) - if not name.endswith('/'): - # file - data = zip.read(info.filename) - f = open(target, 'wb') - try: - f.write(data) - finally: - f.close() - del data - finally: - zip.close() - -def _unpack_tarfile(filename, extract_dir): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise ReadError( - "%s is not a compressed or uncompressed tar file" % filename) - try: - tarobj.extractall(extract_dir) - finally: - tarobj.close() - -_UNPACK_FORMATS = { - 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), - 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), - 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") - } - -if _BZ2_SUPPORTED: - _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], - "bzip2'ed tar-file") - -def _find_unpack_format(filename): - for name, info in _UNPACK_FORMATS.items(): - for extension in info[0]: - if filename.endswith(extension): - return name - return None - -def unpack_archive(filename, extract_dir=None, format=None): - """Unpack an archive. - - `filename` is the name of the archive. - - `extract_dir` is the name of the target directory, where the archive - is unpacked. If not provided, the current working directory is used. - - `format` is the archive format: one of "zip", "tar", or "gztar". Or any - other registered format. If not provided, unpack_archive will use the - filename extension and see if an unpacker was registered for that - extension. - - In case none is found, a ValueError is raised. - """ - if extract_dir is None: - extract_dir = os.getcwd() - - if format is not None: - try: - format_info = _UNPACK_FORMATS[format] - except KeyError: - raise ValueError("Unknown unpack format '{0}'".format(format)) - - func = format_info[1] - func(filename, extract_dir, **dict(format_info[2])) - else: - # we need to look at the registered unpackers supported extensions - format = _find_unpack_format(filename) - if format is None: - raise ReadError("Unknown archive format '{0}'".format(filename)) - - func = _UNPACK_FORMATS[format][1] - kwargs = dict(_UNPACK_FORMATS[format][2]) - func(filename, extract_dir, **kwargs) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg deleted file mode 100644 index 1746bd0..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg +++ /dev/null @@ -1,84 +0,0 @@ -[posix_prefix] -# Configuration directories. Some of these come straight out of the -# configure script. They are for implementing the other variables, not to -# be used directly in [resource_locations]. 
-confdir = /etc -datadir = /usr/share -libdir = /usr/lib -statedir = /var -# User resource directory -local = ~/.local/{distribution.name} - -stdlib = {base}/lib/python{py_version_short} -platstdlib = {platbase}/lib/python{py_version_short} -purelib = {base}/lib/python{py_version_short}/site-packages -platlib = {platbase}/lib/python{py_version_short}/site-packages -include = {base}/include/python{py_version_short}{abiflags} -platinclude = {platbase}/include/python{py_version_short}{abiflags} -data = {base} - -[posix_home] -stdlib = {base}/lib/python -platstdlib = {base}/lib/python -purelib = {base}/lib/python -platlib = {base}/lib/python -include = {base}/include/python -platinclude = {base}/include/python -scripts = {base}/bin -data = {base} - -[nt] -stdlib = {base}/Lib -platstdlib = {base}/Lib -purelib = {base}/Lib/site-packages -platlib = {base}/Lib/site-packages -include = {base}/Include -platinclude = {base}/Include -scripts = {base}/Scripts -data = {base} - -[os2] -stdlib = {base}/Lib -platstdlib = {base}/Lib -purelib = {base}/Lib/site-packages -platlib = {base}/Lib/site-packages -include = {base}/Include -platinclude = {base}/Include -scripts = {base}/Scripts -data = {base} - -[os2_home] -stdlib = {userbase}/lib/python{py_version_short} -platstdlib = {userbase}/lib/python{py_version_short} -purelib = {userbase}/lib/python{py_version_short}/site-packages -platlib = {userbase}/lib/python{py_version_short}/site-packages -include = {userbase}/include/python{py_version_short} -scripts = {userbase}/bin -data = {userbase} - -[nt_user] -stdlib = {userbase}/Python{py_version_nodot} -platstdlib = {userbase}/Python{py_version_nodot} -purelib = {userbase}/Python{py_version_nodot}/site-packages -platlib = {userbase}/Python{py_version_nodot}/site-packages -include = {userbase}/Python{py_version_nodot}/Include -scripts = {userbase}/Scripts -data = {userbase} - -[posix_user] -stdlib = {userbase}/lib/python{py_version_short} -platstdlib = {userbase}/lib/python{py_version_short} -purelib = {userbase}/lib/python{py_version_short}/site-packages -platlib = {userbase}/lib/python{py_version_short}/site-packages -include = {userbase}/include/python{py_version_short} -scripts = {userbase}/bin -data = {userbase} - -[osx_framework_user] -stdlib = {userbase}/lib/python -platstdlib = {userbase}/lib/python -purelib = {userbase}/lib/python/site-packages -platlib = {userbase}/lib/python/site-packages -include = {userbase}/include -scripts = {userbase}/bin -data = {userbase} diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py deleted file mode 100644 index b470a37..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py +++ /dev/null @@ -1,786 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. 
-# -"""Access to Python's configuration information.""" - -import codecs -import os -import re -import sys -from os.path import pardir, realpath -try: - import configparser -except ImportError: - import ConfigParser as configparser - - -__all__ = [ - 'get_config_h_filename', - 'get_config_var', - 'get_config_vars', - 'get_makefile_filename', - 'get_path', - 'get_path_names', - 'get_paths', - 'get_platform', - 'get_python_version', - 'get_scheme_names', - 'parse_config_h', -] - - -def _safe_realpath(path): - try: - return realpath(path) - except OSError: - return path - - -if sys.executable: - _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) -else: - # sys.executable can be empty if argv[0] has been changed and Python is - # unable to retrieve the real program name - _PROJECT_BASE = _safe_realpath(os.getcwd()) - -if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) -# PC/VS7.1 -if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) -# PC/AMD64 -if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): - _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) - - -def is_python_build(): - for fn in ("Setup.dist", "Setup.local"): - if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): - return True - return False - -_PYTHON_BUILD = is_python_build() - -_cfg_read = False - -def _ensure_cfg_read(): - global _cfg_read - if not _cfg_read: - from ..resources import finder - backport_package = __name__.rsplit('.', 1)[0] - _finder = finder(backport_package) - _cfgfile = _finder.find('sysconfig.cfg') - assert _cfgfile, 'sysconfig.cfg exists' - with _cfgfile.as_stream() as s: - _SCHEMES.readfp(s) - if _PYTHON_BUILD: - for scheme in ('posix_prefix', 'posix_home'): - _SCHEMES.set(scheme, 'include', '{srcdir}/Include') - _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') - - _cfg_read = True - - -_SCHEMES = configparser.RawConfigParser() -_VAR_REPL = re.compile(r'\{([^{]*?)\}') - -def _expand_globals(config): - _ensure_cfg_read() - if config.has_section('globals'): - globals = config.items('globals') - else: - globals = tuple() - - sections = config.sections() - for section in sections: - if section == 'globals': - continue - for option, value in globals: - if config.has_option(section, option): - continue - config.set(section, option, value) - config.remove_section('globals') - - # now expanding local variables defined in the cfg file - # - for section in config.sections(): - variables = dict(config.items(section)) - - def _replacer(matchobj): - name = matchobj.group(1) - if name in variables: - return variables[name] - return matchobj.group(0) - - for option, value in config.items(section): - config.set(section, option, _VAR_REPL.sub(_replacer, value)) - -#_expand_globals(_SCHEMES) - -_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] -_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] -_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] -_PREFIX = os.path.normpath(sys.prefix) -_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) -_CONFIG_VARS = None -_USER_BASE = None - - -def _subst_vars(path, local_vars): - """In the string `path`, replace tokens like {some.thing} with the - corresponding value from the map `local_vars`. - - If there is no corresponding value, leave the token unchanged. 
- """ - def _replacer(matchobj): - name = matchobj.group(1) - if name in local_vars: - return local_vars[name] - elif name in os.environ: - return os.environ[name] - return matchobj.group(0) - return _VAR_REPL.sub(_replacer, path) - - -def _extend_dict(target_dict, other_dict): - target_keys = target_dict.keys() - for key, value in other_dict.items(): - if key in target_keys: - continue - target_dict[key] = value - - -def _expand_vars(scheme, vars): - res = {} - if vars is None: - vars = {} - _extend_dict(vars, get_config_vars()) - - for key, value in _SCHEMES.items(scheme): - if os.name in ('posix', 'nt'): - value = os.path.expanduser(value) - res[key] = os.path.normpath(_subst_vars(value, vars)) - return res - - -def format_value(value, vars): - def _replacer(matchobj): - name = matchobj.group(1) - if name in vars: - return vars[name] - return matchobj.group(0) - return _VAR_REPL.sub(_replacer, value) - - -def _get_default_scheme(): - if os.name == 'posix': - # the default scheme for posix is posix_prefix - return 'posix_prefix' - return os.name - - -def _getuserbase(): - env_base = os.environ.get("PYTHONUSERBASE", None) - - def joinuser(*args): - return os.path.expanduser(os.path.join(*args)) - - # what about 'os2emx', 'riscos' ? - if os.name == "nt": - base = os.environ.get("APPDATA") or "~" - if env_base: - return env_base - else: - return joinuser(base, "Python") - - if sys.platform == "darwin": - framework = get_config_var("PYTHONFRAMEWORK") - if framework: - if env_base: - return env_base - else: - return joinuser("~", "Library", framework, "%d.%d" % - sys.version_info[:2]) - - if env_base: - return env_base - else: - return joinuser("~", ".local") - - -def _parse_makefile(filename, vars=None): - """Parse a Makefile-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - # Regexes needed for parsing Makefile (and similar syntaxes, - # like old-style Setup files). - _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") - _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") - _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") - - if vars is None: - vars = {} - done = {} - notdone = {} - - with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: - lines = f.readlines() - - for line in lines: - if line.startswith('#') or line.strip() == '': - continue - m = _variable_rx.match(line) - if m: - n, v = m.group(1, 2) - v = v.strip() - # `$$' is a literal `$' in make - tmpv = v.replace('$$', '') - - if "$" in tmpv: - notdone[n] = v - else: - try: - v = int(v) - except ValueError: - # insert literal `$' - done[n] = v.replace('$$', '$') - else: - done[n] = v - - # do variable interpolation here - variables = list(notdone.keys()) - - # Variables with a 'PY_' prefix in the makefile. These need to - # be made available without that prefix through sysconfig. - # Special care is needed to ensure that variable expansion works, even - # if the expansion uses the name without a prefix. 
- renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') - - while len(variables) > 0: - for name in tuple(variables): - value = notdone[name] - m = _findvar1_rx.search(value) or _findvar2_rx.search(value) - if m is not None: - n = m.group(1) - found = True - if n in done: - item = str(done[n]) - elif n in notdone: - # get it on a subsequent round - found = False - elif n in os.environ: - # do it like make: fall back to environment - item = os.environ[n] - - elif n in renamed_variables: - if (name.startswith('PY_') and - name[3:] in renamed_variables): - item = "" - - elif 'PY_' + n in notdone: - found = False - - else: - item = str(done['PY_' + n]) - - else: - done[n] = item = "" - - if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: - notdone[name] = value - else: - try: - value = int(value) - except ValueError: - done[name] = value.strip() - else: - done[name] = value - variables.remove(name) - - if (name.startswith('PY_') and - name[3:] in renamed_variables): - - name = name[3:] - if name not in done: - done[name] = value - - else: - # bogus variable reference (e.g. "prefix=$/opt/python"); - # just drop it since we can't deal - done[name] = value - variables.remove(name) - - # strip spurious spaces - for k, v in done.items(): - if isinstance(v, str): - done[k] = v.strip() - - # save the results in the global dictionary - vars.update(done) - return vars - - -def get_makefile_filename(): - """Return the path of the Makefile.""" - if _PYTHON_BUILD: - return os.path.join(_PROJECT_BASE, "Makefile") - if hasattr(sys, 'abiflags'): - config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) - else: - config_dir_name = 'config' - return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') - - -def _init_posix(vars): - """Initialize the module as appropriate for POSIX systems.""" - # load the installed Makefile: - makefile = get_makefile_filename() - try: - _parse_makefile(makefile, vars) - except IOError as e: - msg = "invalid Python installation: unable to open %s" % makefile - if hasattr(e, "strerror"): - msg = msg + " (%s)" % e.strerror - raise IOError(msg) - # load the installed pyconfig.h: - config_h = get_config_h_filename() - try: - with open(config_h) as f: - parse_config_h(f, vars) - except IOError as e: - msg = "invalid Python installation: unable to open %s" % config_h - if hasattr(e, "strerror"): - msg = msg + " (%s)" % e.strerror - raise IOError(msg) - # On AIX, there are wrong paths to the linker scripts in the Makefile - # -- these paths are relative to the Python source, but when installed - # the scripts are in another directory. - if _PYTHON_BUILD: - vars['LDSHARED'] = vars['BLDSHARED'] - - -def _init_non_posix(vars): - """Initialize the module as appropriate for NT""" - # set basic install directories - vars['LIBDEST'] = get_path('stdlib') - vars['BINLIBDEST'] = get_path('platstdlib') - vars['INCLUDEPY'] = get_path('include') - vars['SO'] = '.pyd' - vars['EXE'] = '.exe' - vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT - vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) - -# -# public APIs -# - - -def parse_config_h(fp, vars=None): - """Parse a config.h-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. 
- """ - if vars is None: - vars = {} - define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") - undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") - - while True: - line = fp.readline() - if not line: - break - m = define_rx.match(line) - if m: - n, v = m.group(1, 2) - try: - v = int(v) - except ValueError: - pass - vars[n] = v - else: - m = undef_rx.match(line) - if m: - vars[m.group(1)] = 0 - return vars - - -def get_config_h_filename(): - """Return the path of pyconfig.h.""" - if _PYTHON_BUILD: - if os.name == "nt": - inc_dir = os.path.join(_PROJECT_BASE, "PC") - else: - inc_dir = _PROJECT_BASE - else: - inc_dir = get_path('platinclude') - return os.path.join(inc_dir, 'pyconfig.h') - - -def get_scheme_names(): - """Return a tuple containing the schemes names.""" - return tuple(sorted(_SCHEMES.sections())) - - -def get_path_names(): - """Return a tuple containing the paths names.""" - # xxx see if we want a static list - return _SCHEMES.options('posix_prefix') - - -def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): - """Return a mapping containing an install scheme. - - ``scheme`` is the install scheme name. If not provided, it will - return the default scheme for the current platform. - """ - _ensure_cfg_read() - if expand: - return _expand_vars(scheme, vars) - else: - return dict(_SCHEMES.items(scheme)) - - -def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): - """Return a path corresponding to the scheme. - - ``scheme`` is the install scheme name. - """ - return get_paths(scheme, vars, expand)[name] - - -def get_config_vars(*args): - """With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows and Mac OS it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - global _CONFIG_VARS - if _CONFIG_VARS is None: - _CONFIG_VARS = {} - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # distutils2 module. - _CONFIG_VARS['prefix'] = _PREFIX - _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX - _CONFIG_VARS['py_version'] = _PY_VERSION - _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT - _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] - _CONFIG_VARS['base'] = _PREFIX - _CONFIG_VARS['platbase'] = _EXEC_PREFIX - _CONFIG_VARS['projectbase'] = _PROJECT_BASE - try: - _CONFIG_VARS['abiflags'] = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. - _CONFIG_VARS['abiflags'] = '' - - if os.name in ('nt', 'os2'): - _init_non_posix(_CONFIG_VARS) - if os.name == 'posix': - _init_posix(_CONFIG_VARS) - # Setting 'userbase' is done below the call to the - # init function to enable using 'get_config_var' in - # the init-function. - if sys.version >= '2.6': - _CONFIG_VARS['userbase'] = _getuserbase() - - if 'srcdir' not in _CONFIG_VARS: - _CONFIG_VARS['srcdir'] = _PROJECT_BASE - else: - _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. 
- if _PYTHON_BUILD and os.name == "posix": - base = _PROJECT_BASE - try: - cwd = os.getcwd() - except OSError: - cwd = None - if (not os.path.isabs(_CONFIG_VARS['srcdir']) and - base != cwd): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) - _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) - - if sys.platform == 'darwin': - kernel_version = os.uname()[2] # Kernel version (8.4.3) - major_version = int(kernel_version.split('.')[0]) - - if major_version < 8: - # On Mac OS X before 10.4, check if -arch and -isysroot - # are in CFLAGS or LDFLAGS and remove them if they are. - # This is needed when building extensions on a 10.3 system - # using a universal build of python. - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - flags = _CONFIG_VARS[key] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags) - flags = re.sub('-isysroot [^ \t]*', ' ', flags) - _CONFIG_VARS[key] = flags - else: - # Allow the user to override the architecture flags using - # an environment variable. - # NOTE: This name was introduced by Apple in OSX 10.5 and - # is used by several scripting languages distributed with - # that OS release. - if 'ARCHFLAGS' in os.environ: - arch = os.environ['ARCHFLAGS'] - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - - flags = _CONFIG_VARS[key] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags) - flags = flags + ' ' + arch - _CONFIG_VARS[key] = flags - - # If we're on OSX 10.5 or later and the user tries to - # compiles an extension using an SDK that is not present - # on the current machine it is better to not use an SDK - # than to fail. - # - # The major usecase for this is users using a Python.org - # binary installer on OSX 10.6: that installer uses - # the 10.4u SDK, but that SDK is not installed by default - # when you install Xcode. - # - CFLAGS = _CONFIG_VARS.get('CFLAGS', '') - m = re.search(r'-isysroot\s+(\S+)', CFLAGS) - if m is not None: - sdk = m.group(1) - if not os.path.exists(sdk): - for key in ('LDFLAGS', 'BASECFLAGS', - # a number of derived variables. These need to be - # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): - - flags = _CONFIG_VARS[key] - flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) - _CONFIG_VARS[key] = flags - - if args: - vals = [] - for name in args: - vals.append(_CONFIG_VARS.get(name)) - return vals - else: - return _CONFIG_VARS - - -def get_config_var(name): - """Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - return get_config_vars().get(name) - - -def get_platform(): - """Return a string that identifies the current platform. - - This is used mainly to distinguish platform-specific build directories and - platform-specific built distributions. Typically includes the OS name - and version and the architecture (as supplied by 'os.uname()'), - although the exact information included depends on the OS; eg. for IRIX - the architecture isn't particularly important (IRIX only runs on SGI - hardware), but for Linux the kernel version isn't particularly - important. - - Examples of returned values: - linux-i586 - linux-alpha (?) 
- solaris-2.6-sun4u - irix-5.3 - irix64-6.2 - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win-ia64 (64bit Windows on Itanium) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - """ - if os.name == 'nt': - # sniff sys.version for architecture. - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return sys.platform - j = sys.version.find(")", i) - look = sys.version[i+len(prefix):j].lower() - if look == 'amd64': - return 'win-amd64' - if look == 'itanium': - return 'win-ia64' - return sys.platform - - if os.name != "posix" or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - osname, host, release, version, machine = os.uname() - - # Convert the OS name to lowercase, remove '/' characters - # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_') - machine = machine.replace('/', '-') - - if osname[:5] == "linux": - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - elif osname[:5] == "sunos": - if release[0] >= "5": # SunOS 5 == Solaris 2 - osname = "solaris" - release = "%d.%s" % (int(release[0]) - 3, release[2:]) - # fall through to standard osname-release-machine representation - elif osname[:4] == "irix": # could be "irix64"! - return "%s-%s" % (osname, release) - elif osname[:3] == "aix": - return "%s-%s.%s" % (osname, version, release) - elif osname[:6] == "cygwin": - osname = "cygwin" - rel_re = re.compile(r'[\d.]+') - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == "darwin": - # - # For our purposes, we'll assume that the system version from - # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set - # to. This makes the compatibility story a bit more sane because the - # machine is going to compile and link as if it were - # MACOSX_DEPLOYMENT_TARGET. - cfgvars = get_config_vars() - macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') - - if True: - # Always calculate the release of the running machine, - # needed to determine if we can build fat binaries or not. - - macrelease = macver - # Get the system version. Reading this plist is a documented - # way to get the system version (see the documentation for - # the Gestalt Manager) - try: - f = open('/System/Library/CoreServices/SystemVersion.plist') - except IOError: - # We're on a plain darwin box, fall back to the default - # behaviour. - pass - else: - try: - m = re.search(r'ProductUserVisibleVersion\s*' - r'(.*?)', f.read()) - finally: - f.close() - if m is not None: - macrelease = '.'.join(m.group(1).split('.')[:2]) - # else: fall back to the default behaviour - - if not macver: - macver = macrelease - - if macver: - release = macver - osname = "macosx" - - if ((macrelease + '.') >= '10.4.' and - '-arch' in get_config_vars().get('CFLAGS', '').strip()): - # The universal build will build fat binaries, but not on - # systems before 10.4 - # - # Try to detect 4-way universal builds, those have machine-type - # 'universal' instead of 'fat'. 
- - machine = 'fat' - cflags = get_config_vars().get('CFLAGS') - - archs = re.findall(r'-arch\s+(\S+)', cflags) - archs = tuple(sorted(set(archs))) - - if len(archs) == 1: - machine = archs[0] - elif archs == ('i386', 'ppc'): - machine = 'fat' - elif archs == ('i386', 'x86_64'): - machine = 'intel' - elif archs == ('i386', 'ppc', 'x86_64'): - machine = 'fat3' - elif archs == ('ppc64', 'x86_64'): - machine = 'fat64' - elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): - machine = 'universal' - else: - raise ValueError( - "Don't know machine value for archs=%r" % (archs,)) - - elif machine == 'i386': - # On OSX the machine type returned by uname is always the - # 32-bit variant, even if the executable architecture is - # the 64-bit variant - if sys.maxsize >= 2**32: - machine = 'x86_64' - - elif machine in ('PowerPC', 'Power_Macintosh'): - # Pick a sane name for the PPC architecture. - # See 'i386' case - if sys.maxsize >= 2**32: - machine = 'ppc64' - else: - machine = 'ppc' - - return "%s-%s-%s" % (osname, release, machine) - - -def get_python_version(): - return _PY_VERSION_SHORT - - -def _print_dict(title, data): - for index, (key, value) in enumerate(sorted(data.items())): - if index == 0: - print('%s: ' % (title)) - print('\t%s = "%s"' % (key, value)) - - -def _main(): - """Display all information sysconfig detains.""" - print('Platform: "%s"' % get_platform()) - print('Python version: "%s"' % get_python_version()) - print('Current installation scheme: "%s"' % _get_default_scheme()) - print() - _print_dict('Paths', get_paths()) - print() - _print_dict('Variables', get_config_vars()) - - -if __name__ == '__main__': - _main() diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/venv/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py deleted file mode 100644 index d66d856..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py +++ /dev/null @@ -1,2607 +0,0 @@ -#------------------------------------------------------------------- -# tarfile.py -#------------------------------------------------------------------- -# Copyright (C) 2002 Lars Gustaebel -# All rights reserved. -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -from __future__ import print_function - -"""Read from and write to tar format archives. 
-""" - -__version__ = "$Revision$" - -version = "0.9.0" -__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" -__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" -__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" -__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." - -#--------- -# Imports -#--------- -import sys -import os -import stat -import errno -import time -import struct -import copy -import re - -try: - import grp, pwd -except ImportError: - grp = pwd = None - -# os.symlink on Windows prior to 6.0 raises NotImplementedError -symlink_exception = (AttributeError, NotImplementedError) -try: - # WindowsError (1314) will be raised if the caller does not hold the - # SeCreateSymbolicLinkPrivilege privilege - symlink_exception += (WindowsError,) -except NameError: - pass - -# from tarfile import * -__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] - -if sys.version_info[0] < 3: - import __builtin__ as builtins -else: - import builtins - -_open = builtins.open # Since 'open' is TarFile.open - -#--------------------------------------------------------- -# tar constants -#--------------------------------------------------------- -NUL = b"\0" # the null character -BLOCKSIZE = 512 # length of processing blocks -RECORDSIZE = BLOCKSIZE * 20 # length of records -GNU_MAGIC = b"ustar \0" # magic gnu tar string -POSIX_MAGIC = b"ustar\x0000" # magic posix tar string - -LENGTH_NAME = 100 # maximum length of a filename -LENGTH_LINK = 100 # maximum length of a linkname -LENGTH_PREFIX = 155 # maximum length of the prefix field - -REGTYPE = b"0" # regular file -AREGTYPE = b"\0" # regular file -LNKTYPE = b"1" # link (inside tarfile) -SYMTYPE = b"2" # symbolic link -CHRTYPE = b"3" # character special device -BLKTYPE = b"4" # block special device -DIRTYPE = b"5" # directory -FIFOTYPE = b"6" # fifo special device -CONTTYPE = b"7" # contiguous file - -GNUTYPE_LONGNAME = b"L" # GNU tar longname -GNUTYPE_LONGLINK = b"K" # GNU tar longlink -GNUTYPE_SPARSE = b"S" # GNU tar sparse file - -XHDTYPE = b"x" # POSIX.1-2001 extended header -XGLTYPE = b"g" # POSIX.1-2001 global header -SOLARIS_XHDTYPE = b"X" # Solaris extended header - -USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format -GNU_FORMAT = 1 # GNU tar format -PAX_FORMAT = 2 # POSIX.1-2001 (pax) format -DEFAULT_FORMAT = GNU_FORMAT - -#--------------------------------------------------------- -# tarfile constants -#--------------------------------------------------------- -# File types that tarfile supports: -SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, - SYMTYPE, DIRTYPE, FIFOTYPE, - CONTTYPE, CHRTYPE, BLKTYPE, - GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, - GNUTYPE_SPARSE) - -# File types that will be treated as a regular file. -REGULAR_TYPES = (REGTYPE, AREGTYPE, - CONTTYPE, GNUTYPE_SPARSE) - -# File types that are part of the GNU tar format. -GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, - GNUTYPE_SPARSE) - -# Fields from a pax header that override a TarInfo attribute. -PAX_FIELDS = ("path", "linkpath", "size", "mtime", - "uid", "gid", "uname", "gname") - -# Fields from a pax header that are affected by hdrcharset. -PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) - -# Fields in a pax header that are numbers, all other fields -# are treated as strings. 
-PAX_NUMBER_FIELDS = { - "atime": float, - "ctime": float, - "mtime": float, - "uid": int, - "gid": int, - "size": int -} - -#--------------------------------------------------------- -# Bits used in the mode field, values in octal. -#--------------------------------------------------------- -S_IFLNK = 0o120000 # symbolic link -S_IFREG = 0o100000 # regular file -S_IFBLK = 0o060000 # block device -S_IFDIR = 0o040000 # directory -S_IFCHR = 0o020000 # character device -S_IFIFO = 0o010000 # fifo - -TSUID = 0o4000 # set UID on execution -TSGID = 0o2000 # set GID on execution -TSVTX = 0o1000 # reserved - -TUREAD = 0o400 # read by owner -TUWRITE = 0o200 # write by owner -TUEXEC = 0o100 # execute/search by owner -TGREAD = 0o040 # read by group -TGWRITE = 0o020 # write by group -TGEXEC = 0o010 # execute/search by group -TOREAD = 0o004 # read by other -TOWRITE = 0o002 # write by other -TOEXEC = 0o001 # execute/search by other - -#--------------------------------------------------------- -# initialization -#--------------------------------------------------------- -if os.name in ("nt", "ce"): - ENCODING = "utf-8" -else: - ENCODING = sys.getfilesystemencoding() - -#--------------------------------------------------------- -# Some useful functions -#--------------------------------------------------------- - -def stn(s, length, encoding, errors): - """Convert a string to a null-terminated bytes object. - """ - s = s.encode(encoding, errors) - return s[:length] + (length - len(s)) * NUL - -def nts(s, encoding, errors): - """Convert a null-terminated bytes object to a string. - """ - p = s.find(b"\0") - if p != -1: - s = s[:p] - return s.decode(encoding, errors) - -def nti(s): - """Convert a number field to a python number. - """ - # There are two possible encodings for a number field, see - # itn() below. - if s[0] != chr(0o200): - try: - n = int(nts(s, "ascii", "strict") or "0", 8) - except ValueError: - raise InvalidHeaderError("invalid header") - else: - n = 0 - for i in range(len(s) - 1): - n <<= 8 - n += ord(s[i + 1]) - return n - -def itn(n, digits=8, format=DEFAULT_FORMAT): - """Convert a python number to a number field. - """ - # POSIX 1003.1-1988 requires numbers to be encoded as a string of - # octal digits followed by a null-byte, this allows values up to - # (8**(digits-1))-1. GNU tar allows storing numbers greater than - # that if necessary. A leading 0o200 byte indicates this particular - # encoding, the following digits-1 bytes are a big-endian - # representation. This allows values up to (256**(digits-1))-1. - if 0 <= n < 8 ** (digits - 1): - s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL - else: - if format != GNU_FORMAT or n >= 256 ** (digits - 1): - raise ValueError("overflow in number field") - - if n < 0: - # XXX We mimic GNU tar's behaviour with negative numbers, - # this could raise OverflowError. - n = struct.unpack("L", struct.pack("l", n))[0] - - s = bytearray() - for i in range(digits - 1): - s.insert(0, n & 0o377) - n >>= 8 - s.insert(0, 0o200) - return s - -def calc_chksums(buf): - """Calculate the checksum for a member's header by summing up all - characters except for the chksum field which is treated as if - it was filled with spaces. According to the GNU tar sources, - some tars (Sun and NeXT) calculate chksum with signed char, - which will be different if there are chars in the buffer with - the high bit set. So we calculate two checksums, unsigned and - signed. 
- """ - unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) - signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) - return unsigned_chksum, signed_chksum - -def copyfileobj(src, dst, length=None): - """Copy length bytes from fileobj src to fileobj dst. - If length is None, copy the entire content. - """ - if length == 0: - return - if length is None: - while True: - buf = src.read(16*1024) - if not buf: - break - dst.write(buf) - return - - BUFSIZE = 16 * 1024 - blocks, remainder = divmod(length, BUFSIZE) - for b in range(blocks): - buf = src.read(BUFSIZE) - if len(buf) < BUFSIZE: - raise IOError("end of file reached") - dst.write(buf) - - if remainder != 0: - buf = src.read(remainder) - if len(buf) < remainder: - raise IOError("end of file reached") - dst.write(buf) - return - -filemode_table = ( - ((S_IFLNK, "l"), - (S_IFREG, "-"), - (S_IFBLK, "b"), - (S_IFDIR, "d"), - (S_IFCHR, "c"), - (S_IFIFO, "p")), - - ((TUREAD, "r"),), - ((TUWRITE, "w"),), - ((TUEXEC|TSUID, "s"), - (TSUID, "S"), - (TUEXEC, "x")), - - ((TGREAD, "r"),), - ((TGWRITE, "w"),), - ((TGEXEC|TSGID, "s"), - (TSGID, "S"), - (TGEXEC, "x")), - - ((TOREAD, "r"),), - ((TOWRITE, "w"),), - ((TOEXEC|TSVTX, "t"), - (TSVTX, "T"), - (TOEXEC, "x")) -) - -def filemode(mode): - """Convert a file's mode to a string of the form - -rwxrwxrwx. - Used by TarFile.list() - """ - perm = [] - for table in filemode_table: - for bit, char in table: - if mode & bit == bit: - perm.append(char) - break - else: - perm.append("-") - return "".join(perm) - -class TarError(Exception): - """Base exception.""" - pass -class ExtractError(TarError): - """General exception for extract errors.""" - pass -class ReadError(TarError): - """Exception for unreadable tar archives.""" - pass -class CompressionError(TarError): - """Exception for unavailable compression methods.""" - pass -class StreamError(TarError): - """Exception for unsupported operations on stream-like TarFiles.""" - pass -class HeaderError(TarError): - """Base exception for header errors.""" - pass -class EmptyHeaderError(HeaderError): - """Exception for empty headers.""" - pass -class TruncatedHeaderError(HeaderError): - """Exception for truncated headers.""" - pass -class EOFHeaderError(HeaderError): - """Exception for end of file headers.""" - pass -class InvalidHeaderError(HeaderError): - """Exception for invalid headers.""" - pass -class SubsequentHeaderError(HeaderError): - """Exception for missing and invalid extended headers.""" - pass - -#--------------------------- -# internal stream interface -#--------------------------- -class _LowLevelFile(object): - """Low-level file object. Supports reading and writing. - It is used instead of a regular file object for streaming - access. - """ - - def __init__(self, name, mode): - mode = { - "r": os.O_RDONLY, - "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, - }[mode] - if hasattr(os, "O_BINARY"): - mode |= os.O_BINARY - self.fd = os.open(name, mode, 0o666) - - def close(self): - os.close(self.fd) - - def read(self, size): - return os.read(self.fd, size) - - def write(self, s): - os.write(self.fd, s) - -class _Stream(object): - """Class that serves as an adapter between TarFile and - a stream-like object. The stream-like object only - needs to have a read() or write() method and is accessed - blockwise. Use of gzip or bzip2 compression is possible. - A stream-like object could be for example: sys.stdin, - sys.stdout, a socket, a tape device etc. 
- - _Stream is intended to be used only internally. - """ - - def __init__(self, name, mode, comptype, fileobj, bufsize): - """Construct a _Stream object. - """ - self._extfileobj = True - if fileobj is None: - fileobj = _LowLevelFile(name, mode) - self._extfileobj = False - - if comptype == '*': - # Enable transparent compression detection for the - # stream interface - fileobj = _StreamProxy(fileobj) - comptype = fileobj.getcomptype() - - self.name = name or "" - self.mode = mode - self.comptype = comptype - self.fileobj = fileobj - self.bufsize = bufsize - self.buf = b"" - self.pos = 0 - self.closed = False - - try: - if comptype == "gz": - try: - import zlib - except ImportError: - raise CompressionError("zlib module is not available") - self.zlib = zlib - self.crc = zlib.crc32(b"") - if mode == "r": - self._init_read_gz() - else: - self._init_write_gz() - - if comptype == "bz2": - try: - import bz2 - except ImportError: - raise CompressionError("bz2 module is not available") - if mode == "r": - self.dbuf = b"" - self.cmp = bz2.BZ2Decompressor() - else: - self.cmp = bz2.BZ2Compressor() - except: - if not self._extfileobj: - self.fileobj.close() - self.closed = True - raise - - def __del__(self): - if hasattr(self, "closed") and not self.closed: - self.close() - - def _init_write_gz(self): - """Initialize for writing with gzip compression. - """ - self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, - -self.zlib.MAX_WBITS, - self.zlib.DEF_MEM_LEVEL, - 0) - timestamp = struct.pack(" self.bufsize: - self.fileobj.write(self.buf[:self.bufsize]) - self.buf = self.buf[self.bufsize:] - - def close(self): - """Close the _Stream object. No operation should be - done on it afterwards. - """ - if self.closed: - return - - if self.mode == "w" and self.comptype != "tar": - self.buf += self.cmp.flush() - - if self.mode == "w" and self.buf: - self.fileobj.write(self.buf) - self.buf = b"" - if self.comptype == "gz": - # The native zlib crc is an unsigned 32-bit integer, but - # the Python wrapper implicitly casts that to a signed C - # long. So, on a 32-bit box self.crc may "look negative", - # while the same crc on a 64-bit box may "look positive". - # To avoid irksome warnings from the `struct` module, force - # it to look positive on all boxes. - self.fileobj.write(struct.pack("= 0: - blocks, remainder = divmod(pos - self.pos, self.bufsize) - for i in range(blocks): - self.read(self.bufsize) - self.read(remainder) - else: - raise StreamError("seeking backwards is not allowed") - return self.pos - - def read(self, size=None): - """Return the next size number of bytes from the stream. - If size is not defined, return all bytes of the stream - up to EOF. - """ - if size is None: - t = [] - while True: - buf = self._read(self.bufsize) - if not buf: - break - t.append(buf) - buf = "".join(t) - else: - buf = self._read(size) - self.pos += len(buf) - return buf - - def _read(self, size): - """Return size bytes from the stream. - """ - if self.comptype == "tar": - return self.__read(size) - - c = len(self.dbuf) - while c < size: - buf = self.__read(self.bufsize) - if not buf: - break - try: - buf = self.cmp.decompress(buf) - except IOError: - raise ReadError("invalid compressed data") - self.dbuf += buf - c += len(buf) - buf = self.dbuf[:size] - self.dbuf = self.dbuf[size:] - return buf - - def __read(self, size): - """Return size bytes from stream. If internal buffer is empty, - read another block from the stream. 
- """ - c = len(self.buf) - while c < size: - buf = self.fileobj.read(self.bufsize) - if not buf: - break - self.buf += buf - c += len(buf) - buf = self.buf[:size] - self.buf = self.buf[size:] - return buf -# class _Stream - -class _StreamProxy(object): - """Small proxy class that enables transparent compression - detection for the Stream interface (mode 'r|*'). - """ - - def __init__(self, fileobj): - self.fileobj = fileobj - self.buf = self.fileobj.read(BLOCKSIZE) - - def read(self, size): - self.read = self.fileobj.read - return self.buf - - def getcomptype(self): - if self.buf.startswith(b"\037\213\010"): - return "gz" - if self.buf.startswith(b"BZh91"): - return "bz2" - return "tar" - - def close(self): - self.fileobj.close() -# class StreamProxy - -class _BZ2Proxy(object): - """Small proxy class that enables external file object - support for "r:bz2" and "w:bz2" modes. This is actually - a workaround for a limitation in bz2 module's BZ2File - class which (unlike gzip.GzipFile) has no support for - a file object argument. - """ - - blocksize = 16 * 1024 - - def __init__(self, fileobj, mode): - self.fileobj = fileobj - self.mode = mode - self.name = getattr(self.fileobj, "name", None) - self.init() - - def init(self): - import bz2 - self.pos = 0 - if self.mode == "r": - self.bz2obj = bz2.BZ2Decompressor() - self.fileobj.seek(0) - self.buf = b"" - else: - self.bz2obj = bz2.BZ2Compressor() - - def read(self, size): - x = len(self.buf) - while x < size: - raw = self.fileobj.read(self.blocksize) - if not raw: - break - data = self.bz2obj.decompress(raw) - self.buf += data - x += len(data) - - buf = self.buf[:size] - self.buf = self.buf[size:] - self.pos += len(buf) - return buf - - def seek(self, pos): - if pos < self.pos: - self.init() - self.read(pos - self.pos) - - def tell(self): - return self.pos - - def write(self, data): - self.pos += len(data) - raw = self.bz2obj.compress(data) - self.fileobj.write(raw) - - def close(self): - if self.mode == "w": - raw = self.bz2obj.flush() - self.fileobj.write(raw) -# class _BZ2Proxy - -#------------------------ -# Extraction file object -#------------------------ -class _FileInFile(object): - """A thin wrapper around an existing file object that - provides a part of its data as an individual file - object. - """ - - def __init__(self, fileobj, offset, size, blockinfo=None): - self.fileobj = fileobj - self.offset = offset - self.size = size - self.position = 0 - - if blockinfo is None: - blockinfo = [(0, size)] - - # Construct a map with data and zero blocks. - self.map_index = 0 - self.map = [] - lastpos = 0 - realpos = self.offset - for offset, size in blockinfo: - if offset > lastpos: - self.map.append((False, lastpos, offset, None)) - self.map.append((True, offset, offset + size, realpos)) - realpos += size - lastpos = offset + size - if lastpos < self.size: - self.map.append((False, lastpos, self.size, None)) - - def seekable(self): - if not hasattr(self.fileobj, "seekable"): - # XXX gzip.GzipFile and bz2.BZ2File - return True - return self.fileobj.seekable() - - def tell(self): - """Return the current file position. - """ - return self.position - - def seek(self, position): - """Seek to a position in the file. - """ - self.position = position - - def read(self, size=None): - """Read data from the file. 
- """ - if size is None: - size = self.size - self.position - else: - size = min(size, self.size - self.position) - - buf = b"" - while size > 0: - while True: - data, start, stop, offset = self.map[self.map_index] - if start <= self.position < stop: - break - else: - self.map_index += 1 - if self.map_index == len(self.map): - self.map_index = 0 - length = min(size, stop - self.position) - if data: - self.fileobj.seek(offset + (self.position - start)) - buf += self.fileobj.read(length) - else: - buf += NUL * length - size -= length - self.position += length - return buf -#class _FileInFile - - -class ExFileObject(object): - """File-like object for reading an archive member. - Is returned by TarFile.extractfile(). - """ - blocksize = 1024 - - def __init__(self, tarfile, tarinfo): - self.fileobj = _FileInFile(tarfile.fileobj, - tarinfo.offset_data, - tarinfo.size, - tarinfo.sparse) - self.name = tarinfo.name - self.mode = "r" - self.closed = False - self.size = tarinfo.size - - self.position = 0 - self.buffer = b"" - - def readable(self): - return True - - def writable(self): - return False - - def seekable(self): - return self.fileobj.seekable() - - def read(self, size=None): - """Read at most size bytes from the file. If size is not - present or None, read all data until EOF is reached. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - buf = b"" - if self.buffer: - if size is None: - buf = self.buffer - self.buffer = b"" - else: - buf = self.buffer[:size] - self.buffer = self.buffer[size:] - - if size is None: - buf += self.fileobj.read() - else: - buf += self.fileobj.read(size - len(buf)) - - self.position += len(buf) - return buf - - # XXX TextIOWrapper uses the read1() method. - read1 = read - - def readline(self, size=-1): - """Read one entire line from the file. If size is present - and non-negative, return a string with at most that - size, which may be an incomplete line. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - pos = self.buffer.find(b"\n") + 1 - if pos == 0: - # no newline found. - while True: - buf = self.fileobj.read(self.blocksize) - self.buffer += buf - if not buf or b"\n" in buf: - pos = self.buffer.find(b"\n") + 1 - if pos == 0: - # no newline found. - pos = len(self.buffer) - break - - if size != -1: - pos = min(size, pos) - - buf = self.buffer[:pos] - self.buffer = self.buffer[pos:] - self.position += len(buf) - return buf - - def readlines(self): - """Return a list with all remaining lines. - """ - result = [] - while True: - line = self.readline() - if not line: break - result.append(line) - return result - - def tell(self): - """Return the current file position. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - return self.position - - def seek(self, pos, whence=os.SEEK_SET): - """Seek to a position in the file. - """ - if self.closed: - raise ValueError("I/O operation on closed file") - - if whence == os.SEEK_SET: - self.position = min(max(pos, 0), self.size) - elif whence == os.SEEK_CUR: - if pos < 0: - self.position = max(self.position + pos, 0) - else: - self.position = min(self.position + pos, self.size) - elif whence == os.SEEK_END: - self.position = max(min(self.size + pos, self.size), 0) - else: - raise ValueError("Invalid argument") - - self.buffer = b"" - self.fileobj.seek(self.position) - - def close(self): - """Close the file object. - """ - self.closed = True - - def __iter__(self): - """Get an iterator over the file's lines. 
- """ - while True: - line = self.readline() - if not line: - break - yield line -#class ExFileObject - -#------------------ -# Exported Classes -#------------------ -class TarInfo(object): - """Informational class which holds the details about an - archive member given by a tar header block. - TarInfo objects are returned by TarFile.getmember(), - TarFile.getmembers() and TarFile.gettarinfo() and are - usually created internally. - """ - - __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", - "chksum", "type", "linkname", "uname", "gname", - "devmajor", "devminor", - "offset", "offset_data", "pax_headers", "sparse", - "tarfile", "_sparse_structs", "_link_target") - - def __init__(self, name=""): - """Construct a TarInfo object. name is the optional name - of the member. - """ - self.name = name # member name - self.mode = 0o644 # file permissions - self.uid = 0 # user id - self.gid = 0 # group id - self.size = 0 # file size - self.mtime = 0 # modification time - self.chksum = 0 # header checksum - self.type = REGTYPE # member type - self.linkname = "" # link name - self.uname = "" # user name - self.gname = "" # group name - self.devmajor = 0 # device major number - self.devminor = 0 # device minor number - - self.offset = 0 # the tar header starts here - self.offset_data = 0 # the file's data starts here - - self.sparse = None # sparse member information - self.pax_headers = {} # pax header information - - # In pax headers the "name" and "linkname" field are called - # "path" and "linkpath". - def _getpath(self): - return self.name - def _setpath(self, name): - self.name = name - path = property(_getpath, _setpath) - - def _getlinkpath(self): - return self.linkname - def _setlinkpath(self, linkname): - self.linkname = linkname - linkpath = property(_getlinkpath, _setlinkpath) - - def __repr__(self): - return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) - - def get_info(self): - """Return the TarInfo's attributes as a dictionary. - """ - info = { - "name": self.name, - "mode": self.mode & 0o7777, - "uid": self.uid, - "gid": self.gid, - "size": self.size, - "mtime": self.mtime, - "chksum": self.chksum, - "type": self.type, - "linkname": self.linkname, - "uname": self.uname, - "gname": self.gname, - "devmajor": self.devmajor, - "devminor": self.devminor - } - - if info["type"] == DIRTYPE and not info["name"].endswith("/"): - info["name"] += "/" - - return info - - def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): - """Return a tar header as a string of 512 byte blocks. - """ - info = self.get_info() - - if format == USTAR_FORMAT: - return self.create_ustar_header(info, encoding, errors) - elif format == GNU_FORMAT: - return self.create_gnu_header(info, encoding, errors) - elif format == PAX_FORMAT: - return self.create_pax_header(info, encoding) - else: - raise ValueError("invalid format") - - def create_ustar_header(self, info, encoding, errors): - """Return the object as a ustar header block. - """ - info["magic"] = POSIX_MAGIC - - if len(info["linkname"]) > LENGTH_LINK: - raise ValueError("linkname is too long") - - if len(info["name"]) > LENGTH_NAME: - info["prefix"], info["name"] = self._posix_split_name(info["name"]) - - return self._create_header(info, USTAR_FORMAT, encoding, errors) - - def create_gnu_header(self, info, encoding, errors): - """Return the object as a GNU header block sequence. 
- """ - info["magic"] = GNU_MAGIC - - buf = b"" - if len(info["linkname"]) > LENGTH_LINK: - buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) - - if len(info["name"]) > LENGTH_NAME: - buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) - - return buf + self._create_header(info, GNU_FORMAT, encoding, errors) - - def create_pax_header(self, info, encoding): - """Return the object as a ustar header block. If it cannot be - represented this way, prepend a pax extended header sequence - with supplement information. - """ - info["magic"] = POSIX_MAGIC - pax_headers = self.pax_headers.copy() - - # Test string fields for values that exceed the field length or cannot - # be represented in ASCII encoding. - for name, hname, length in ( - ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), - ("uname", "uname", 32), ("gname", "gname", 32)): - - if hname in pax_headers: - # The pax header has priority. - continue - - # Try to encode the string as ASCII. - try: - info[name].encode("ascii", "strict") - except UnicodeEncodeError: - pax_headers[hname] = info[name] - continue - - if len(info[name]) > length: - pax_headers[hname] = info[name] - - # Test number fields for values that exceed the field limit or values - # that like to be stored as float. - for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): - if name in pax_headers: - # The pax header has priority. Avoid overflow. - info[name] = 0 - continue - - val = info[name] - if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): - pax_headers[name] = str(val) - info[name] = 0 - - # Create a pax extended header if necessary. - if pax_headers: - buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) - else: - buf = b"" - - return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") - - @classmethod - def create_pax_global_header(cls, pax_headers): - """Return the object as a pax global header block sequence. - """ - return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") - - def _posix_split_name(self, name): - """Split a name longer than 100 chars into a prefix - and a name part. - """ - prefix = name[:LENGTH_PREFIX + 1] - while prefix and prefix[-1] != "/": - prefix = prefix[:-1] - - name = name[len(prefix):] - prefix = prefix[:-1] - - if not prefix or len(name) > LENGTH_NAME: - raise ValueError("name is too long") - return prefix, name - - @staticmethod - def _create_header(info, format, encoding, errors): - """Return a header block. info is a dictionary with file - information, format must be one of the *_FORMAT constants. 
- """ - parts = [ - stn(info.get("name", ""), 100, encoding, errors), - itn(info.get("mode", 0) & 0o7777, 8, format), - itn(info.get("uid", 0), 8, format), - itn(info.get("gid", 0), 8, format), - itn(info.get("size", 0), 12, format), - itn(info.get("mtime", 0), 12, format), - b" ", # checksum field - info.get("type", REGTYPE), - stn(info.get("linkname", ""), 100, encoding, errors), - info.get("magic", POSIX_MAGIC), - stn(info.get("uname", ""), 32, encoding, errors), - stn(info.get("gname", ""), 32, encoding, errors), - itn(info.get("devmajor", 0), 8, format), - itn(info.get("devminor", 0), 8, format), - stn(info.get("prefix", ""), 155, encoding, errors) - ] - - buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) - chksum = calc_chksums(buf[-BLOCKSIZE:])[0] - buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] - return buf - - @staticmethod - def _create_payload(payload): - """Return the string payload filled with zero bytes - up to the next 512 byte border. - """ - blocks, remainder = divmod(len(payload), BLOCKSIZE) - if remainder > 0: - payload += (BLOCKSIZE - remainder) * NUL - return payload - - @classmethod - def _create_gnu_long_header(cls, name, type, encoding, errors): - """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence - for name. - """ - name = name.encode(encoding, errors) + NUL - - info = {} - info["name"] = "././@LongLink" - info["type"] = type - info["size"] = len(name) - info["magic"] = GNU_MAGIC - - # create extended header + name blocks. - return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ - cls._create_payload(name) - - @classmethod - def _create_pax_generic_header(cls, pax_headers, type, encoding): - """Return a POSIX.1-2008 extended or global header sequence - that contains a list of keyword, value pairs. The values - must be strings. - """ - # Check if one of the fields contains surrogate characters and thereby - # forces hdrcharset=BINARY, see _proc_pax() for more information. - binary = False - for keyword, value in pax_headers.items(): - try: - value.encode("utf8", "strict") - except UnicodeEncodeError: - binary = True - break - - records = b"" - if binary: - # Put the hdrcharset field at the beginning of the header. - records += b"21 hdrcharset=BINARY\n" - - for keyword, value in pax_headers.items(): - keyword = keyword.encode("utf8") - if binary: - # Try to restore the original byte representation of `value'. - # Needless to say, that the encoding must match the string. - value = value.encode(encoding, "surrogateescape") - else: - value = value.encode("utf8") - - l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' - n = p = 0 - while True: - n = l + len(str(p)) - if n == p: - break - p = n - records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" - - # We use a hardcoded "././@PaxHeader" name like star does - # instead of the one that POSIX recommends. - info = {} - info["name"] = "././@PaxHeader" - info["type"] = type - info["size"] = len(records) - info["magic"] = POSIX_MAGIC - - # Create pax header + record blocks. - return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ - cls._create_payload(records) - - @classmethod - def frombuf(cls, buf, encoding, errors): - """Construct a TarInfo object from a 512 byte bytes object. 
- """ - if len(buf) == 0: - raise EmptyHeaderError("empty header") - if len(buf) != BLOCKSIZE: - raise TruncatedHeaderError("truncated header") - if buf.count(NUL) == BLOCKSIZE: - raise EOFHeaderError("end of file header") - - chksum = nti(buf[148:156]) - if chksum not in calc_chksums(buf): - raise InvalidHeaderError("bad checksum") - - obj = cls() - obj.name = nts(buf[0:100], encoding, errors) - obj.mode = nti(buf[100:108]) - obj.uid = nti(buf[108:116]) - obj.gid = nti(buf[116:124]) - obj.size = nti(buf[124:136]) - obj.mtime = nti(buf[136:148]) - obj.chksum = chksum - obj.type = buf[156:157] - obj.linkname = nts(buf[157:257], encoding, errors) - obj.uname = nts(buf[265:297], encoding, errors) - obj.gname = nts(buf[297:329], encoding, errors) - obj.devmajor = nti(buf[329:337]) - obj.devminor = nti(buf[337:345]) - prefix = nts(buf[345:500], encoding, errors) - - # Old V7 tar format represents a directory as a regular - # file with a trailing slash. - if obj.type == AREGTYPE and obj.name.endswith("/"): - obj.type = DIRTYPE - - # The old GNU sparse format occupies some of the unused - # space in the buffer for up to 4 sparse structures. - # Save the them for later processing in _proc_sparse(). - if obj.type == GNUTYPE_SPARSE: - pos = 386 - structs = [] - for i in range(4): - try: - offset = nti(buf[pos:pos + 12]) - numbytes = nti(buf[pos + 12:pos + 24]) - except ValueError: - break - structs.append((offset, numbytes)) - pos += 24 - isextended = bool(buf[482]) - origsize = nti(buf[483:495]) - obj._sparse_structs = (structs, isextended, origsize) - - # Remove redundant slashes from directories. - if obj.isdir(): - obj.name = obj.name.rstrip("/") - - # Reconstruct a ustar longname. - if prefix and obj.type not in GNU_TYPES: - obj.name = prefix + "/" + obj.name - return obj - - @classmethod - def fromtarfile(cls, tarfile): - """Return the next TarInfo object from TarFile object - tarfile. - """ - buf = tarfile.fileobj.read(BLOCKSIZE) - obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) - obj.offset = tarfile.fileobj.tell() - BLOCKSIZE - return obj._proc_member(tarfile) - - #-------------------------------------------------------------------------- - # The following are methods that are called depending on the type of a - # member. The entry point is _proc_member() which can be overridden in a - # subclass to add custom _proc_*() methods. A _proc_*() method MUST - # implement the following - # operations: - # 1. Set self.offset_data to the position where the data blocks begin, - # if there is data that follows. - # 2. Set tarfile.offset to the position where the next member's header will - # begin. - # 3. Return self or another valid TarInfo object. - def _proc_member(self, tarfile): - """Choose the right processing method depending on - the type and call it. - """ - if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): - return self._proc_gnulong(tarfile) - elif self.type == GNUTYPE_SPARSE: - return self._proc_sparse(tarfile) - elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): - return self._proc_pax(tarfile) - else: - return self._proc_builtin(tarfile) - - def _proc_builtin(self, tarfile): - """Process a builtin type or an unknown type which - will be treated as a regular file. - """ - self.offset_data = tarfile.fileobj.tell() - offset = self.offset_data - if self.isreg() or self.type not in SUPPORTED_TYPES: - # Skip the following data blocks. - offset += self._block(self.size) - tarfile.offset = offset - - # Patch the TarInfo object with saved global - # header information. 
- self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) - - return self - - def _proc_gnulong(self, tarfile): - """Process the blocks that hold a GNU longname - or longlink member. - """ - buf = tarfile.fileobj.read(self._block(self.size)) - - # Fetch the next header and process it. - try: - next = self.fromtarfile(tarfile) - except HeaderError: - raise SubsequentHeaderError("missing or bad subsequent header") - - # Patch the TarInfo object from the next header with - # the longname information. - next.offset = self.offset - if self.type == GNUTYPE_LONGNAME: - next.name = nts(buf, tarfile.encoding, tarfile.errors) - elif self.type == GNUTYPE_LONGLINK: - next.linkname = nts(buf, tarfile.encoding, tarfile.errors) - - return next - - def _proc_sparse(self, tarfile): - """Process a GNU sparse header plus extra headers. - """ - # We already collected some sparse structures in frombuf(). - structs, isextended, origsize = self._sparse_structs - del self._sparse_structs - - # Collect sparse structures from extended header blocks. - while isextended: - buf = tarfile.fileobj.read(BLOCKSIZE) - pos = 0 - for i in range(21): - try: - offset = nti(buf[pos:pos + 12]) - numbytes = nti(buf[pos + 12:pos + 24]) - except ValueError: - break - if offset and numbytes: - structs.append((offset, numbytes)) - pos += 24 - isextended = bool(buf[504]) - self.sparse = structs - - self.offset_data = tarfile.fileobj.tell() - tarfile.offset = self.offset_data + self._block(self.size) - self.size = origsize - return self - - def _proc_pax(self, tarfile): - """Process an extended or global header as described in - POSIX.1-2008. - """ - # Read the header information. - buf = tarfile.fileobj.read(self._block(self.size)) - - # A pax header stores supplemental information for either - # the following file (extended) or all following files - # (global). - if self.type == XGLTYPE: - pax_headers = tarfile.pax_headers - else: - pax_headers = tarfile.pax_headers.copy() - - # Check if the pax header contains a hdrcharset field. This tells us - # the encoding of the path, linkpath, uname and gname fields. Normally, - # these fields are UTF-8 encoded but since POSIX.1-2008 tar - # implementations are allowed to store them as raw binary strings if - # the translation to UTF-8 fails. - match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) - if match is not None: - pax_headers["hdrcharset"] = match.group(1).decode("utf8") - - # For the time being, we don't care about anything other than "BINARY". - # The only other value that is currently allowed by the standard is - # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. - hdrcharset = pax_headers.get("hdrcharset") - if hdrcharset == "BINARY": - encoding = tarfile.encoding - else: - encoding = "utf8" - - # Parse pax header information. A record looks like that: - # "%d %s=%s\n" % (length, keyword, value). length is the size - # of the complete record including the length field itself and - # the newline. keyword and value are both UTF-8 encoded strings. - regex = re.compile(br"(\d+) ([^=]+)=") - pos = 0 - while True: - match = regex.match(buf, pos) - if not match: - break - - length, keyword = match.groups() - length = int(length) - value = buf[match.end(2) + 1:match.start(1) + length - 1] - - # Normally, we could just use "utf8" as the encoding and "strict" - # as the error handler, but we better not take the risk. 
For - # example, GNU tar <= 1.23 is known to store filenames it cannot - # translate to UTF-8 as raw strings (unfortunately without a - # hdrcharset=BINARY header). - # We first try the strict standard encoding, and if that fails we - # fall back on the user's encoding and error handler. - keyword = self._decode_pax_field(keyword, "utf8", "utf8", - tarfile.errors) - if keyword in PAX_NAME_FIELDS: - value = self._decode_pax_field(value, encoding, tarfile.encoding, - tarfile.errors) - else: - value = self._decode_pax_field(value, "utf8", "utf8", - tarfile.errors) - - pax_headers[keyword] = value - pos += length - - # Fetch the next header. - try: - next = self.fromtarfile(tarfile) - except HeaderError: - raise SubsequentHeaderError("missing or bad subsequent header") - - # Process GNU sparse information. - if "GNU.sparse.map" in pax_headers: - # GNU extended sparse format version 0.1. - self._proc_gnusparse_01(next, pax_headers) - - elif "GNU.sparse.size" in pax_headers: - # GNU extended sparse format version 0.0. - self._proc_gnusparse_00(next, pax_headers, buf) - - elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": - # GNU extended sparse format version 1.0. - self._proc_gnusparse_10(next, pax_headers, tarfile) - - if self.type in (XHDTYPE, SOLARIS_XHDTYPE): - # Patch the TarInfo object with the extended header info. - next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) - next.offset = self.offset - - if "size" in pax_headers: - # If the extended header replaces the size field, - # we need to recalculate the offset where the next - # header starts. - offset = next.offset_data - if next.isreg() or next.type not in SUPPORTED_TYPES: - offset += next._block(next.size) - tarfile.offset = offset - - return next - - def _proc_gnusparse_00(self, next, pax_headers, buf): - """Process a GNU tar extended sparse header, version 0.0. - """ - offsets = [] - for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): - offsets.append(int(match.group(1))) - numbytes = [] - for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): - numbytes.append(int(match.group(1))) - next.sparse = list(zip(offsets, numbytes)) - - def _proc_gnusparse_01(self, next, pax_headers): - """Process a GNU tar extended sparse header, version 0.1. - """ - sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] - next.sparse = list(zip(sparse[::2], sparse[1::2])) - - def _proc_gnusparse_10(self, next, pax_headers, tarfile): - """Process a GNU tar extended sparse header, version 1.0. - """ - fields = None - sparse = [] - buf = tarfile.fileobj.read(BLOCKSIZE) - fields, buf = buf.split(b"\n", 1) - fields = int(fields) - while len(sparse) < fields * 2: - if b"\n" not in buf: - buf += tarfile.fileobj.read(BLOCKSIZE) - number, buf = buf.split(b"\n", 1) - sparse.append(int(number)) - next.offset_data = tarfile.fileobj.tell() - next.sparse = list(zip(sparse[::2], sparse[1::2])) - - def _apply_pax_info(self, pax_headers, encoding, errors): - """Replace fields with supplemental information from a previous - pax extended or global header. 
- """ - for keyword, value in pax_headers.items(): - if keyword == "GNU.sparse.name": - setattr(self, "path", value) - elif keyword == "GNU.sparse.size": - setattr(self, "size", int(value)) - elif keyword == "GNU.sparse.realsize": - setattr(self, "size", int(value)) - elif keyword in PAX_FIELDS: - if keyword in PAX_NUMBER_FIELDS: - try: - value = PAX_NUMBER_FIELDS[keyword](value) - except ValueError: - value = 0 - if keyword == "path": - value = value.rstrip("/") - setattr(self, keyword, value) - - self.pax_headers = pax_headers.copy() - - def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): - """Decode a single field from a pax record. - """ - try: - return value.decode(encoding, "strict") - except UnicodeDecodeError: - return value.decode(fallback_encoding, fallback_errors) - - def _block(self, count): - """Round up a byte count by BLOCKSIZE and return it, - e.g. _block(834) => 1024. - """ - blocks, remainder = divmod(count, BLOCKSIZE) - if remainder: - blocks += 1 - return blocks * BLOCKSIZE - - def isreg(self): - return self.type in REGULAR_TYPES - def isfile(self): - return self.isreg() - def isdir(self): - return self.type == DIRTYPE - def issym(self): - return self.type == SYMTYPE - def islnk(self): - return self.type == LNKTYPE - def ischr(self): - return self.type == CHRTYPE - def isblk(self): - return self.type == BLKTYPE - def isfifo(self): - return self.type == FIFOTYPE - def issparse(self): - return self.sparse is not None - def isdev(self): - return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) -# class TarInfo - -class TarFile(object): - """The TarFile Class provides an interface to tar archives. - """ - - debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) - - dereference = False # If true, add content of linked file to the - # tar file, else the link. - - ignore_zeros = False # If true, skips empty or invalid blocks and - # continues processing. - - errorlevel = 1 # If 0, fatal errors only appear in debug - # messages (if debug >= 0). If > 0, errors - # are passed to the caller as exceptions. - - format = DEFAULT_FORMAT # The format to use when creating an archive. - - encoding = ENCODING # Encoding for 8-bit character strings. - - errors = None # Error handler for unicode conversion. - - tarinfo = TarInfo # The default TarInfo class to use. - - fileobject = ExFileObject # The default ExFileObject class to use. - - def __init__(self, name=None, mode="r", fileobj=None, format=None, - tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, - errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): - """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. `mode' - defaults to 'r'. - If `fileobj' is given, it is used for reading or writing data. If it - can be determined, `mode' is overridden by `fileobj's mode. - `fileobj' is not closed, when TarFile is closed. - """ - if len(mode) > 1 or mode not in "raw": - raise ValueError("mode must be 'r', 'a' or 'w'") - self.mode = mode - self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] - - if not fileobj: - if self.mode == "a" and not os.path.exists(name): - # Create nonexistent files in append mode. 
- self.mode = "w" - self._mode = "wb" - fileobj = bltn_open(name, self._mode) - self._extfileobj = False - else: - if name is None and hasattr(fileobj, "name"): - name = fileobj.name - if hasattr(fileobj, "mode"): - self._mode = fileobj.mode - self._extfileobj = True - self.name = os.path.abspath(name) if name else None - self.fileobj = fileobj - - # Init attributes. - if format is not None: - self.format = format - if tarinfo is not None: - self.tarinfo = tarinfo - if dereference is not None: - self.dereference = dereference - if ignore_zeros is not None: - self.ignore_zeros = ignore_zeros - if encoding is not None: - self.encoding = encoding - self.errors = errors - - if pax_headers is not None and self.format == PAX_FORMAT: - self.pax_headers = pax_headers - else: - self.pax_headers = {} - - if debug is not None: - self.debug = debug - if errorlevel is not None: - self.errorlevel = errorlevel - - # Init datastructures. - self.closed = False - self.members = [] # list of members as TarInfo objects - self._loaded = False # flag if all members have been read - self.offset = self.fileobj.tell() - # current position in the archive file - self.inodes = {} # dictionary caching the inodes of - # archive members already added - - try: - if self.mode == "r": - self.firstmember = None - self.firstmember = self.next() - - if self.mode == "a": - # Move to the end of the archive, - # before the first empty block. - while True: - self.fileobj.seek(self.offset) - try: - tarinfo = self.tarinfo.fromtarfile(self) - self.members.append(tarinfo) - except EOFHeaderError: - self.fileobj.seek(self.offset) - break - except HeaderError as e: - raise ReadError(str(e)) - - if self.mode in "aw": - self._loaded = True - - if self.pax_headers: - buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) - self.fileobj.write(buf) - self.offset += len(buf) - except: - if not self._extfileobj: - self.fileobj.close() - self.closed = True - raise - - #-------------------------------------------------------------------------- - # Below are the classmethods which act as alternate constructors to the - # TarFile class. The open() method is the only one that is needed for - # public use; it is the "super"-constructor and is able to select an - # adequate "sub"-constructor for a particular compression using the mapping - # from OPEN_METH. - # - # This concept allows one to subclass TarFile without losing the comfort of - # the super-constructor. A sub-constructor is registered and made available - # by adding it to the mapping in OPEN_METH. - - @classmethod - def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): - """Open a tar archive for reading, writing or appending. Return - an appropriate TarFile class. 
- - mode: - 'r' or 'r:*' open for reading with transparent compression - 'r:' open for reading exclusively uncompressed - 'r:gz' open for reading with gzip compression - 'r:bz2' open for reading with bzip2 compression - 'a' or 'a:' open for appending, creating the file if necessary - 'w' or 'w:' open for writing without compression - 'w:gz' open for writing with gzip compression - 'w:bz2' open for writing with bzip2 compression - - 'r|*' open a stream of tar blocks with transparent compression - 'r|' open an uncompressed stream of tar blocks for reading - 'r|gz' open a gzip compressed stream of tar blocks - 'r|bz2' open a bzip2 compressed stream of tar blocks - 'w|' open an uncompressed stream for writing - 'w|gz' open a gzip compressed stream for writing - 'w|bz2' open a bzip2 compressed stream for writing - """ - - if not name and not fileobj: - raise ValueError("nothing to open") - - if mode in ("r", "r:*"): - # Find out which *open() is appropriate for opening the file. - for comptype in cls.OPEN_METH: - func = getattr(cls, cls.OPEN_METH[comptype]) - if fileobj is not None: - saved_pos = fileobj.tell() - try: - return func(name, "r", fileobj, **kwargs) - except (ReadError, CompressionError) as e: - if fileobj is not None: - fileobj.seek(saved_pos) - continue - raise ReadError("file could not be opened successfully") - - elif ":" in mode: - filemode, comptype = mode.split(":", 1) - filemode = filemode or "r" - comptype = comptype or "tar" - - # Select the *open() function according to - # given compression. - if comptype in cls.OPEN_METH: - func = getattr(cls, cls.OPEN_METH[comptype]) - else: - raise CompressionError("unknown compression type %r" % comptype) - return func(name, filemode, fileobj, **kwargs) - - elif "|" in mode: - filemode, comptype = mode.split("|", 1) - filemode = filemode or "r" - comptype = comptype or "tar" - - if filemode not in "rw": - raise ValueError("mode must be 'r' or 'w'") - - stream = _Stream(name, filemode, comptype, fileobj, bufsize) - try: - t = cls(name, filemode, stream, **kwargs) - except: - stream.close() - raise - t._extfileobj = False - return t - - elif mode in "aw": - return cls.taropen(name, mode, fileobj, **kwargs) - - raise ValueError("undiscernible mode") - - @classmethod - def taropen(cls, name, mode="r", fileobj=None, **kwargs): - """Open uncompressed tar archive name for reading or writing. - """ - if len(mode) > 1 or mode not in "raw": - raise ValueError("mode must be 'r', 'a' or 'w'") - return cls(name, mode, fileobj, **kwargs) - - @classmethod - def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): - """Open gzip compressed tar archive name for reading or writing. - Appending is not allowed. - """ - if len(mode) > 1 or mode not in "rw": - raise ValueError("mode must be 'r' or 'w'") - - try: - import gzip - gzip.GzipFile - except (ImportError, AttributeError): - raise CompressionError("gzip module is not available") - - extfileobj = fileobj is not None - try: - fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) - t = cls.taropen(name, mode, fileobj, **kwargs) - except IOError: - if not extfileobj and fileobj is not None: - fileobj.close() - if fileobj is None: - raise - raise ReadError("not a gzip file") - except: - if not extfileobj and fileobj is not None: - fileobj.close() - raise - t._extfileobj = extfileobj - return t - - @classmethod - def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): - """Open bzip2 compressed tar archive name for reading or writing. 
- Appending is not allowed. - """ - if len(mode) > 1 or mode not in "rw": - raise ValueError("mode must be 'r' or 'w'.") - - try: - import bz2 - except ImportError: - raise CompressionError("bz2 module is not available") - - if fileobj is not None: - fileobj = _BZ2Proxy(fileobj, mode) - else: - fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) - - try: - t = cls.taropen(name, mode, fileobj, **kwargs) - except (IOError, EOFError): - fileobj.close() - raise ReadError("not a bzip2 file") - t._extfileobj = False - return t - - # All *open() methods are registered here. - OPEN_METH = { - "tar": "taropen", # uncompressed tar - "gz": "gzopen", # gzip compressed tar - "bz2": "bz2open" # bzip2 compressed tar - } - - #-------------------------------------------------------------------------- - # The public methods which TarFile provides: - - def close(self): - """Close the TarFile. In write-mode, two finishing zero blocks are - appended to the archive. - """ - if self.closed: - return - - if self.mode in "aw": - self.fileobj.write(NUL * (BLOCKSIZE * 2)) - self.offset += (BLOCKSIZE * 2) - # fill up the end with zero-blocks - # (like option -b20 for tar does) - blocks, remainder = divmod(self.offset, RECORDSIZE) - if remainder > 0: - self.fileobj.write(NUL * (RECORDSIZE - remainder)) - - if not self._extfileobj: - self.fileobj.close() - self.closed = True - - def getmember(self, name): - """Return a TarInfo object for member `name'. If `name' can not be - found in the archive, KeyError is raised. If a member occurs more - than once in the archive, its last occurrence is assumed to be the - most up-to-date version. - """ - tarinfo = self._getmember(name) - if tarinfo is None: - raise KeyError("filename %r not found" % name) - return tarinfo - - def getmembers(self): - """Return the members of the archive as a list of TarInfo objects. The - list has the same order as the members in the archive. - """ - self._check() - if not self._loaded: # if we want to obtain a list of - self._load() # all members, we first have to - # scan the whole archive. - return self.members - - def getnames(self): - """Return the members of the archive as a list of their names. It has - the same order as the list returned by getmembers(). - """ - return [tarinfo.name for tarinfo in self.getmembers()] - - def gettarinfo(self, name=None, arcname=None, fileobj=None): - """Create a TarInfo object for either the file `name' or the file - object `fileobj' (using os.fstat on its file descriptor). You can - modify some of the TarInfo's attributes before you add it using - addfile(). If given, `arcname' specifies an alternative name for the - file in the archive. - """ - self._check("aw") - - # When fileobj is given, replace name by - # fileobj's real name. - if fileobj is not None: - name = fileobj.name - - # Building the name of the member in the archive. - # Backward slashes are converted to forward slashes, - # Absolute paths are turned to relative paths. - if arcname is None: - arcname = name - drv, arcname = os.path.splitdrive(arcname) - arcname = arcname.replace(os.sep, "/") - arcname = arcname.lstrip("/") - - # Now, fill the TarInfo object with - # information specific for the file. - tarinfo = self.tarinfo() - tarinfo.tarfile = self - - # Use os.stat or os.lstat, depending on platform - # and if symlinks shall be resolved. 
- if fileobj is None: - if hasattr(os, "lstat") and not self.dereference: - statres = os.lstat(name) - else: - statres = os.stat(name) - else: - statres = os.fstat(fileobj.fileno()) - linkname = "" - - stmd = statres.st_mode - if stat.S_ISREG(stmd): - inode = (statres.st_ino, statres.st_dev) - if not self.dereference and statres.st_nlink > 1 and \ - inode in self.inodes and arcname != self.inodes[inode]: - # Is it a hardlink to an already - # archived file? - type = LNKTYPE - linkname = self.inodes[inode] - else: - # The inode is added only if its valid. - # For win32 it is always 0. - type = REGTYPE - if inode[0]: - self.inodes[inode] = arcname - elif stat.S_ISDIR(stmd): - type = DIRTYPE - elif stat.S_ISFIFO(stmd): - type = FIFOTYPE - elif stat.S_ISLNK(stmd): - type = SYMTYPE - linkname = os.readlink(name) - elif stat.S_ISCHR(stmd): - type = CHRTYPE - elif stat.S_ISBLK(stmd): - type = BLKTYPE - else: - return None - - # Fill the TarInfo object with all - # information we can get. - tarinfo.name = arcname - tarinfo.mode = stmd - tarinfo.uid = statres.st_uid - tarinfo.gid = statres.st_gid - if type == REGTYPE: - tarinfo.size = statres.st_size - else: - tarinfo.size = 0 - tarinfo.mtime = statres.st_mtime - tarinfo.type = type - tarinfo.linkname = linkname - if pwd: - try: - tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] - except KeyError: - pass - if grp: - try: - tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] - except KeyError: - pass - - if type in (CHRTYPE, BLKTYPE): - if hasattr(os, "major") and hasattr(os, "minor"): - tarinfo.devmajor = os.major(statres.st_rdev) - tarinfo.devminor = os.minor(statres.st_rdev) - return tarinfo - - def list(self, verbose=True): - """Print a table of contents to sys.stdout. If `verbose' is False, only - the names of the members are printed. If it is True, an `ls -l'-like - output is produced. - """ - self._check() - - for tarinfo in self: - if verbose: - print(filemode(tarinfo.mode), end=' ') - print("%s/%s" % (tarinfo.uname or tarinfo.uid, - tarinfo.gname or tarinfo.gid), end=' ') - if tarinfo.ischr() or tarinfo.isblk(): - print("%10s" % ("%d,%d" \ - % (tarinfo.devmajor, tarinfo.devminor)), end=' ') - else: - print("%10d" % tarinfo.size, end=' ') - print("%d-%02d-%02d %02d:%02d:%02d" \ - % time.localtime(tarinfo.mtime)[:6], end=' ') - - print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') - - if verbose: - if tarinfo.issym(): - print("->", tarinfo.linkname, end=' ') - if tarinfo.islnk(): - print("link to", tarinfo.linkname, end=' ') - print() - - def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): - """Add the file `name' to the archive. `name' may be any type of file - (directory, fifo, symbolic link, etc.). If given, `arcname' - specifies an alternative name for the file in the archive. - Directories are added recursively by default. This can be avoided by - setting `recursive' to False. `exclude' is a function that should - return True for each filename to be excluded. `filter' is a function - that expects a TarInfo object argument and returns the changed - TarInfo object, if it returns None the TarInfo object will be - excluded from the archive. - """ - self._check("aw") - - if arcname is None: - arcname = name - - # Exclude pathnames. - if exclude is not None: - import warnings - warnings.warn("use the filter argument instead", - DeprecationWarning, 2) - if exclude(name): - self._dbg(2, "tarfile: Excluded %r" % name) - return - - # Skip if somebody tries to archive the archive... 
- if self.name is not None and os.path.abspath(name) == self.name: - self._dbg(2, "tarfile: Skipped %r" % name) - return - - self._dbg(1, name) - - # Create a TarInfo object from the file. - tarinfo = self.gettarinfo(name, arcname) - - if tarinfo is None: - self._dbg(1, "tarfile: Unsupported type %r" % name) - return - - # Change or exclude the TarInfo object. - if filter is not None: - tarinfo = filter(tarinfo) - if tarinfo is None: - self._dbg(2, "tarfile: Excluded %r" % name) - return - - # Append the tar header and data to the archive. - if tarinfo.isreg(): - f = bltn_open(name, "rb") - self.addfile(tarinfo, f) - f.close() - - elif tarinfo.isdir(): - self.addfile(tarinfo) - if recursive: - for f in os.listdir(name): - self.add(os.path.join(name, f), os.path.join(arcname, f), - recursive, exclude, filter=filter) - - else: - self.addfile(tarinfo) - - def addfile(self, tarinfo, fileobj=None): - """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is - given, tarinfo.size bytes are read from it and added to the archive. - You can create TarInfo objects using gettarinfo(). - On Windows platforms, `fileobj' should always be opened with mode - 'rb' to avoid irritation about the file size. - """ - self._check("aw") - - tarinfo = copy.copy(tarinfo) - - buf = tarinfo.tobuf(self.format, self.encoding, self.errors) - self.fileobj.write(buf) - self.offset += len(buf) - - # If there's data to follow, append it. - if fileobj is not None: - copyfileobj(fileobj, self.fileobj, tarinfo.size) - blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) - if remainder > 0: - self.fileobj.write(NUL * (BLOCKSIZE - remainder)) - blocks += 1 - self.offset += blocks * BLOCKSIZE - - self.members.append(tarinfo) - - def extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. `members' is optional and must be a subset of the - list returned by getmembers(). - """ - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. - directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 0o700 - # Do not set_attrs directories, as we will do that further down - self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) - - # Reverse sort directories. - directories.sort(key=lambda a: a.name) - directories.reverse() - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - def extract(self, member, path="", set_attrs=True): - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. `member' may be a filename or a TarInfo object. You can - specify a different directory using `path'. File attributes (owner, - mtime, mode) are set unless `set_attrs' is False. - """ - self._check("r") - - if isinstance(member, str): - tarinfo = self.getmember(member) - else: - tarinfo = member - - # Prepare the link target for makelink(). 
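The filter callback accepted by add() above (return a possibly modified TarInfo, or None to drop the member) is the supported hook for rewriting metadata while archiving. A minimal sketch, written against the standard-library tarfile module, which exposes the same interface as this vendored copy; the paths are hypothetical:

    import tarfile

    def strip_owner(tarinfo):
        # Drop compiled files entirely and anonymise ownership on the rest.
        if tarinfo.name.endswith(".pyc"):
            return None
        tarinfo.uid = tarinfo.gid = 0
        tarinfo.uname = tarinfo.gname = "root"
        return tarinfo

    # with tarfile.open("project.tar", "w") as tf:
    #     tf.add("project/", arcname="project", filter=strip_owner)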
- if tarinfo.islnk(): - tarinfo._link_target = os.path.join(path, tarinfo.linkname) - - try: - self._extract_member(tarinfo, os.path.join(path, tarinfo.name), - set_attrs=set_attrs) - except EnvironmentError as e: - if self.errorlevel > 0: - raise - else: - if e.filename is None: - self._dbg(1, "tarfile: %s" % e.strerror) - else: - self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) - except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - def extractfile(self, member): - """Extract a member from the archive as a file object. `member' may be - a filename or a TarInfo object. If `member' is a regular file, a - file-like object is returned. If `member' is a link, a file-like - object is constructed from the link's target. If `member' is none of - the above, None is returned. - The file-like object is read-only and provides the following - methods: read(), readline(), readlines(), seek() and tell() - """ - self._check("r") - - if isinstance(member, str): - tarinfo = self.getmember(member) - else: - tarinfo = member - - if tarinfo.isreg(): - return self.fileobject(self, tarinfo) - - elif tarinfo.type not in SUPPORTED_TYPES: - # If a member's type is unknown, it is treated as a - # regular file. - return self.fileobject(self, tarinfo) - - elif tarinfo.islnk() or tarinfo.issym(): - if isinstance(self.fileobj, _Stream): - # A small but ugly workaround for the case that someone tries - # to extract a (sym)link as a file-object from a non-seekable - # stream of tar blocks. - raise StreamError("cannot extract (sym)link as file object") - else: - # A (sym)link's file object is its target's file object. - return self.extractfile(self._find_link_target(tarinfo)) - else: - # If there's no data associated with the member (directory, chrdev, - # blkdev, etc.), return None instead of a file object. - return None - - def _extract_member(self, tarinfo, targetpath, set_attrs=True): - """Extract the TarInfo object tarinfo to a physical - file called targetpath. - """ - # Fetch the TarInfo object for the given name - # and build the destination pathname, replacing - # forward slashes to platform specific separators. - targetpath = targetpath.rstrip("/") - targetpath = targetpath.replace("/", os.sep) - - # Create all upper directories. - upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - # Create directories that are not part of the archive with - # default permissions. - os.makedirs(upperdirs) - - if tarinfo.islnk() or tarinfo.issym(): - self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) - else: - self._dbg(1, tarinfo.name) - - if tarinfo.isreg(): - self.makefile(tarinfo, targetpath) - elif tarinfo.isdir(): - self.makedir(tarinfo, targetpath) - elif tarinfo.isfifo(): - self.makefifo(tarinfo, targetpath) - elif tarinfo.ischr() or tarinfo.isblk(): - self.makedev(tarinfo, targetpath) - elif tarinfo.islnk() or tarinfo.issym(): - self.makelink(tarinfo, targetpath) - elif tarinfo.type not in SUPPORTED_TYPES: - self.makeunknown(tarinfo, targetpath) - else: - self.makefile(tarinfo, targetpath) - - if set_attrs: - self.chown(tarinfo, targetpath) - if not tarinfo.issym(): - self.chmod(tarinfo, targetpath) - self.utime(tarinfo, targetpath) - - #-------------------------------------------------------------------------- - # Below are the different file methods. They are called via - # _extract_member() when extract() is called. They can be replaced in a - # subclass to implement other functionality. 
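As the comment above notes, the make*() methods are the extension points used during extraction and can be overridden in a subclass. A minimal sketch of such a subclass, written against the standard-library tarfile (which provides the same makefile() hook as this vendored copy); the hashing behaviour is purely illustrative:

    import hashlib
    import tarfile

    class HashingTarFile(tarfile.TarFile):
        # Record a SHA-256 digest of every regular file that gets extracted.
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.digests = {}

        def makefile(self, tarinfo, targetpath):
            super().makefile(tarinfo, targetpath)   # write the file as usual
            with open(targetpath, "rb") as f:
                self.digests[tarinfo.name] = hashlib.sha256(f.read()).hexdigest()

    # with HashingTarFile.open("example.tar") as tf:   # hypothetical archive
    #     tf.extractall("out")
    #     print(tf.digests)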
- - def makedir(self, tarinfo, targetpath): - """Make a directory called targetpath. - """ - try: - # Use a safe mode for the directory, the real mode is set - # later in _extract_member(). - os.mkdir(targetpath, 0o700) - except EnvironmentError as e: - if e.errno != errno.EEXIST: - raise - - def makefile(self, tarinfo, targetpath): - """Make a file called targetpath. - """ - source = self.fileobj - source.seek(tarinfo.offset_data) - target = bltn_open(targetpath, "wb") - if tarinfo.sparse is not None: - for offset, size in tarinfo.sparse: - target.seek(offset) - copyfileobj(source, target, size) - else: - copyfileobj(source, target, tarinfo.size) - target.seek(tarinfo.size) - target.truncate() - target.close() - - def makeunknown(self, tarinfo, targetpath): - """Make a file from a TarInfo object with an unknown type - at targetpath. - """ - self.makefile(tarinfo, targetpath) - self._dbg(1, "tarfile: Unknown file type %r, " \ - "extracted as regular file." % tarinfo.type) - - def makefifo(self, tarinfo, targetpath): - """Make a fifo called targetpath. - """ - if hasattr(os, "mkfifo"): - os.mkfifo(targetpath) - else: - raise ExtractError("fifo not supported by system") - - def makedev(self, tarinfo, targetpath): - """Make a character or block device called targetpath. - """ - if not hasattr(os, "mknod") or not hasattr(os, "makedev"): - raise ExtractError("special devices not supported by system") - - mode = tarinfo.mode - if tarinfo.isblk(): - mode |= stat.S_IFBLK - else: - mode |= stat.S_IFCHR - - os.mknod(targetpath, mode, - os.makedev(tarinfo.devmajor, tarinfo.devminor)) - - def makelink(self, tarinfo, targetpath): - """Make a (symbolic) link called targetpath. If it cannot be created - (platform limitation), we try to make a copy of the referenced file - instead of a link. - """ - try: - # For systems that support symbolic and hard links. - if tarinfo.issym(): - os.symlink(tarinfo.linkname, targetpath) - else: - # See extract(). - if os.path.exists(tarinfo._link_target): - os.link(tarinfo._link_target, targetpath) - else: - self._extract_member(self._find_link_target(tarinfo), - targetpath) - except symlink_exception: - if tarinfo.issym(): - linkpath = os.path.join(os.path.dirname(tarinfo.name), - tarinfo.linkname) - else: - linkpath = tarinfo.linkname - else: - try: - self._extract_member(self._find_link_target(tarinfo), - targetpath) - except KeyError: - raise ExtractError("unable to resolve link inside archive") - - def chown(self, tarinfo, targetpath): - """Set owner of targetpath according to tarinfo. - """ - if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: - # We have to be root to do so. - try: - g = grp.getgrnam(tarinfo.gname)[2] - except KeyError: - g = tarinfo.gid - try: - u = pwd.getpwnam(tarinfo.uname)[2] - except KeyError: - u = tarinfo.uid - try: - if tarinfo.issym() and hasattr(os, "lchown"): - os.lchown(targetpath, u, g) - else: - if sys.platform != "os2emx": - os.chown(targetpath, u, g) - except EnvironmentError as e: - raise ExtractError("could not change owner") - - def chmod(self, tarinfo, targetpath): - """Set file permissions of targetpath according to tarinfo. - """ - if hasattr(os, 'chmod'): - try: - os.chmod(targetpath, tarinfo.mode) - except EnvironmentError as e: - raise ExtractError("could not change mode") - - def utime(self, tarinfo, targetpath): - """Set modification time of targetpath according to tarinfo. 
- """ - if not hasattr(os, 'utime'): - return - try: - os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) - except EnvironmentError as e: - raise ExtractError("could not change modification time") - - #-------------------------------------------------------------------------- - def next(self): - """Return the next member of the archive as a TarInfo object, when - TarFile is opened for reading. Return None if there is no more - available. - """ - self._check("ra") - if self.firstmember is not None: - m = self.firstmember - self.firstmember = None - return m - - # Read the next block. - self.fileobj.seek(self.offset) - tarinfo = None - while True: - try: - tarinfo = self.tarinfo.fromtarfile(self) - except EOFHeaderError as e: - if self.ignore_zeros: - self._dbg(2, "0x%X: %s" % (self.offset, e)) - self.offset += BLOCKSIZE - continue - except InvalidHeaderError as e: - if self.ignore_zeros: - self._dbg(2, "0x%X: %s" % (self.offset, e)) - self.offset += BLOCKSIZE - continue - elif self.offset == 0: - raise ReadError(str(e)) - except EmptyHeaderError: - if self.offset == 0: - raise ReadError("empty file") - except TruncatedHeaderError as e: - if self.offset == 0: - raise ReadError(str(e)) - except SubsequentHeaderError as e: - raise ReadError(str(e)) - break - - if tarinfo is not None: - self.members.append(tarinfo) - else: - self._loaded = True - - return tarinfo - - #-------------------------------------------------------------------------- - # Little helper methods: - - def _getmember(self, name, tarinfo=None, normalize=False): - """Find an archive member by name from bottom to top. - If tarinfo is given, it is used as the starting point. - """ - # Ensure that all members have been loaded. - members = self.getmembers() - - # Limit the member search list up to tarinfo. - if tarinfo is not None: - members = members[:members.index(tarinfo)] - - if normalize: - name = os.path.normpath(name) - - for member in reversed(members): - if normalize: - member_name = os.path.normpath(member.name) - else: - member_name = member.name - - if name == member_name: - return member - - def _load(self): - """Read through the entire archive file and look for readable - members. - """ - while True: - tarinfo = self.next() - if tarinfo is None: - break - self._loaded = True - - def _check(self, mode=None): - """Check if TarFile is still open, and if the operation's mode - corresponds to TarFile's mode. - """ - if self.closed: - raise IOError("%s is closed" % self.__class__.__name__) - if mode is not None and self.mode not in mode: - raise IOError("bad operation for mode %r" % self.mode) - - def _find_link_target(self, tarinfo): - """Find the target member of a symlink or hardlink member in the - archive. - """ - if tarinfo.issym(): - # Always search the entire archive. - linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname - limit = None - else: - # Search the archive before the link, because a hard link is - # just a reference to an already archived file. - linkname = tarinfo.linkname - limit = tarinfo - - member = self._getmember(linkname, tarinfo=limit, normalize=True) - if member is None: - raise KeyError("linkname %r not found" % linkname) - return member - - def __iter__(self): - """Provide an iterator object. - """ - if self._loaded: - return iter(self.members) - else: - return TarIter(self) - - def _dbg(self, level, msg): - """Write debugging output to sys.stderr. 
- """ - if level <= self.debug: - print(msg, file=sys.stderr) - - def __enter__(self): - self._check() - return self - - def __exit__(self, type, value, traceback): - if type is None: - self.close() - else: - # An exception occurred. We must not call close() because - # it would try to write end-of-archive blocks and padding. - if not self._extfileobj: - self.fileobj.close() - self.closed = True -# class TarFile - -class TarIter(object): - """Iterator Class. - - for tarinfo in TarFile(...): - suite... - """ - - def __init__(self, tarfile): - """Construct a TarIter object. - """ - self.tarfile = tarfile - self.index = 0 - def __iter__(self): - """Return iterator object. - """ - return self - - def __next__(self): - """Return the next item using TarFile's next() method. - When all members have been read, set TarFile as _loaded. - """ - # Fix for SF #1100429: Under rare circumstances it can - # happen that getmembers() is called during iteration, - # which will cause TarIter to stop prematurely. - if not self.tarfile._loaded: - tarinfo = self.tarfile.next() - if not tarinfo: - self.tarfile._loaded = True - raise StopIteration - else: - try: - tarinfo = self.tarfile.members[self.index] - except IndexError: - raise StopIteration - self.index += 1 - return tarinfo - - next = __next__ # for Python 2.x - -#-------------------- -# exported functions -#-------------------- -def is_tarfile(name): - """Return True if name points to a tar archive that we - are able to handle, else return False. - """ - try: - t = open(name) - t.close() - return True - except TarError: - return False - -bltn_open = open -open = TarFile.open diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/compat.py b/venv/Lib/site-packages/pip/_vendor/distlib/compat.py deleted file mode 100644 index e594106..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/compat.py +++ /dev/null @@ -1,1122 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013-2017 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. 
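The module-level helpers that close the deleted tarfile backport above (is_tarfile(), the open alias for TarFile.open, and iteration via TarIter) combine into the usual reading idiom. A short sketch against the standard-library tarfile, with a hypothetical archive path:

    import tarfile

    def list_members(path):
        # is_tarfile() answers "can this be opened?"; iteration walks TarIter.
        if not tarfile.is_tarfile(path):
            print(path, "is not a tar archive")
            return
        with tarfile.open(path) as tf:      # module-level open == TarFile.open
            for member in tf:               # yields TarInfo objects lazily
                print(member.name, member.size)

    # list_members("example.tar")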
-# -from __future__ import absolute_import - -import os -import re -import sys - -try: - import ssl -except ImportError: # pragma: no cover - ssl = None - -if sys.version_info[0] < 3: # pragma: no cover - from StringIO import StringIO - string_types = basestring, - text_type = unicode - from types import FileType as file_type - import __builtin__ as builtins - import ConfigParser as configparser - from ._backport import shutil - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit - from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, - pathname2url, ContentTooShortError, splittype) - - def quote(s): - if isinstance(s, unicode): - s = s.encode('utf-8') - return _quote(s) - - import urllib2 - from urllib2 import (Request, urlopen, URLError, HTTPError, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) - if ssl: - from urllib2 import HTTPSHandler - import httplib - import xmlrpclib - import Queue as queue - from HTMLParser import HTMLParser - import htmlentitydefs - raw_input = raw_input - from itertools import ifilter as filter - from itertools import ifilterfalse as filterfalse - - # Leaving this around for now, in case it needs resurrecting in some way - # _userprog = None - # def splituser(host): - # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" - # global _userprog - # if _userprog is None: - # import re - # _userprog = re.compile('^(.*)@(.*)$') - - # match = _userprog.match(host) - # if match: return match.group(1, 2) - # return None, host - -else: # pragma: no cover - from io import StringIO - string_types = str, - text_type = str - from io import TextIOWrapper as file_type - import builtins - import configparser - import shutil - from urllib.parse import (urlparse, urlunparse, urljoin, quote, - unquote, urlsplit, urlunsplit, splittype) - from urllib.request import (urlopen, urlretrieve, Request, url2pathname, - pathname2url, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) - if ssl: - from urllib.request import HTTPSHandler - from urllib.error import HTTPError, URLError, ContentTooShortError - import http.client as httplib - import urllib.request as urllib2 - import xmlrpc.client as xmlrpclib - import queue - from html.parser import HTMLParser - import html.entities as htmlentitydefs - raw_input = input - from itertools import filterfalse - filter = filter - - -try: - from ssl import match_hostname, CertificateError -except ImportError: # pragma: no cover - class CertificateError(ValueError): - pass - - - def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - parts = dn.split('.') - leftmost, remainder = parts[0], parts[1:] - - wildcards = leftmost.count('*') - if wildcards > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. - # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. 
- if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - - def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. - if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") - - -try: - from types import SimpleNamespace as Container -except ImportError: # pragma: no cover - class Container(object): - """ - A generic container for when multiple values need to be returned - """ - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - -try: - from shutil import which -except ImportError: # pragma: no cover - # Implementation from Python 3.3 - def which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. - - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - """ - # Check that a given file can be accessed with the correct mode. - # Additionally check that `file` is not a directory, as on Windows - # directories pass the os.access check. - def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) - - # If we're given a path with a directory part, look it up directly rather - # than referring to PATH directories. This includes checking relative to the - # current directory, e.g. 
./script - if os.path.dirname(cmd): - if _access_check(cmd, mode): - return cmd - return None - - if path is None: - path = os.environ.get("PATH", os.defpath) - if not path: - return None - path = path.split(os.pathsep) - - if sys.platform == "win32": - # The current directory takes precedence on Windows. - if not os.curdir in path: - path.insert(0, os.curdir) - - # PATHEXT is necessary to check on Windows. - pathext = os.environ.get("PATHEXT", "").split(os.pathsep) - # See if the given file matches any of the expected path extensions. - # This will allow us to short circuit when given "python.exe". - # If it does match, only test that one, otherwise we have to try - # others. - if any(cmd.lower().endswith(ext.lower()) for ext in pathext): - files = [cmd] - else: - files = [cmd + ext for ext in pathext] - else: - # On other platforms you don't have things like PATHEXT to tell you - # what file suffixes are executable, so just pass on cmd as-is. - files = [cmd] - - seen = set() - for dir in path: - normdir = os.path.normcase(dir) - if not normdir in seen: - seen.add(normdir) - for thefile in files: - name = os.path.join(dir, thefile) - if _access_check(name, mode): - return name - return None - - -# ZipFile is a context manager in 2.7, but not in 2.6 - -from zipfile import ZipFile as BaseZipFile - -if hasattr(BaseZipFile, '__enter__'): # pragma: no cover - ZipFile = BaseZipFile -else: # pragma: no cover - from zipfile import ZipExtFile as BaseZipExtFile - - class ZipExtFile(BaseZipExtFile): - def __init__(self, base): - self.__dict__.update(base.__dict__) - - def __enter__(self): - return self - - def __exit__(self, *exc_info): - self.close() - # return None, so if an exception occurred, it will propagate - - class ZipFile(BaseZipFile): - def __enter__(self): - return self - - def __exit__(self, *exc_info): - self.close() - # return None, so if an exception occurred, it will propagate - - def open(self, *args, **kwargs): - base = BaseZipFile.open(self, *args, **kwargs) - return ZipExtFile(base) - -try: - from platform import python_implementation -except ImportError: # pragma: no cover - def python_implementation(): - """Return a string identifying the Python implementation.""" - if 'PyPy' in sys.version: - return 'PyPy' - if os.name == 'java': - return 'Jython' - if sys.version.startswith('IronPython'): - return 'IronPython' - return 'CPython' - -try: - import sysconfig -except ImportError: # pragma: no cover - from ._backport import sysconfig - -try: - callable = callable -except NameError: # pragma: no cover - from collections.abc import Callable - - def callable(obj): - return isinstance(obj, Callable) - - -try: - fsencode = os.fsencode - fsdecode = os.fsdecode -except AttributeError: # pragma: no cover - # Issue #99: on some systems (e.g. containerised), - # sys.getfilesystemencoding() returns None, and we need a real value, - # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and - # sys.getfilesystemencoding(): the return value is "the user’s preference - # according to the result of nl_langinfo(CODESET), or None if the - # nl_langinfo(CODESET) failed." 
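On interpreters that already provide them, os.fsencode() and os.fsdecode() behave like the fallback defined below: encode with the filesystem encoding and, except for mbcs, the surrogateescape error handler. A small round-trip sketch with a hypothetical filename:

    import os

    name = "wózek.txt"                  # any str filename, ASCII or not
    raw = os.fsencode(name)             # str -> bytes via the filesystem encoding
    print(raw)                          # e.g. b'w\xc3\xb3zek.txt' on a UTF-8 system
    print(os.fsdecode(raw) == name)     # True: the round trip is lossless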
- _fsencoding = sys.getfilesystemencoding() or 'utf-8' - if _fsencoding == 'mbcs': - _fserrors = 'strict' - else: - _fserrors = 'surrogateescape' - - def fsencode(filename): - if isinstance(filename, bytes): - return filename - elif isinstance(filename, text_type): - return filename.encode(_fsencoding, _fserrors) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) - - def fsdecode(filename): - if isinstance(filename, text_type): - return filename - elif isinstance(filename, bytes): - return filename.decode(_fsencoding, _fserrors) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) - -try: - from tokenize import detect_encoding -except ImportError: # pragma: no cover - from codecs import BOM_UTF8, lookup - import re - - cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") - - def _get_normal_name(orig_enc): - """Imitates get_normal_name in tokenizer.c.""" - # Only care about the first 12 characters. - enc = orig_enc[:12].lower().replace("_", "-") - if enc == "utf-8" or enc.startswith("utf-8-"): - return "utf-8" - if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ - enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): - return "iso-8859-1" - return orig_enc - - def detect_encoding(readline): - """ - The detect_encoding() function is used to detect the encoding that should - be used to decode a Python source file. It requires one argument, readline, - in the same way as the tokenize() generator. - - It will call readline a maximum of twice, and return the encoding used - (as a string) and a list of any lines (left as bytes) it has read in. - - It detects the encoding from the presence of a utf-8 bom or an encoding - cookie as specified in pep-0263. If both a bom and a cookie are present, - but disagree, a SyntaxError will be raised. If the encoding cookie is an - invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, - 'utf-8-sig' is returned. - - If no encoding is specified, then the default of 'utf-8' will be returned. - """ - try: - filename = readline.__self__.name - except AttributeError: - filename = None - bom_found = False - encoding = None - default = 'utf-8' - def read_or_stop(): - try: - return readline() - except StopIteration: - return b'' - - def find_cookie(line): - try: - # Decode as UTF-8. Either the line is an encoding declaration, - # in which case it should be pure ASCII, or it must be UTF-8 - # per default encoding. 
- line_string = line.decode('utf-8') - except UnicodeDecodeError: - msg = "invalid or missing encoding declaration" - if filename is not None: - msg = '{} for {!r}'.format(msg, filename) - raise SyntaxError(msg) - - matches = cookie_re.findall(line_string) - if not matches: - return None - encoding = _get_normal_name(matches[0]) - try: - codec = lookup(encoding) - except LookupError: - # This behaviour mimics the Python interpreter - if filename is None: - msg = "unknown encoding: " + encoding - else: - msg = "unknown encoding for {!r}: {}".format(filename, - encoding) - raise SyntaxError(msg) - - if bom_found: - if codec.name != 'utf-8': - # This behaviour mimics the Python interpreter - if filename is None: - msg = 'encoding problem: utf-8' - else: - msg = 'encoding problem for {!r}: utf-8'.format(filename) - raise SyntaxError(msg) - encoding += '-sig' - return encoding - - first = read_or_stop() - if first.startswith(BOM_UTF8): - bom_found = True - first = first[3:] - default = 'utf-8-sig' - if not first: - return default, [] - - encoding = find_cookie(first) - if encoding: - return encoding, [first] - - second = read_or_stop() - if not second: - return default, [first] - - encoding = find_cookie(second) - if encoding: - return encoding, [first, second] - - return default, [first, second] - -# For converting & <-> & etc. -try: - from html import escape -except ImportError: - from cgi import escape -if sys.version_info[:2] < (3, 4): - unescape = HTMLParser().unescape -else: - from html import unescape - -try: - from collections import ChainMap -except ImportError: # pragma: no cover - from collections import MutableMapping - - try: - from reprlib import recursive_repr as _recursive_repr - except ImportError: - def _recursive_repr(fillvalue='...'): - ''' - Decorator to make a repr function return fillvalue for a recursive - call - ''' - - def decorating_function(user_function): - repr_running = set() - - def wrapper(self): - key = id(self), get_ident() - if key in repr_running: - return fillvalue - repr_running.add(key) - try: - result = user_function(self) - finally: - repr_running.discard(key) - return result - - # Can't use functools.wraps() here because of bootstrap issues - wrapper.__module__ = getattr(user_function, '__module__') - wrapper.__doc__ = getattr(user_function, '__doc__') - wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) - return wrapper - - return decorating_function - - class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - accessed or updated using the *maps* attribute. There is no other state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. 
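The lookup and write rules stated in the ChainMap docstring above (reads search the maps in order, writes touch only maps[0]) are mirrored by collections.ChainMap on modern Pythons; a short sketch:

    from collections import ChainMap

    defaults = {"color": "red", "user": "guest"}
    overrides = {"user": "admin"}

    cm = ChainMap(overrides, defaults)
    print(cm["user"])     # 'admin' -- found in the first mapping
    print(cm["color"])    # 'red'   -- falls through to the second mapping

    cm["color"] = "blue"  # writes only ever touch maps[0]
    print(overrides)      # {'user': 'admin', 'color': 'blue'}
    print(defaults)       # {'color': 'red', 'user': 'guest'} -- unchanged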
- - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - @_recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self): # like Django's Context.push() - 'New ChainMap with a new dict followed by all previous maps.' - return self.__class__({}, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' - try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - -try: - from importlib.util import cache_from_source # Python >= 3.4 -except ImportError: # pragma: no cover - try: - from imp import cache_from_source - except ImportError: # pragma: no cover - def cache_from_source(path, debug_override=None): - assert path.endswith('.py') - if debug_override is None: - debug_override = __debug__ - if debug_override: - suffix = 'c' - else: - suffix = 'o' - return path + suffix - -try: - from collections import OrderedDict -except ImportError: # pragma: no cover -## {{{ http://code.activestate.com/recipes/576693/ (r9) -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. - try: - from thread import get_ident as _get_ident - except ImportError: - from dummy_thread import get_ident as _get_ident - - try: - from _abcoll import KeysView, ValuesView, ItemsView - except ImportError: - pass - - - class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. 
- - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
- - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running=None): - 'od.__repr__() <==> repr(od)' - if not _repr_running: _repr_running = {} - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. 
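The order-sensitive equality described in __eq__ above, together with popitem()'s LIFO default, is what distinguishes this class from a plain dict; a short sketch using collections.OrderedDict, which this backport tracks:

    from collections import OrderedDict

    a = OrderedDict([("x", 1), ("y", 2)])
    b = OrderedDict([("y", 2), ("x", 1)])

    print(a == b)        # False -- OrderedDict vs OrderedDict compares order too
    print(a == dict(b))  # True  -- against a plain mapping only contents matter
    print(a.popitem())   # ('y', 2) -- last-in, first-out by default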
- - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - -try: - from logging.config import BaseConfigurator, valid_ident -except ImportError: # pragma: no cover - IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) - - - def valid_ident(s): - m = IDENTIFIER.match(s) - if not m: - raise ValueError('Not a valid Python identifier: %r' % s) - return True - - - # The ConvertingXXX classes are wrappers around standard Python containers, - # and they serve to convert any suitable values in the container. The - # conversion converts base dicts, lists and tuples to their wrapped - # equivalents, whereas strings which match a conversion format are converted - # appropriately. - # - # Each wrapper should have a configurator attribute holding the actual - # configurator to use for conversion. - - class ConvertingDict(dict): - """A converting dictionary wrapper.""" - - def __getitem__(self, key): - value = dict.__getitem__(self, key) - result = self.configurator.convert(value) - #If the converted value is different, save for next time - if value is not result: - self[key] = result - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - def get(self, key, default=None): - value = dict.get(self, key, default) - result = self.configurator.convert(value) - #If the converted value is different, save for next time - if value is not result: - self[key] = result - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - def pop(self, key, default=None): - value = dict.pop(self, key, default) - result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - class ConvertingList(list): - """A converting list wrapper.""" - def __getitem__(self, key): - value = list.__getitem__(self, key) - result = self.configurator.convert(value) - #If the converted value is different, save for next time - if value is not result: - self[key] = result - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - def pop(self, idx=-1): - value = list.pop(self, idx) - result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - return result - - class ConvertingTuple(tuple): - """A converting tuple wrapper.""" - def __getitem__(self, key): - value = tuple.__getitem__(self, key) - result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key - return result - - class BaseConfigurator(object): - """ - The configurator base class which defines some useful defaults. 
- """ - - CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') - - WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') - DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') - INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') - DIGIT_PATTERN = re.compile(r'^\d+$') - - value_converters = { - 'ext' : 'ext_convert', - 'cfg' : 'cfg_convert', - } - - # We might want to use a different one, e.g. importlib - importer = staticmethod(__import__) - - def __init__(self, config): - self.config = ConvertingDict(config) - self.config.configurator = self - - def resolve(self, s): - """ - Resolve strings to objects using standard import and attribute - syntax. - """ - name = s.split('.') - used = name.pop(0) - try: - found = self.importer(used) - for frag in name: - used += '.' + frag - try: - found = getattr(found, frag) - except AttributeError: - self.importer(used) - found = getattr(found, frag) - return found - except ImportError: - e, tb = sys.exc_info()[1:] - v = ValueError('Cannot resolve %r: %s' % (s, e)) - v.__cause__, v.__traceback__ = e, tb - raise v - - def ext_convert(self, value): - """Default converter for the ext:// protocol.""" - return self.resolve(value) - - def cfg_convert(self, value): - """Default converter for the cfg:// protocol.""" - rest = value - m = self.WORD_PATTERN.match(rest) - if m is None: - raise ValueError("Unable to convert %r" % value) - else: - rest = rest[m.end():] - d = self.config[m.groups()[0]] - #print d, rest - while rest: - m = self.DOT_PATTERN.match(rest) - if m: - d = d[m.groups()[0]] - else: - m = self.INDEX_PATTERN.match(rest) - if m: - idx = m.groups()[0] - if not self.DIGIT_PATTERN.match(idx): - d = d[idx] - else: - try: - n = int(idx) # try as number first (most likely) - d = d[n] - except TypeError: - d = d[idx] - if m: - rest = rest[m.end():] - else: - raise ValueError('Unable to convert ' - '%r at %r' % (value, rest)) - #rest should be empty - return d - - def convert(self, value): - """ - Convert values to an appropriate type. dicts, lists and tuples are - replaced by their converting alternatives. Strings are checked to - see if they have a conversion format and are converted if they do. 
- """ - if not isinstance(value, ConvertingDict) and isinstance(value, dict): - value = ConvertingDict(value) - value.configurator = self - elif not isinstance(value, ConvertingList) and isinstance(value, list): - value = ConvertingList(value) - value.configurator = self - elif not isinstance(value, ConvertingTuple) and\ - isinstance(value, tuple): - value = ConvertingTuple(value) - value.configurator = self - elif isinstance(value, string_types): - m = self.CONVERT_PATTERN.match(value) - if m: - d = m.groupdict() - prefix = d['prefix'] - converter = self.value_converters.get(prefix, None) - if converter: - suffix = d['suffix'] - converter = getattr(self, converter) - value = converter(suffix) - return value - - def configure_custom(self, config): - """Configure an object with a user-supplied factory.""" - c = config.pop('()') - if not callable(c): - c = self.resolve(c) - props = config.pop('.', None) - # Check for valid identifiers - kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) - result = c(**kwargs) - if props: - for name, value in props.items(): - setattr(result, name, value) - return result - - def as_tuple(self, value): - """Utility function which converts lists to tuples.""" - if isinstance(value, list): - value = tuple(value) - return value diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/database.py b/venv/Lib/site-packages/pip/_vendor/distlib/database.py deleted file mode 100644 index 0a90c30..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/database.py +++ /dev/null @@ -1,1339 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012-2017 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -"""PEP 376 implementation.""" - -from __future__ import unicode_literals - -import base64 -import codecs -import contextlib -import hashlib -import logging -import os -import posixpath -import sys -import zipimport - -from . import DistlibException, resources -from .compat import StringIO -from .version import get_scheme, UnsupportedVersionError -from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME) -from .util import (parse_requirement, cached_property, parse_name_and_version, - read_exports, write_exports, CSVReader, CSVWriter) - - -__all__ = ['Distribution', 'BaseInstalledDistribution', - 'InstalledDistribution', 'EggInfoDistribution', - 'DistributionPath'] - - -logger = logging.getLogger(__name__) - -EXPORTS_FILENAME = 'pydist-exports.json' -COMMANDS_FILENAME = 'pydist-commands.json' - -DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', - 'RESOURCES', EXPORTS_FILENAME, 'SHARED') - -DISTINFO_EXT = '.dist-info' - - -class _Cache(object): - """ - A simple cache mapping names and .dist-info paths to distributions - """ - def __init__(self): - """ - Initialise an instance. There is normally one for each DistributionPath. - """ - self.name = {} - self.path = {} - self.generated = False - - def clear(self): - """ - Clear the cache, setting it to its initial state. - """ - self.name.clear() - self.path.clear() - self.generated = False - - def add(self, dist): - """ - Add a distribution to the cache. - :param dist: The distribution to add. - """ - if dist.path not in self.path: - self.path[dist.path] = dist - self.name.setdefault(dist.key, []).append(dist) - - -class DistributionPath(object): - """ - Represents a set of distributions installed on a path (typically sys.path). 
- """ - def __init__(self, path=None, include_egg=False): - """ - Create an instance from a path, optionally including legacy (distutils/ - setuptools/distribute) distributions. - :param path: The path to use, as a list of directories. If not specified, - sys.path is used. - :param include_egg: If True, this instance will look for and return legacy - distributions as well as those based on PEP 376. - """ - if path is None: - path = sys.path - self.path = path - self._include_dist = True - self._include_egg = include_egg - - self._cache = _Cache() - self._cache_egg = _Cache() - self._cache_enabled = True - self._scheme = get_scheme('default') - - def _get_cache_enabled(self): - return self._cache_enabled - - def _set_cache_enabled(self, value): - self._cache_enabled = value - - cache_enabled = property(_get_cache_enabled, _set_cache_enabled) - - def clear_cache(self): - """ - Clears the internal cache. - """ - self._cache.clear() - self._cache_egg.clear() - - - def _yield_distributions(self): - """ - Yield .dist-info and/or .egg(-info) distributions. - """ - # We need to check if we've seen some resources already, because on - # some Linux systems (e.g. some Debian/Ubuntu variants) there are - # symlinks which alias other files in the environment. - seen = set() - for path in self.path: - finder = resources.finder_for_path(path) - if finder is None: - continue - r = finder.find('') - if not r or not r.is_container: - continue - rset = sorted(r.resources) - for entry in rset: - r = finder.find(entry) - if not r or r.path in seen: - continue - if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [METADATA_FILENAME, - WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME] - for metadata_filename in possible_filenames: - metadata_path = posixpath.join(entry, metadata_filename) - pydist = finder.find(metadata_path) - if pydist: - break - else: - continue - - with contextlib.closing(pydist.as_stream()) as stream: - metadata = Metadata(fileobj=stream, scheme='legacy') - logger.debug('Found %s', r.path) - seen.add(r.path) - yield new_dist_class(r.path, metadata=metadata, - env=self) - elif self._include_egg and entry.endswith(('.egg-info', - '.egg')): - logger.debug('Found %s', r.path) - seen.add(r.path) - yield old_dist_class(r.path, self) - - def _generate_cache(self): - """ - Scan the path for distributions and populate the cache with - those that are found. - """ - gen_dist = not self._cache.generated - gen_egg = self._include_egg and not self._cache_egg.generated - if gen_dist or gen_egg: - for dist in self._yield_distributions(): - if isinstance(dist, InstalledDistribution): - self._cache.add(dist) - else: - self._cache_egg.add(dist) - - if gen_dist: - self._cache.generated = True - if gen_egg: - self._cache_egg.generated = True - - @classmethod - def distinfo_dirname(cls, name, version): - """ - The *name* and *version* parameters are converted into their - filename-escaped form, i.e. any ``'-'`` characters are replaced - with ``'_'`` other than the one in ``'dist-info'`` and the one - separating the name from the version number. - - :parameter name: is converted to a standard distribution name by replacing - any runs of non- alphanumeric characters with a single - ``'-'``. - :type name: string - :parameter version: is converted to a standard version string. Spaces - become dots, and all other non-alphanumeric characters - (except dots) become dashes, with runs of multiple - dashes condensed to a single dash. 
- :type version: string - :returns: directory name - :rtype: string""" - name = name.replace('-', '_') - return '-'.join([name, version]) + DISTINFO_EXT - - def get_distributions(self): - """ - Provides an iterator that looks for distributions and returns - :class:`InstalledDistribution` or - :class:`EggInfoDistribution` instances for each one of them. - - :rtype: iterator of :class:`InstalledDistribution` and - :class:`EggInfoDistribution` instances - """ - if not self._cache_enabled: - for dist in self._yield_distributions(): - yield dist - else: - self._generate_cache() - - for dist in self._cache.path.values(): - yield dist - - if self._include_egg: - for dist in self._cache_egg.path.values(): - yield dist - - def get_distribution(self, name): - """ - Looks for a named distribution on the path. - - This function only returns the first result found, as no more than one - value is expected. If nothing is found, ``None`` is returned. - - :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` - or ``None`` - """ - result = None - name = name.lower() - if not self._cache_enabled: - for dist in self._yield_distributions(): - if dist.key == name: - result = dist - break - else: - self._generate_cache() - - if name in self._cache.name: - result = self._cache.name[name][0] - elif self._include_egg and name in self._cache_egg.name: - result = self._cache_egg.name[name][0] - return result - - def provides_distribution(self, name, version=None): - """ - Iterates over all distributions to find which distributions provide *name*. - If a *version* is provided, it will be used to filter the results. - - This function only returns the first result found, since no more than - one values are expected. If the directory is not found, returns ``None``. - - :parameter version: a version specifier that indicates the version - required, conforming to the format in ``PEP-345`` - - :type name: string - :type version: string - """ - matcher = None - if version is not None: - try: - matcher = self._scheme.matcher('%s (%s)' % (name, version)) - except ValueError: - raise DistlibException('invalid name or version: %r, %r' % - (name, version)) - - for dist in self.get_distributions(): - # We hit a problem on Travis where enum34 was installed and doesn't - # have a provides attribute ... - if not hasattr(dist, 'provides'): - logger.debug('No "provides": %s', dist) - else: - provided = dist.provides - - for p in provided: - p_name, p_ver = parse_name_and_version(p) - if matcher is None: - if p_name == name: - yield dist - break - else: - if p_name == name and matcher.match(p_ver): - yield dist - break - - def get_file_path(self, name, relative_path): - """ - Return the path to a resource file. - """ - dist = self.get_distribution(name) - if dist is None: - raise LookupError('no distribution named %r found' % name) - return dist.get_resource_path(relative_path) - - def get_exported_entries(self, category, name=None): - """ - Return all of the exported entries in a particular category. - - :param category: The category to search for entries. - :param name: If specified, only entries with that name are returned. - """ - for dist in self.get_distributions(): - r = dist.exports - if category in r: - d = r[category] - if name is not None: - if name in d: - yield d[name] - else: - for v in d.values(): - yield v - - -class Distribution(object): - """ - A base class for distributions, whether installed or from indexes. - Either way, it must have some metadata, so that's all that's needed - for construction. 
- """ - - build_time_dependency = False - """ - Set to True if it's known to be only a build-time dependency (i.e. - not needed after installation). - """ - - requested = False - """A boolean that indicates whether the ``REQUESTED`` metadata file is - present (in other words, whether the package was installed by user - request or it was installed as a dependency).""" - - def __init__(self, metadata): - """ - Initialise an instance. - :param metadata: The instance of :class:`Metadata` describing this - distribution. - """ - self.metadata = metadata - self.name = metadata.name - self.key = self.name.lower() # for case-insensitive comparisons - self.version = metadata.version - self.locator = None - self.digest = None - self.extras = None # additional features requested - self.context = None # environment marker overrides - self.download_urls = set() - self.digests = {} - - @property - def source_url(self): - """ - The source archive download URL for this distribution. - """ - return self.metadata.source_url - - download_url = source_url # Backward compatibility - - @property - def name_and_version(self): - """ - A utility property which displays the name and version in parentheses. - """ - return '%s (%s)' % (self.name, self.version) - - @property - def provides(self): - """ - A set of distribution names and versions provided by this distribution. - :return: A set of "name (version)" strings. - """ - plist = self.metadata.provides - s = '%s (%s)' % (self.name, self.version) - if s not in plist: - plist.append(s) - return plist - - def _get_requirements(self, req_attr): - md = self.metadata - logger.debug('Getting requirements from metadata %r', md.todict()) - reqts = getattr(md, req_attr) - return set(md.get_requirements(reqts, extras=self.extras, - env=self.context)) - - @property - def run_requires(self): - return self._get_requirements('run_requires') - - @property - def meta_requires(self): - return self._get_requirements('meta_requires') - - @property - def build_requires(self): - return self._get_requirements('build_requires') - - @property - def test_requires(self): - return self._get_requirements('test_requires') - - @property - def dev_requires(self): - return self._get_requirements('dev_requires') - - def matches_requirement(self, req): - """ - Say if this instance matches (fulfills) a requirement. - :param req: The requirement to match. - :rtype req: str - :return: True if it matches, else False. - """ - # Requirement may contain extras - parse to lose those - # from what's passed to the matcher - r = parse_requirement(req) - scheme = get_scheme(self.metadata.scheme) - try: - matcher = scheme.matcher(r.requirement) - except UnsupportedVersionError: - # XXX compat-mode if cannot read the version - logger.warning('could not read version %r - using name only', - req) - name = req.split()[0] - matcher = scheme.matcher(name) - - name = matcher.key # case-insensitive - - result = False - for p in self.provides: - p_name, p_ver = parse_name_and_version(p) - if p_name != name: - continue - try: - result = matcher.match(p_ver) - break - except UnsupportedVersionError: - pass - return result - - def __repr__(self): - """ - Return a textual representation of this instance, - """ - if self.source_url: - suffix = ' [%s]' % self.source_url - else: - suffix = '' - return '' % (self.name, self.version, suffix) - - def __eq__(self, other): - """ - See if this distribution is the same as another. - :param other: The distribution to compare with. To be equal to one - another. 
distributions must have the same type, name, - version and source_url. - :return: True if it is the same, else False. - """ - if type(other) is not type(self): - result = False - else: - result = (self.name == other.name and - self.version == other.version and - self.source_url == other.source_url) - return result - - def __hash__(self): - """ - Compute hash in a way which matches the equality test. - """ - return hash(self.name) + hash(self.version) + hash(self.source_url) - - -class BaseInstalledDistribution(Distribution): - """ - This is the base class for installed distributions (whether PEP 376 or - legacy). - """ - - hasher = None - - def __init__(self, metadata, path, env=None): - """ - Initialise an instance. - :param metadata: An instance of :class:`Metadata` which describes the - distribution. This will normally have been initialised - from a metadata file in the ``path``. - :param path: The path of the ``.dist-info`` or ``.egg-info`` - directory for the distribution. - :param env: This is normally the :class:`DistributionPath` - instance where this distribution was found. - """ - super(BaseInstalledDistribution, self).__init__(metadata) - self.path = path - self.dist_path = env - - def get_hash(self, data, hasher=None): - """ - Get the hash of some data, using a particular hash algorithm, if - specified. - - :param data: The data to be hashed. - :type data: bytes - :param hasher: The name of a hash implementation, supported by hashlib, - or ``None``. Examples of valid values are ``'sha1'``, - ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and - ``'sha512'``. If no hasher is specified, the ``hasher`` - attribute of the :class:`InstalledDistribution` instance - is used. If the hasher is determined to be ``None``, MD5 - is used as the hashing algorithm. - :returns: The hash of the data. If a hasher was explicitly specified, - the returned hash will be prefixed with the specified hasher - followed by '='. - :rtype: str - """ - if hasher is None: - hasher = self.hasher - if hasher is None: - hasher = hashlib.md5 - prefix = '' - else: - hasher = getattr(hashlib, hasher) - prefix = '%s=' % self.hasher - digest = hasher(data).digest() - digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') - return '%s%s' % (prefix, digest) - - -class InstalledDistribution(BaseInstalledDistribution): - """ - Created with the *path* of the ``.dist-info`` directory provided to the - constructor. It reads the metadata contained in ``pydist.json`` when it is - instantiated., or uses a passed in Metadata instance (useful for when - dry-run mode is being used). 
- """ - - hasher = 'sha256' - - def __init__(self, path, metadata=None, env=None): - self.modules = [] - self.finder = finder = resources.finder_for_path(path) - if finder is None: - raise ValueError('finder unavailable for %s' % path) - if env and env._cache_enabled and path in env._cache.path: - metadata = env._cache.path[path].metadata - elif metadata is None: - r = finder.find(METADATA_FILENAME) - # Temporary - for Wheel 0.23 support - if r is None: - r = finder.find(WHEEL_METADATA_FILENAME) - # Temporary - for legacy support - if r is None: - r = finder.find(LEGACY_METADATA_FILENAME) - if r is None: - raise ValueError('no %s found in %s' % (METADATA_FILENAME, - path)) - with contextlib.closing(r.as_stream()) as stream: - metadata = Metadata(fileobj=stream, scheme='legacy') - - super(InstalledDistribution, self).__init__(metadata, path, env) - - if env and env._cache_enabled: - env._cache.add(self) - - r = finder.find('REQUESTED') - self.requested = r is not None - p = os.path.join(path, 'top_level.txt') - if os.path.exists(p): - with open(p, 'rb') as f: - data = f.read().decode('utf-8') - self.modules = data.splitlines() - - def __repr__(self): - return '' % ( - self.name, self.version, self.path) - - def __str__(self): - return "%s %s" % (self.name, self.version) - - def _get_records(self): - """ - Get the list of installed files for the distribution - :return: A list of tuples of path, hash and size. Note that hash and - size might be ``None`` for some entries. The path is exactly - as stored in the file (which is as in PEP 376). - """ - results = [] - r = self.get_distinfo_resource('RECORD') - with contextlib.closing(r.as_stream()) as stream: - with CSVReader(stream=stream) as record_reader: - # Base location is parent dir of .dist-info dir - #base_location = os.path.dirname(self.path) - #base_location = os.path.abspath(base_location) - for row in record_reader: - missing = [None for i in range(len(row), 3)] - path, checksum, size = row + missing - #if not os.path.isabs(path): - # path = path.replace('/', os.sep) - # path = os.path.join(base_location, path) - results.append((path, checksum, size)) - return results - - @cached_property - def exports(self): - """ - Return the information exported by this distribution. - :return: A dictionary of exports, mapping an export category to a dict - of :class:`ExportEntry` instances describing the individual - export entries, and keyed by name. - """ - result = {} - r = self.get_distinfo_resource(EXPORTS_FILENAME) - if r: - result = self.read_exports() - return result - - def read_exports(self): - """ - Read exports data from a file in .ini format. - - :return: A dictionary of exports, mapping an export category to a list - of :class:`ExportEntry` instances describing the individual - export entries. - """ - result = {} - r = self.get_distinfo_resource(EXPORTS_FILENAME) - if r: - with contextlib.closing(r.as_stream()) as stream: - result = read_exports(stream) - return result - - def write_exports(self, exports): - """ - Write a dictionary of exports to a file in .ini format. - :param exports: A dictionary of exports, mapping an export category to - a list of :class:`ExportEntry` instances describing the - individual export entries. - """ - rf = self.get_distinfo_file(EXPORTS_FILENAME) - with open(rf, 'w') as f: - write_exports(exports, f) - - def get_resource_path(self, relative_path): - """ - NOTE: This API may change in the future. - - Return the absolute path to a resource file with the given relative - path. 
- - :param relative_path: The path, relative to .dist-info, of the resource - of interest. - :return: The absolute path where the resource is to be found. - """ - r = self.get_distinfo_resource('RESOURCES') - with contextlib.closing(r.as_stream()) as stream: - with CSVReader(stream=stream) as resources_reader: - for relative, destination in resources_reader: - if relative == relative_path: - return destination - raise KeyError('no resource file with relative path %r ' - 'is installed' % relative_path) - - def list_installed_files(self): - """ - Iterates over the ``RECORD`` entries and returns a tuple - ``(path, hash, size)`` for each line. - - :returns: iterator of (path, hash, size) - """ - for result in self._get_records(): - yield result - - def write_installed_files(self, paths, prefix, dry_run=False): - """ - Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any - existing ``RECORD`` file is silently overwritten. - - prefix is used to determine when to write absolute paths. - """ - prefix = os.path.join(prefix, '') - base = os.path.dirname(self.path) - base_under_prefix = base.startswith(prefix) - base = os.path.join(base, '') - record_path = self.get_distinfo_file('RECORD') - logger.info('creating %s', record_path) - if dry_run: - return None - with CSVWriter(record_path) as writer: - for path in paths: - if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): - # do not put size and hash, as in PEP-376 - hash_value = size = '' - else: - size = '%d' % os.path.getsize(path) - with open(path, 'rb') as fp: - hash_value = self.get_hash(fp.read()) - if path.startswith(base) or (base_under_prefix and - path.startswith(prefix)): - path = os.path.relpath(path, base) - writer.writerow((path, hash_value, size)) - - # add the RECORD file itself - if record_path.startswith(base): - record_path = os.path.relpath(record_path, base) - writer.writerow((record_path, '', '')) - return record_path - - def check_installed_files(self): - """ - Checks that the hashes and sizes of the files in ``RECORD`` are - matched by the files themselves. Returns a (possibly empty) list of - mismatches. Each entry in the mismatch list will be a tuple consisting - of the path, 'exists', 'size' or 'hash' according to what didn't match - (existence is checked first, then size, then hash), the expected - value and the actual value. - """ - mismatches = [] - base = os.path.dirname(self.path) - record_path = self.get_distinfo_file('RECORD') - for path, hash_value, size in self.list_installed_files(): - if not os.path.isabs(path): - path = os.path.join(base, path) - if path == record_path: - continue - if not os.path.exists(path): - mismatches.append((path, 'exists', True, False)) - elif os.path.isfile(path): - actual_size = str(os.path.getsize(path)) - if size and actual_size != size: - mismatches.append((path, 'size', size, actual_size)) - elif hash_value: - if '=' in hash_value: - hasher = hash_value.split('=', 1)[0] - else: - hasher = None - - with open(path, 'rb') as f: - actual_hash = self.get_hash(f.read(), hasher) - if actual_hash != hash_value: - mismatches.append((path, 'hash', hash_value, actual_hash)) - return mismatches - - @cached_property - def shared_locations(self): - """ - A dictionary of shared locations whose keys are in the set 'prefix', - 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. - The corresponding value is the absolute path of that category for - this distribution, and takes into account any paths selected by the - user at installation time (e.g. 
via command-line arguments). In the - case of the 'namespace' key, this would be a list of absolute paths - for the roots of namespace packages in this distribution. - - The first time this property is accessed, the relevant information is - read from the SHARED file in the .dist-info directory. - """ - result = {} - shared_path = os.path.join(self.path, 'SHARED') - if os.path.isfile(shared_path): - with codecs.open(shared_path, 'r', encoding='utf-8') as f: - lines = f.read().splitlines() - for line in lines: - key, value = line.split('=', 1) - if key == 'namespace': - result.setdefault(key, []).append(value) - else: - result[key] = value - return result - - def write_shared_locations(self, paths, dry_run=False): - """ - Write shared location information to the SHARED file in .dist-info. - :param paths: A dictionary as described in the documentation for - :meth:`shared_locations`. - :param dry_run: If True, the action is logged but no file is actually - written. - :return: The path of the file written to. - """ - shared_path = os.path.join(self.path, 'SHARED') - logger.info('creating %s', shared_path) - if dry_run: - return None - lines = [] - for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): - path = paths[key] - if os.path.isdir(paths[key]): - lines.append('%s=%s' % (key, path)) - for ns in paths.get('namespace', ()): - lines.append('namespace=%s' % ns) - - with codecs.open(shared_path, 'w', encoding='utf-8') as f: - f.write('\n'.join(lines)) - return shared_path - - def get_distinfo_resource(self, path): - if path not in DIST_FILES: - raise DistlibException('invalid path for a dist-info file: ' - '%r at %r' % (path, self.path)) - finder = resources.finder_for_path(self.path) - if finder is None: - raise DistlibException('Unable to get a finder for %s' % self.path) - return finder.find(path) - - def get_distinfo_file(self, path): - """ - Returns a path located under the ``.dist-info`` directory. Returns a - string representing the path. - - :parameter path: a ``'/'``-separated path relative to the - ``.dist-info`` directory or an absolute path; - If *path* is an absolute path and doesn't start - with the ``.dist-info`` directory path, - a :class:`DistlibException` is raised - :type path: str - :rtype: str - """ - # Check if it is an absolute path # XXX use relpath, add tests - if path.find(os.sep) >= 0: - # it's an absolute path? - distinfo_dirname, path = path.split(os.sep)[-2:] - if distinfo_dirname != self.path.split(os.sep)[-1]: - raise DistlibException( - 'dist-info file %r does not belong to the %r %s ' - 'distribution' % (path, self.name, self.version)) - - # The file must be relative - if path not in DIST_FILES: - raise DistlibException('invalid path for a dist-info file: ' - '%r at %r' % (path, self.path)) - - return os.path.join(self.path, path) - - def list_distinfo_files(self): - """ - Iterates over the ``RECORD`` entries and returns paths for each line if - the path is pointing to a file located in the ``.dist-info`` directory - or one of its subdirectories. 
- - :returns: iterator of paths - """ - base = os.path.dirname(self.path) - for path, checksum, size in self._get_records(): - # XXX add separator or use real relpath algo - if not os.path.isabs(path): - path = os.path.join(base, path) - if path.startswith(self.path): - yield path - - def __eq__(self, other): - return (isinstance(other, InstalledDistribution) and - self.path == other.path) - - # See http://docs.python.org/reference/datamodel#object.__hash__ - __hash__ = object.__hash__ - - -class EggInfoDistribution(BaseInstalledDistribution): - """Created with the *path* of the ``.egg-info`` directory or file provided - to the constructor. It reads the metadata contained in the file itself, or - if the given path happens to be a directory, the metadata is read from the - file ``PKG-INFO`` under that directory.""" - - requested = True # as we have no way of knowing, assume it was - shared_locations = {} - - def __init__(self, path, env=None): - def set_name_and_version(s, n, v): - s.name = n - s.key = n.lower() # for case-insensitive comparisons - s.version = v - - self.path = path - self.dist_path = env - if env and env._cache_enabled and path in env._cache_egg.path: - metadata = env._cache_egg.path[path].metadata - set_name_and_version(self, metadata.name, metadata.version) - else: - metadata = self._get_metadata(path) - - # Need to be set before caching - set_name_and_version(self, metadata.name, metadata.version) - - if env and env._cache_enabled: - env._cache_egg.add(self) - super(EggInfoDistribution, self).__init__(metadata, path, env) - - def _get_metadata(self, path): - requires = None - - def parse_requires_data(data): - """Create a list of dependencies from a requires.txt file. - - *data*: the contents of a setuptools-produced requires.txt file. - """ - reqs = [] - lines = data.splitlines() - for line in lines: - line = line.strip() - if line.startswith('['): - logger.warning('Unexpected line: quitting requirement scan: %r', - line) - break - r = parse_requirement(line) - if not r: - logger.warning('Not recognised as a requirement: %r', line) - continue - if r.extras: - logger.warning('extra requirements in requires.txt are ' - 'not supported') - if not r.constraints: - reqs.append(r.name) - else: - cons = ', '.join('%s%s' % c for c in r.constraints) - reqs.append('%s (%s)' % (r.name, cons)) - return reqs - - def parse_requires_path(req_path): - """Create a list of dependencies from a requires.txt file. - - *req_path*: the path to a setuptools-produced requires.txt file. 
- """ - - reqs = [] - try: - with codecs.open(req_path, 'r', 'utf-8') as fp: - reqs = parse_requires_data(fp.read()) - except IOError: - pass - return reqs - - tl_path = tl_data = None - if path.endswith('.egg'): - if os.path.isdir(path): - p = os.path.join(path, 'EGG-INFO') - meta_path = os.path.join(p, 'PKG-INFO') - metadata = Metadata(path=meta_path, scheme='legacy') - req_path = os.path.join(p, 'requires.txt') - tl_path = os.path.join(p, 'top_level.txt') - requires = parse_requires_path(req_path) - else: - # FIXME handle the case where zipfile is not available - zipf = zipimport.zipimporter(path) - fileobj = StringIO( - zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) - metadata = Metadata(fileobj=fileobj, scheme='legacy') - try: - data = zipf.get_data('EGG-INFO/requires.txt') - tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') - requires = parse_requires_data(data.decode('utf-8')) - except IOError: - requires = None - elif path.endswith('.egg-info'): - if os.path.isdir(path): - req_path = os.path.join(path, 'requires.txt') - requires = parse_requires_path(req_path) - path = os.path.join(path, 'PKG-INFO') - tl_path = os.path.join(path, 'top_level.txt') - metadata = Metadata(path=path, scheme='legacy') - else: - raise DistlibException('path must end with .egg-info or .egg, ' - 'got %r' % path) - - if requires: - metadata.add_requirements(requires) - # look for top-level modules in top_level.txt, if present - if tl_data is None: - if tl_path is not None and os.path.exists(tl_path): - with open(tl_path, 'rb') as f: - tl_data = f.read().decode('utf-8') - if not tl_data: - tl_data = [] - else: - tl_data = tl_data.splitlines() - self.modules = tl_data - return metadata - - def __repr__(self): - return '' % ( - self.name, self.version, self.path) - - def __str__(self): - return "%s %s" % (self.name, self.version) - - def check_installed_files(self): - """ - Checks that the hashes and sizes of the files in ``RECORD`` are - matched by the files themselves. Returns a (possibly empty) list of - mismatches. Each entry in the mismatch list will be a tuple consisting - of the path, 'exists', 'size' or 'hash' according to what didn't match - (existence is checked first, then size, then hash), the expected - value and the actual value. - """ - mismatches = [] - record_path = os.path.join(self.path, 'installed-files.txt') - if os.path.exists(record_path): - for path, _, _ in self.list_installed_files(): - if path == record_path: - continue - if not os.path.exists(path): - mismatches.append((path, 'exists', True, False)) - return mismatches - - def list_installed_files(self): - """ - Iterates over the ``installed-files.txt`` entries and returns a tuple - ``(path, hash, size)`` for each line. 
- - :returns: a list of (path, hash, size) - """ - - def _md5(path): - f = open(path, 'rb') - try: - content = f.read() - finally: - f.close() - return hashlib.md5(content).hexdigest() - - def _size(path): - return os.stat(path).st_size - - record_path = os.path.join(self.path, 'installed-files.txt') - result = [] - if os.path.exists(record_path): - with codecs.open(record_path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - p = os.path.normpath(os.path.join(self.path, line)) - # "./" is present as a marker between installed files - # and installation metadata files - if not os.path.exists(p): - logger.warning('Non-existent file: %s', p) - if p.endswith(('.pyc', '.pyo')): - continue - #otherwise fall through and fail - if not os.path.isdir(p): - result.append((p, _md5(p), _size(p))) - result.append((record_path, None, None)) - return result - - def list_distinfo_files(self, absolute=False): - """ - Iterates over the ``installed-files.txt`` entries and returns paths for - each line if the path is pointing to a file located in the - ``.egg-info`` directory or one of its subdirectories. - - :parameter absolute: If *absolute* is ``True``, each returned path is - transformed into a local absolute path. Otherwise the - raw value from ``installed-files.txt`` is returned. - :type absolute: boolean - :returns: iterator of paths - """ - record_path = os.path.join(self.path, 'installed-files.txt') - if os.path.exists(record_path): - skip = True - with codecs.open(record_path, 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line == './': - skip = False - continue - if not skip: - p = os.path.normpath(os.path.join(self.path, line)) - if p.startswith(self.path): - if absolute: - yield p - else: - yield line - - def __eq__(self, other): - return (isinstance(other, EggInfoDistribution) and - self.path == other.path) - - # See http://docs.python.org/reference/datamodel#object.__hash__ - __hash__ = object.__hash__ - -new_dist_class = InstalledDistribution -old_dist_class = EggInfoDistribution - - -class DependencyGraph(object): - """ - Represents a dependency graph between distributions. - - The dependency relationships are stored in an ``adjacency_list`` that maps - distributions to a list of ``(other, label)`` tuples where ``other`` - is a distribution and the edge is labeled with ``label`` (i.e. the version - specifier, if such was provided). Also, for more efficient traversal, for - every distribution ``x``, a list of predecessors is kept in - ``reverse_list[x]``. An edge from distribution ``a`` to - distribution ``b`` means that ``a`` depends on ``b``. If any missing - dependencies are found, they are stored in ``missing``, which is a - dictionary that maps distributions to a list of requirements that were not - provided by any other distributions. - """ - - def __init__(self): - self.adjacency_list = {} - self.reverse_list = {} - self.missing = {} - - def add_distribution(self, distribution): - """Add the *distribution* to the graph. - - :type distribution: :class:`distutils2.database.InstalledDistribution` - or :class:`distutils2.database.EggInfoDistribution` - """ - self.adjacency_list[distribution] = [] - self.reverse_list[distribution] = [] - #self.missing[distribution] = [] - - def add_edge(self, x, y, label=None): - """Add an edge from distribution *x* to distribution *y* with the given - *label*. 
- - :type x: :class:`distutils2.database.InstalledDistribution` or - :class:`distutils2.database.EggInfoDistribution` - :type y: :class:`distutils2.database.InstalledDistribution` or - :class:`distutils2.database.EggInfoDistribution` - :type label: ``str`` or ``None`` - """ - self.adjacency_list[x].append((y, label)) - # multiple edges are allowed, so be careful - if x not in self.reverse_list[y]: - self.reverse_list[y].append(x) - - def add_missing(self, distribution, requirement): - """ - Add a missing *requirement* for the given *distribution*. - - :type distribution: :class:`distutils2.database.InstalledDistribution` - or :class:`distutils2.database.EggInfoDistribution` - :type requirement: ``str`` - """ - logger.debug('%s missing %r', distribution, requirement) - self.missing.setdefault(distribution, []).append(requirement) - - def _repr_dist(self, dist): - return '%s %s' % (dist.name, dist.version) - - def repr_node(self, dist, level=1): - """Prints only a subgraph""" - output = [self._repr_dist(dist)] - for other, label in self.adjacency_list[dist]: - dist = self._repr_dist(other) - if label is not None: - dist = '%s [%s]' % (dist, label) - output.append(' ' * level + str(dist)) - suboutput = self.repr_node(other, level + 1) - subs = suboutput.split('\n') - output.extend(subs[1:]) - return '\n'.join(output) - - def to_dot(self, f, skip_disconnected=True): - """Writes a DOT output for the graph to the provided file *f*. - - If *skip_disconnected* is set to ``True``, then all distributions - that are not dependent on any other distribution are skipped. - - :type f: has to support ``file``-like operations - :type skip_disconnected: ``bool`` - """ - disconnected = [] - - f.write("digraph dependencies {\n") - for dist, adjs in self.adjacency_list.items(): - if len(adjs) == 0 and not skip_disconnected: - disconnected.append(dist) - for other, label in adjs: - if not label is None: - f.write('"%s" -> "%s" [label="%s"]\n' % - (dist.name, other.name, label)) - else: - f.write('"%s" -> "%s"\n' % (dist.name, other.name)) - if not skip_disconnected and len(disconnected) > 0: - f.write('subgraph disconnected {\n') - f.write('label = "Disconnected"\n') - f.write('bgcolor = red\n') - - for dist in disconnected: - f.write('"%s"' % dist.name) - f.write('\n') - f.write('}\n') - f.write('}\n') - - def topological_sort(self): - """ - Perform a topological sort of the graph. - :return: A tuple, the first element of which is a topologically sorted - list of distributions, and the second element of which is a - list of distributions that cannot be sorted because they have - circular dependencies and so form a cycle. - """ - result = [] - # Make a shallow copy of the adjacency list - alist = {} - for k, v in self.adjacency_list.items(): - alist[k] = v[:] - while True: - # See what we can remove in this run - to_remove = [] - for k, v in list(alist.items())[:]: - if not v: - to_remove.append(k) - del alist[k] - if not to_remove: - # What's left in alist (if anything) is a cycle. 
- break - # Remove from the adjacency list of others - for k, v in alist.items(): - alist[k] = [(d, r) for d, r in v if d not in to_remove] - logger.debug('Moving to result: %s', - ['%s (%s)' % (d.name, d.version) for d in to_remove]) - result.extend(to_remove) - return result, list(alist.keys()) - - def __repr__(self): - """Representation of the graph""" - output = [] - for dist, adjs in self.adjacency_list.items(): - output.append(self.repr_node(dist)) - return '\n'.join(output) - - -def make_graph(dists, scheme='default'): - """Makes a dependency graph from the given distributions. - - :parameter dists: a list of distributions - :type dists: list of :class:`distutils2.database.InstalledDistribution` and - :class:`distutils2.database.EggInfoDistribution` instances - :rtype: a :class:`DependencyGraph` instance - """ - scheme = get_scheme(scheme) - graph = DependencyGraph() - provided = {} # maps names to lists of (version, dist) tuples - - # first, build the graph and find out what's provided - for dist in dists: - graph.add_distribution(dist) - - for p in dist.provides: - name, version = parse_name_and_version(p) - logger.debug('Add to provided: %s, %s, %s', name, version, dist) - provided.setdefault(name, []).append((version, dist)) - - # now make the edges - for dist in dists: - requires = (dist.run_requires | dist.meta_requires | - dist.build_requires | dist.dev_requires) - for req in requires: - try: - matcher = scheme.matcher(req) - except UnsupportedVersionError: - # XXX compat-mode if cannot read the version - logger.warning('could not read version %r - using name only', - req) - name = req.split()[0] - matcher = scheme.matcher(name) - - name = matcher.key # case-insensitive - - matched = False - if name in provided: - for version, provider in provided[name]: - try: - match = matcher.match(version) - except UnsupportedVersionError: - match = False - - if match: - graph.add_edge(dist, provider, req) - matched = True - break - if not matched: - graph.add_missing(dist, req) - return graph - - -def get_dependent_dists(dists, dist): - """Recursively generate a list of distributions from *dists* that are - dependent on *dist*. - - :param dists: a list of distributions - :param dist: a distribution, member of *dists* for which we are interested - """ - if dist not in dists: - raise DistlibException('given distribution %r is not a member ' - 'of the list' % dist.name) - graph = make_graph(dists) - - dep = [dist] # dependent distributions - todo = graph.reverse_list[dist] # list of nodes we should inspect - - while todo: - d = todo.pop() - dep.append(d) - for succ in graph.reverse_list[d]: - if succ not in dep: - todo.append(succ) - - dep.pop(0) # remove dist from dep, was there to prevent infinite loops - return dep - - -def get_required_dists(dists, dist): - """Recursively generate a list of distributions from *dists* that are - required by *dist*. - - :param dists: a list of distributions - :param dist: a distribution, member of *dists* for which we are interested - """ - if dist not in dists: - raise DistlibException('given distribution %r is not a member ' - 'of the list' % dist.name) - graph = make_graph(dists) - - req = [] # required distributions - todo = graph.adjacency_list[dist] # list of nodes we should inspect - - while todo: - d = todo.pop()[0] - req.append(d) - for pred in graph.adjacency_list[d]: - if pred not in req: - todo.append(pred) - - return req - - -def make_dist(name, version, **kwargs): - """ - A convenience method for making a dist given just a name and version. 
- """ - summary = kwargs.pop('summary', 'Placeholder for summary') - md = Metadata(**kwargs) - md.name = name - md.version = version - md.summary = summary or 'Placeholder for summary' - return Distribution(md) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/index.py b/venv/Lib/site-packages/pip/_vendor/distlib/index.py deleted file mode 100644 index b1fbbf8..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/index.py +++ /dev/null @@ -1,509 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -import hashlib -import logging -import os -import shutil -import subprocess -import tempfile -try: - from threading import Thread -except ImportError: - from dummy_threading import Thread - -from . import DistlibException -from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, - urlparse, build_opener, string_types) -from .util import zip_dir, ServerProxy - -logger = logging.getLogger(__name__) - -DEFAULT_INDEX = 'https://pypi.org/pypi' -DEFAULT_REALM = 'pypi' - -class PackageIndex(object): - """ - This class represents a package index compatible with PyPI, the Python - Package Index. - """ - - boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' - - def __init__(self, url=None): - """ - Initialise an instance. - - :param url: The URL of the index. If not specified, the URL for PyPI is - used. - """ - self.url = url or DEFAULT_INDEX - self.read_configuration() - scheme, netloc, path, params, query, frag = urlparse(self.url) - if params or query or frag or scheme not in ('http', 'https'): - raise DistlibException('invalid repository: %s' % self.url) - self.password_handler = None - self.ssl_verifier = None - self.gpg = None - self.gpg_home = None - with open(os.devnull, 'w') as sink: - # Use gpg by default rather than gpg2, as gpg2 insists on - # prompting for passwords - for s in ('gpg', 'gpg2'): - try: - rc = subprocess.check_call([s, '--version'], stdout=sink, - stderr=sink) - if rc == 0: - self.gpg = s - break - except OSError: - pass - - def _get_pypirc_command(self): - """ - Get the distutils command for interacting with PyPI configurations. - :return: the command. - """ - from .util import _get_pypirc_command as cmd - return cmd() - - def read_configuration(self): - """ - Read the PyPI access configuration as supported by distutils. This populates - ``username``, ``password``, ``realm`` and ``url`` attributes from the - configuration. - """ - from .util import _load_pypirc - cfg = _load_pypirc(self) - self.username = cfg.get('username') - self.password = cfg.get('password') - self.realm = cfg.get('realm', 'pypi') - self.url = cfg.get('repository', self.url) - - def save_configuration(self): - """ - Save the PyPI access configuration. You must have set ``username`` and - ``password`` attributes before calling this method. - """ - self.check_credentials() - from .util import _store_pypirc - _store_pypirc(self) - - def check_credentials(self): - """ - Check that ``username`` and ``password`` have been set, and raise an - exception if not. 
- """ - if self.username is None or self.password is None: - raise DistlibException('username and password must be set') - pm = HTTPPasswordMgr() - _, netloc, _, _, _, _ = urlparse(self.url) - pm.add_password(self.realm, netloc, self.username, self.password) - self.password_handler = HTTPBasicAuthHandler(pm) - - def register(self, metadata): - """ - Register a distribution on PyPI, using the provided metadata. - - :param metadata: A :class:`Metadata` instance defining at least a name - and version number for the distribution to be - registered. - :return: The HTTP response received from PyPI upon submission of the - request. - """ - self.check_credentials() - metadata.validate() - d = metadata.todict() - d[':action'] = 'verify' - request = self.encode_request(d.items(), []) - response = self.send_request(request) - d[':action'] = 'submit' - request = self.encode_request(d.items(), []) - return self.send_request(request) - - def _reader(self, name, stream, outbuf): - """ - Thread runner for reading lines of from a subprocess into a buffer. - - :param name: The logical name of the stream (used for logging only). - :param stream: The stream to read from. This will typically a pipe - connected to the output stream of a subprocess. - :param outbuf: The list to append the read lines to. - """ - while True: - s = stream.readline() - if not s: - break - s = s.decode('utf-8').rstrip() - outbuf.append(s) - logger.debug('%s: %s' % (name, s)) - stream.close() - - def get_sign_command(self, filename, signer, sign_password, - keystore=None): - """ - Return a suitable command for signing a file. - - :param filename: The pathname to the file to be signed. - :param signer: The identifier of the signer of the file. - :param sign_password: The passphrase for the signer's - private key used for signing. - :param keystore: The path to a directory which contains the keys - used in verification. If not specified, the - instance's ``gpg_home`` attribute is used instead. - :return: The signing command as a list suitable to be - passed to :class:`subprocess.Popen`. - """ - cmd = [self.gpg, '--status-fd', '2', '--no-tty'] - if keystore is None: - keystore = self.gpg_home - if keystore: - cmd.extend(['--homedir', keystore]) - if sign_password is not None: - cmd.extend(['--batch', '--passphrase-fd', '0']) - td = tempfile.mkdtemp() - sf = os.path.join(td, os.path.basename(filename) + '.asc') - cmd.extend(['--detach-sign', '--armor', '--local-user', - signer, '--output', sf, filename]) - logger.debug('invoking: %s', ' '.join(cmd)) - return cmd, sf - - def run_command(self, cmd, input_data=None): - """ - Run a command in a child process , passing it any input data specified. - - :param cmd: The command to run. - :param input_data: If specified, this must be a byte string containing - data to be sent to the child process. - :return: A tuple consisting of the subprocess' exit code, a list of - lines read from the subprocess' ``stdout``, and a list of - lines read from the subprocess' ``stderr``. 
- """ - kwargs = { - 'stdout': subprocess.PIPE, - 'stderr': subprocess.PIPE, - } - if input_data is not None: - kwargs['stdin'] = subprocess.PIPE - stdout = [] - stderr = [] - p = subprocess.Popen(cmd, **kwargs) - # We don't use communicate() here because we may need to - # get clever with interacting with the command - t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) - t1.start() - t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) - t2.start() - if input_data is not None: - p.stdin.write(input_data) - p.stdin.close() - - p.wait() - t1.join() - t2.join() - return p.returncode, stdout, stderr - - def sign_file(self, filename, signer, sign_password, keystore=None): - """ - Sign a file. - - :param filename: The pathname to the file to be signed. - :param signer: The identifier of the signer of the file. - :param sign_password: The passphrase for the signer's - private key used for signing. - :param keystore: The path to a directory which contains the keys - used in signing. If not specified, the instance's - ``gpg_home`` attribute is used instead. - :return: The absolute pathname of the file where the signature is - stored. - """ - cmd, sig_file = self.get_sign_command(filename, signer, sign_password, - keystore) - rc, stdout, stderr = self.run_command(cmd, - sign_password.encode('utf-8')) - if rc != 0: - raise DistlibException('sign command failed with error ' - 'code %s' % rc) - return sig_file - - def upload_file(self, metadata, filename, signer=None, sign_password=None, - filetype='sdist', pyversion='source', keystore=None): - """ - Upload a release file to the index. - - :param metadata: A :class:`Metadata` instance defining at least a name - and version number for the file to be uploaded. - :param filename: The pathname of the file to be uploaded. - :param signer: The identifier of the signer of the file. - :param sign_password: The passphrase for the signer's - private key used for signing. - :param filetype: The type of the file being uploaded. This is the - distutils command which produced that file, e.g. - ``sdist`` or ``bdist_wheel``. - :param pyversion: The version of Python which the release relates - to. For code compatible with any Python, this would - be ``source``, otherwise it would be e.g. ``3.2``. - :param keystore: The path to a directory which contains the keys - used in signing. If not specified, the instance's - ``gpg_home`` attribute is used instead. - :return: The HTTP response received from PyPI upon submission of the - request. 
- """ - self.check_credentials() - if not os.path.exists(filename): - raise DistlibException('not found: %s' % filename) - metadata.validate() - d = metadata.todict() - sig_file = None - if signer: - if not self.gpg: - logger.warning('no signing program available - not signed') - else: - sig_file = self.sign_file(filename, signer, sign_password, - keystore) - with open(filename, 'rb') as f: - file_data = f.read() - md5_digest = hashlib.md5(file_data).hexdigest() - sha256_digest = hashlib.sha256(file_data).hexdigest() - d.update({ - ':action': 'file_upload', - 'protocol_version': '1', - 'filetype': filetype, - 'pyversion': pyversion, - 'md5_digest': md5_digest, - 'sha256_digest': sha256_digest, - }) - files = [('content', os.path.basename(filename), file_data)] - if sig_file: - with open(sig_file, 'rb') as f: - sig_data = f.read() - files.append(('gpg_signature', os.path.basename(sig_file), - sig_data)) - shutil.rmtree(os.path.dirname(sig_file)) - request = self.encode_request(d.items(), files) - return self.send_request(request) - - def upload_documentation(self, metadata, doc_dir): - """ - Upload documentation to the index. - - :param metadata: A :class:`Metadata` instance defining at least a name - and version number for the documentation to be - uploaded. - :param doc_dir: The pathname of the directory which contains the - documentation. This should be the directory that - contains the ``index.html`` for the documentation. - :return: The HTTP response received from PyPI upon submission of the - request. - """ - self.check_credentials() - if not os.path.isdir(doc_dir): - raise DistlibException('not a directory: %r' % doc_dir) - fn = os.path.join(doc_dir, 'index.html') - if not os.path.exists(fn): - raise DistlibException('not found: %r' % fn) - metadata.validate() - name, version = metadata.name, metadata.version - zip_data = zip_dir(doc_dir).getvalue() - fields = [(':action', 'doc_upload'), - ('name', name), ('version', version)] - files = [('content', name, zip_data)] - request = self.encode_request(fields, files) - return self.send_request(request) - - def get_verify_command(self, signature_filename, data_filename, - keystore=None): - """ - Return a suitable command for verifying a file. - - :param signature_filename: The pathname to the file containing the - signature. - :param data_filename: The pathname to the file containing the - signed data. - :param keystore: The path to a directory which contains the keys - used in verification. If not specified, the - instance's ``gpg_home`` attribute is used instead. - :return: The verifying command as a list suitable to be - passed to :class:`subprocess.Popen`. - """ - cmd = [self.gpg, '--status-fd', '2', '--no-tty'] - if keystore is None: - keystore = self.gpg_home - if keystore: - cmd.extend(['--homedir', keystore]) - cmd.extend(['--verify', signature_filename, data_filename]) - logger.debug('invoking: %s', ' '.join(cmd)) - return cmd - - def verify_signature(self, signature_filename, data_filename, - keystore=None): - """ - Verify a signature for a file. - - :param signature_filename: The pathname to the file containing the - signature. - :param data_filename: The pathname to the file containing the - signed data. - :param keystore: The path to a directory which contains the keys - used in verification. If not specified, the - instance's ``gpg_home`` attribute is used instead. - :return: True if the signature was verified, else False. 
- """ - if not self.gpg: - raise DistlibException('verification unavailable because gpg ' - 'unavailable') - cmd = self.get_verify_command(signature_filename, data_filename, - keystore) - rc, stdout, stderr = self.run_command(cmd) - if rc not in (0, 1): - raise DistlibException('verify command failed with error ' - 'code %s' % rc) - return rc == 0 - - def download_file(self, url, destfile, digest=None, reporthook=None): - """ - This is a convenience method for downloading a file from an URL. - Normally, this will be a file from the index, though currently - no check is made for this (i.e. a file can be downloaded from - anywhere). - - The method is just like the :func:`urlretrieve` function in the - standard library, except that it allows digest computation to be - done during download and checking that the downloaded data - matched any expected value. - - :param url: The URL of the file to be downloaded (assumed to be - available via an HTTP GET request). - :param destfile: The pathname where the downloaded file is to be - saved. - :param digest: If specified, this must be a (hasher, value) - tuple, where hasher is the algorithm used (e.g. - ``'md5'``) and ``value`` is the expected value. - :param reporthook: The same as for :func:`urlretrieve` in the - standard library. - """ - if digest is None: - digester = None - logger.debug('No digest specified') - else: - if isinstance(digest, (list, tuple)): - hasher, digest = digest - else: - hasher = 'md5' - digester = getattr(hashlib, hasher)() - logger.debug('Digest specified: %s' % digest) - # The following code is equivalent to urlretrieve. - # We need to do it this way so that we can compute the - # digest of the file as we go. - with open(destfile, 'wb') as dfp: - # addinfourl is not a context manager on 2.x - # so we have to use try/finally - sfp = self.send_request(Request(url)) - try: - headers = sfp.info() - blocksize = 8192 - size = -1 - read = 0 - blocknum = 0 - if "content-length" in headers: - size = int(headers["Content-Length"]) - if reporthook: - reporthook(blocknum, blocksize, size) - while True: - block = sfp.read(blocksize) - if not block: - break - read += len(block) - dfp.write(block) - if digester: - digester.update(block) - blocknum += 1 - if reporthook: - reporthook(blocknum, blocksize, size) - finally: - sfp.close() - - # check that we got the whole file, if we can - if size >= 0 and read < size: - raise DistlibException( - 'retrieval incomplete: got only %d out of %d bytes' - % (read, size)) - # if we have a digest, it must match. - if digester: - actual = digester.hexdigest() - if digest != actual: - raise DistlibException('%s digest mismatch for %s: expected ' - '%s, got %s' % (hasher, destfile, - digest, actual)) - logger.debug('Digest verified: %s', digest) - - def send_request(self, req): - """ - Send a standard library :class:`Request` to PyPI and return its - response. - - :param req: The request to send. - :return: The HTTP response from PyPI (a standard library HTTPResponse). - """ - handlers = [] - if self.password_handler: - handlers.append(self.password_handler) - if self.ssl_verifier: - handlers.append(self.ssl_verifier) - opener = build_opener(*handlers) - return opener.open(req) - - def encode_request(self, fields, files): - """ - Encode fields and files for posting to an HTTP server. - - :param fields: The fields to send as a list of (fieldname, value) - tuples. - :param files: The files to send as a list of (fieldname, filename, - file_bytes) tuple. 
- """ - # Adapted from packaging, which in turn was adapted from - # http://code.activestate.com/recipes/146306 - - parts = [] - boundary = self.boundary - for k, values in fields: - if not isinstance(values, (list, tuple)): - values = [values] - - for v in values: - parts.extend(( - b'--' + boundary, - ('Content-Disposition: form-data; name="%s"' % - k).encode('utf-8'), - b'', - v.encode('utf-8'))) - for key, filename, value in files: - parts.extend(( - b'--' + boundary, - ('Content-Disposition: form-data; name="%s"; filename="%s"' % - (key, filename)).encode('utf-8'), - b'', - value)) - - parts.extend((b'--' + boundary + b'--', b'')) - - body = b'\r\n'.join(parts) - ct = b'multipart/form-data; boundary=' + boundary - headers = { - 'Content-type': ct, - 'Content-length': str(len(body)) - } - return Request(self.url, body, headers) - - def search(self, terms, operator=None): - if isinstance(terms, string_types): - terms = {'name': terms} - rpc_proxy = ServerProxy(self.url, timeout=3.0) - try: - return rpc_proxy.search(terms, operator or 'and') - finally: - rpc_proxy('close')() diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/locators.py b/venv/Lib/site-packages/pip/_vendor/distlib/locators.py deleted file mode 100644 index 0c7d639..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/locators.py +++ /dev/null @@ -1,1300 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012-2015 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. -# - -import gzip -from io import BytesIO -import json -import logging -import os -import posixpath -import re -try: - import threading -except ImportError: # pragma: no cover - import dummy_threading as threading -import zlib - -from . import DistlibException -from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, - queue, quote, unescape, build_opener, - HTTPRedirectHandler as BaseRedirectHandler, text_type, - Request, HTTPError, URLError) -from .database import Distribution, DistributionPath, make_dist -from .metadata import Metadata, MetadataInvalidError -from .util import (cached_property, ensure_slash, split_filename, get_project_data, - parse_requirement, parse_name_and_version, ServerProxy, - normalize_name) -from .version import get_scheme, UnsupportedVersionError -from .wheel import Wheel, is_compatible - -logger = logging.getLogger(__name__) - -HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') -CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) -HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') -DEFAULT_INDEX = 'https://pypi.org/pypi' - -def get_all_distribution_names(url=None): - """ - Return all distribution names known by an index. - :param url: The URL of the index. - :return: A list of all known distribution names. - """ - if url is None: - url = DEFAULT_INDEX - client = ServerProxy(url, timeout=3.0) - try: - return client.list_packages() - finally: - client('close')() - -class RedirectHandler(BaseRedirectHandler): - """ - A class to work around a bug in some Python 3.2.x releases. - """ - # There's a bug in the base version for some 3.2.x - # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header - # returns e.g. /abc, it bails because it says the scheme '' - # is bogus, when actually it should use the request's - # URL for the scheme. See Python issue #13696. 
- def http_error_302(self, req, fp, code, msg, headers): - # Some servers (incorrectly) return multiple Location headers - # (so probably same goes for URI). Use first header. - newurl = None - for key in ('location', 'uri'): - if key in headers: - newurl = headers[key] - break - if newurl is None: # pragma: no cover - return - urlparts = urlparse(newurl) - if urlparts.scheme == '': - newurl = urljoin(req.get_full_url(), newurl) - if hasattr(headers, 'replace_header'): - headers.replace_header(key, newurl) - else: - headers[key] = newurl - return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, - headers) - - http_error_301 = http_error_303 = http_error_307 = http_error_302 - -class Locator(object): - """ - A base class for locators - things that locate distributions. - """ - source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') - binary_extensions = ('.egg', '.exe', '.whl') - excluded_extensions = ('.pdf',) - - # A list of tags indicating which wheels you want to match. The default - # value of None matches against the tags compatible with the running - # Python. If you want to match other values, set wheel_tags on a locator - # instance to a list of tuples (pyver, abi, arch) which you want to match. - wheel_tags = None - - downloadable_extensions = source_extensions + ('.whl',) - - def __init__(self, scheme='default'): - """ - Initialise an instance. - :param scheme: Because locators look for most recent versions, they - need to know the version scheme to use. This specifies - the current PEP-recommended scheme - use ``'legacy'`` - if you need to support existing distributions on PyPI. - """ - self._cache = {} - self.scheme = scheme - # Because of bugs in some of the handlers on some of the platforms, - # we use our own opener rather than just using urlopen. - self.opener = build_opener(RedirectHandler()) - # If get_project() is called from locate(), the matcher instance - # is set from the requirement passed to locate(). See issue #18 for - # why this can be useful to know. - self.matcher = None - self.errors = queue.Queue() - - def get_errors(self): - """ - Return any errors which have occurred. - """ - result = [] - while not self.errors.empty(): # pragma: no cover - try: - e = self.errors.get(False) - result.append(e) - except self.errors.Empty: - continue - self.errors.task_done() - return result - - def clear_errors(self): - """ - Clear any errors which may have been logged. - """ - # Just get the errors and throw them away - self.get_errors() - - def clear_cache(self): - self._cache.clear() - - def _get_scheme(self): - return self._scheme - - def _set_scheme(self, value): - self._scheme = value - - scheme = property(_get_scheme, _set_scheme) - - def _get_project(self, name): - """ - For a given project, get a dictionary mapping available versions to Distribution - instances. - - This should be implemented in subclasses. - - If called from a locate() request, self.matcher will be set to a - matcher for the requirement to satisfy, otherwise it will be None. - """ - raise NotImplementedError('Please implement in the subclass') - - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - raise NotImplementedError('Please implement in the subclass') - - def get_project(self, name): - """ - For a given project, get a dictionary mapping available versions to Distribution - instances. - - This calls _get_project to do all the work, and just implements a caching layer on top. 
- """ - if self._cache is None: # pragma: no cover - result = self._get_project(name) - elif name in self._cache: - result = self._cache[name] - else: - self.clear_errors() - result = self._get_project(name) - self._cache[name] = result - return result - - def score_url(self, url): - """ - Give an url a score which can be used to choose preferred URLs - for a given project release. - """ - t = urlparse(url) - basename = posixpath.basename(t.path) - compatible = True - is_wheel = basename.endswith('.whl') - is_downloadable = basename.endswith(self.downloadable_extensions) - if is_wheel: - compatible = is_compatible(Wheel(basename), self.wheel_tags) - return (t.scheme == 'https', 'pypi.org' in t.netloc, - is_downloadable, is_wheel, compatible, basename) - - def prefer_url(self, url1, url2): - """ - Choose one of two URLs where both are candidates for distribution - archives for the same version of a distribution (for example, - .tar.gz vs. zip). - - The current implementation favours https:// URLs over http://, archives - from PyPI over those from other locations, wheel compatibility (if a - wheel) and then the archive name. - """ - result = url2 - if url1: - s1 = self.score_url(url1) - s2 = self.score_url(url2) - if s1 > s2: - result = url1 - if result != url2: - logger.debug('Not replacing %r with %r', url1, url2) - else: - logger.debug('Replacing %r with %r', url1, url2) - return result - - def split_filename(self, filename, project_name): - """ - Attempt to split a filename in project name, version and Python version. - """ - return split_filename(filename, project_name) - - def convert_url_to_download_info(self, url, project_name): - """ - See if a URL is a candidate for a download URL for a project (the URL - has typically been scraped from an HTML page). - - If it is, a dictionary is returned with keys "name", "version", - "filename" and "url"; otherwise, None is returned. 
- """ - def same_project(name1, name2): - return normalize_name(name1) == normalize_name(name2) - - result = None - scheme, netloc, path, params, query, frag = urlparse(url) - if frag.lower().startswith('egg='): # pragma: no cover - logger.debug('%s: version hint in fragment: %r', - project_name, frag) - m = HASHER_HASH.match(frag) - if m: - algo, digest = m.groups() - else: - algo, digest = None, None - origpath = path - if path and path[-1] == '/': # pragma: no cover - path = path[:-1] - if path.endswith('.whl'): - try: - wheel = Wheel(path) - if not is_compatible(wheel, self.wheel_tags): - logger.debug('Wheel not compatible: %s', path) - else: - if project_name is None: - include = True - else: - include = same_project(wheel.name, project_name) - if include: - result = { - 'name': wheel.name, - 'version': wheel.version, - 'filename': wheel.filename, - 'url': urlunparse((scheme, netloc, origpath, - params, query, '')), - 'python-version': ', '.join( - ['.'.join(list(v[2:])) for v in wheel.pyver]), - } - except Exception as e: # pragma: no cover - logger.warning('invalid path for wheel: %s', path) - elif not path.endswith(self.downloadable_extensions): # pragma: no cover - logger.debug('Not downloadable: %s', path) - else: # downloadable extension - path = filename = posixpath.basename(path) - for ext in self.downloadable_extensions: - if path.endswith(ext): - path = path[:-len(ext)] - t = self.split_filename(path, project_name) - if not t: # pragma: no cover - logger.debug('No match for project/version: %s', path) - else: - name, version, pyver = t - if not project_name or same_project(project_name, name): - result = { - 'name': name, - 'version': version, - 'filename': filename, - 'url': urlunparse((scheme, netloc, origpath, - params, query, '')), - #'packagetype': 'sdist', - } - if pyver: # pragma: no cover - result['python-version'] = pyver - break - if result and algo: - result['%s_digest' % algo] = digest - return result - - def _get_digest(self, info): - """ - Get a digest from a dictionary by looking at a "digests" dictionary - or keys of the form 'algo_digest'. - - Returns a 2-tuple (algo, digest) if found, else None. Currently - looks only for SHA256, then MD5. - """ - result = None - if 'digests' in info: - digests = info['digests'] - for algo in ('sha256', 'md5'): - if algo in digests: - result = (algo, digests[algo]) - break - if not result: - for algo in ('sha256', 'md5'): - key = '%s_digest' % algo - if key in info: - result = (algo, info[key]) - break - return result - - def _update_version_data(self, result, info): - """ - Update a result dictionary (the final result from _get_project) with a - dictionary for a specific version, which typically holds information - gleaned from a filename or URL for an archive for the distribution. - """ - name = info.pop('name') - version = info.pop('version') - if version in result: - dist = result[version] - md = dist.metadata - else: - dist = make_dist(name, version, scheme=self.scheme) - md = dist.metadata - dist.digest = digest = self._get_digest(info) - url = info['url'] - result['digests'][url] = digest - if md.source_url != info['url']: - md.source_url = self.prefer_url(md.source_url, url) - result['urls'].setdefault(version, set()).add(url) - dist.locator = self - result[version] = dist - - def locate(self, requirement, prereleases=False): - """ - Find the most recent distribution which matches the given - requirement. 
- - :param requirement: A requirement of the form 'foo (1.0)' or perhaps - 'foo (>= 1.0, < 2.0, != 1.3)' - :param prereleases: If ``True``, allow pre-release versions - to be located. Otherwise, pre-release versions - are not returned. - :return: A :class:`Distribution` instance, or ``None`` if no such - distribution could be located. - """ - result = None - r = parse_requirement(requirement) - if r is None: # pragma: no cover - raise DistlibException('Not a valid requirement: %r' % requirement) - scheme = get_scheme(self.scheme) - self.matcher = matcher = scheme.matcher(r.requirement) - logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) - versions = self.get_project(r.name) - if len(versions) > 2: # urls and digests keys are present - # sometimes, versions are invalid - slist = [] - vcls = matcher.version_class - for k in versions: - if k in ('urls', 'digests'): - continue - try: - if not matcher.match(k): - pass # logger.debug('%s did not match %r', matcher, k) - else: - if prereleases or not vcls(k).is_prerelease: - slist.append(k) - # else: - # logger.debug('skipping pre-release ' - # 'version %s of %s', k, matcher.name) - except Exception: # pragma: no cover - logger.warning('error matching %s with %r', matcher, k) - pass # slist.append(k) - if len(slist) > 1: - slist = sorted(slist, key=scheme.key) - if slist: - logger.debug('sorted list: %s', slist) - version = slist[-1] - result = versions[version] - if result: - if r.extras: - result.extras = r.extras - result.download_urls = versions.get('urls', {}).get(version, set()) - d = {} - sd = versions.get('digests', {}) - for url in result.download_urls: - if url in sd: # pragma: no cover - d[url] = sd[url] - result.digests = d - self.matcher = None - return result - - -class PyPIRPCLocator(Locator): - """ - This locator uses XML-RPC to locate distributions. It therefore - cannot be used with simple mirrors (that only mirror file content). - """ - def __init__(self, url, **kwargs): - """ - Initialise an instance. - - :param url: The URL to use for XML-RPC. - :param kwargs: Passed to the superclass constructor. - """ - super(PyPIRPCLocator, self).__init__(**kwargs) - self.base_url = url - self.client = ServerProxy(url, timeout=3.0) - - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - return set(self.client.list_packages()) - - def _get_project(self, name): - result = {'urls': {}, 'digests': {}} - versions = self.client.package_releases(name, True) - for v in versions: - urls = self.client.release_urls(name, v) - data = self.client.release_data(name, v) - metadata = Metadata(scheme=self.scheme) - metadata.name = data['name'] - metadata.version = data['version'] - metadata.license = data.get('license') - metadata.keywords = data.get('keywords', []) - metadata.summary = data.get('summary') - dist = Distribution(metadata) - if urls: - info = urls[0] - metadata.source_url = info['url'] - dist.digest = self._get_digest(info) - dist.locator = self - result[v] = dist - for info in urls: - url = info['url'] - digest = self._get_digest(info) - result['urls'].setdefault(v, set()).add(url) - result['digests'][url] = digest - return result - -class PyPIJSONLocator(Locator): - """ - This locator uses PyPI's JSON interface. It's very limited in functionality - and probably not worth using. 
- """ - def __init__(self, url, **kwargs): - super(PyPIJSONLocator, self).__init__(**kwargs) - self.base_url = ensure_slash(url) - - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - raise NotImplementedError('Not available from this locator') - - def _get_project(self, name): - result = {'urls': {}, 'digests': {}} - url = urljoin(self.base_url, '%s/json' % quote(name)) - try: - resp = self.opener.open(url) - data = resp.read().decode() # for now - d = json.loads(data) - md = Metadata(scheme=self.scheme) - data = d['info'] - md.name = data['name'] - md.version = data['version'] - md.license = data.get('license') - md.keywords = data.get('keywords', []) - md.summary = data.get('summary') - dist = Distribution(md) - dist.locator = self - urls = d['urls'] - result[md.version] = dist - for info in d['urls']: - url = info['url'] - dist.download_urls.add(url) - dist.digests[url] = self._get_digest(info) - result['urls'].setdefault(md.version, set()).add(url) - result['digests'][url] = self._get_digest(info) - # Now get other releases - for version, infos in d['releases'].items(): - if version == md.version: - continue # already done - omd = Metadata(scheme=self.scheme) - omd.name = md.name - omd.version = version - odist = Distribution(omd) - odist.locator = self - result[version] = odist - for info in infos: - url = info['url'] - odist.download_urls.add(url) - odist.digests[url] = self._get_digest(info) - result['urls'].setdefault(version, set()).add(url) - result['digests'][url] = self._get_digest(info) -# for info in urls: -# md.source_url = info['url'] -# dist.digest = self._get_digest(info) -# dist.locator = self -# for info in urls: -# url = info['url'] -# result['urls'].setdefault(md.version, set()).add(url) -# result['digests'][url] = self._get_digest(info) - except Exception as e: - self.errors.put(text_type(e)) - logger.exception('JSON fetch failed: %s', e) - return result - - -class Page(object): - """ - This class represents a scraped HTML page. - """ - # The following slightly hairy-looking regex just looks for the contents of - # an anchor link, which has an attribute "href" either immediately preceded - # or immediately followed by a "rel" attribute. The attribute values can be - # declared with double quotes, single quotes or no quotes - which leads to - # the length of the expression. - _href = re.compile(""" -(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? -href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) -(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? -""", re.I | re.S | re.X) - _base = re.compile(r"""]+)""", re.I | re.S) - - def __init__(self, data, url): - """ - Initialise an instance with the Unicode page contents and the URL they - came from. - """ - self.data = data - self.base_url = self.url = url - m = self._base.search(self.data) - if m: - self.base_url = m.group(1) - - _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) - - @cached_property - def links(self): - """ - Return the URLs of all the links on a page together with information - about their "rel" attribute, for determining which ones to treat as - downloads and which ones to queue for further scraping. - """ - def clean(url): - "Tidy up an URL." 
- scheme, netloc, path, params, query, frag = urlparse(url) - return urlunparse((scheme, netloc, quote(path), - params, query, frag)) - - result = set() - for match in self._href.finditer(self.data): - d = match.groupdict('') - rel = (d['rel1'] or d['rel2'] or d['rel3'] or - d['rel4'] or d['rel5'] or d['rel6']) - url = d['url1'] or d['url2'] or d['url3'] - url = urljoin(self.base_url, url) - url = unescape(url) - url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) - result.add((url, rel)) - # We sort the result, hoping to bring the most recent versions - # to the front - result = sorted(result, key=lambda t: t[0], reverse=True) - return result - - -class SimpleScrapingLocator(Locator): - """ - A locator which scrapes HTML pages to locate downloads for a distribution. - This runs multiple threads to do the I/O; performance is at least as good - as pip's PackageFinder, which works in an analogous fashion. - """ - - # These are used to deal with various Content-Encoding schemes. - decoders = { - 'deflate': zlib.decompress, - 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), - 'none': lambda b: b, - } - - def __init__(self, url, timeout=None, num_workers=10, **kwargs): - """ - Initialise an instance. - :param url: The root URL to use for scraping. - :param timeout: The timeout, in seconds, to be applied to requests. - This defaults to ``None`` (no timeout specified). - :param num_workers: The number of worker threads you want to do I/O, - This defaults to 10. - :param kwargs: Passed to the superclass. - """ - super(SimpleScrapingLocator, self).__init__(**kwargs) - self.base_url = ensure_slash(url) - self.timeout = timeout - self._page_cache = {} - self._seen = set() - self._to_fetch = queue.Queue() - self._bad_hosts = set() - self.skip_externals = False - self.num_workers = num_workers - self._lock = threading.RLock() - # See issue #45: we need to be resilient when the locator is used - # in a thread, e.g. with concurrent.futures. We can't use self._lock - # as it is for coordinating our internal threads - the ones created - # in _prepare_threads. - self._gplock = threading.RLock() - self.platform_check = False # See issue #112 - - def _prepare_threads(self): - """ - Threads are created only when get_project is called, and terminate - before it returns. They are there primarily to parallelise I/O (i.e. - fetching web pages). - """ - self._threads = [] - for i in range(self.num_workers): - t = threading.Thread(target=self._fetch) - t.setDaemon(True) - t.start() - self._threads.append(t) - - def _wait_threads(self): - """ - Tell all the threads to terminate (by sending a sentinel value) and - wait for them to do so. - """ - # Note that you need two loops, since you can't say which - # thread will get each sentinel - for t in self._threads: - self._to_fetch.put(None) # sentinel - for t in self._threads: - t.join() - self._threads = [] - - def _get_project(self, name): - result = {'urls': {}, 'digests': {}} - with self._gplock: - self.result = result - self.project_name = name - url = urljoin(self.base_url, '%s/' % quote(name)) - self._seen.clear() - self._page_cache.clear() - self._prepare_threads() - try: - logger.debug('Queueing %s', url) - self._to_fetch.put(url) - self._to_fetch.join() - finally: - self._wait_threads() - del self.result - return result - - platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' - r'win(32|_amd64)|macosx_?\d+)\b', re.I) - - def _is_platform_dependent(self, url): - """ - Does an URL refer to a platform-specific download? 
- """ - return self.platform_dependent.search(url) - - def _process_download(self, url): - """ - See if an URL is a suitable download for a project. - - If it is, register information in the result dictionary (for - _get_project) about the specific version it's for. - - Note that the return value isn't actually used other than as a boolean - value. - """ - if self.platform_check and self._is_platform_dependent(url): - info = None - else: - info = self.convert_url_to_download_info(url, self.project_name) - logger.debug('process_download: %s -> %s', url, info) - if info: - with self._lock: # needed because self.result is shared - self._update_version_data(self.result, info) - return info - - def _should_queue(self, link, referrer, rel): - """ - Determine whether a link URL from a referring page and with a - particular "rel" attribute should be queued for scraping. - """ - scheme, netloc, path, _, _, _ = urlparse(link) - if path.endswith(self.source_extensions + self.binary_extensions + - self.excluded_extensions): - result = False - elif self.skip_externals and not link.startswith(self.base_url): - result = False - elif not referrer.startswith(self.base_url): - result = False - elif rel not in ('homepage', 'download'): - result = False - elif scheme not in ('http', 'https', 'ftp'): - result = False - elif self._is_platform_dependent(link): - result = False - else: - host = netloc.split(':', 1)[0] - if host.lower() == 'localhost': - result = False - else: - result = True - logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, - referrer, result) - return result - - def _fetch(self): - """ - Get a URL to fetch from the work queue, get the HTML page, examine its - links for download candidates and candidates for further scraping. - - This is a handy method to run in a thread. - """ - while True: - url = self._to_fetch.get() - try: - if url: - page = self.get_page(url) - if page is None: # e.g. after an error - continue - for link, rel in page.links: - if link not in self._seen: - try: - self._seen.add(link) - if (not self._process_download(link) and - self._should_queue(link, url, rel)): - logger.debug('Queueing %s from %s', link, url) - self._to_fetch.put(link) - except MetadataInvalidError: # e.g. invalid versions - pass - except Exception as e: # pragma: no cover - self.errors.put(text_type(e)) - finally: - # always do this, to avoid hangs :-) - self._to_fetch.task_done() - if not url: - #logger.debug('Sentinel seen, quitting.') - break - - def get_page(self, url): - """ - Get the HTML for an URL, possibly from an in-memory cache. - - XXX TODO Note: this cache is never actually cleared. It's assumed that - the data won't get stale over the lifetime of a locator instance (not - necessarily true for the default_locator). 
- """ - # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api - scheme, netloc, path, _, _, _ = urlparse(url) - if scheme == 'file' and os.path.isdir(url2pathname(path)): - url = urljoin(ensure_slash(url), 'index.html') - - if url in self._page_cache: - result = self._page_cache[url] - logger.debug('Returning %s from cache: %s', url, result) - else: - host = netloc.split(':', 1)[0] - result = None - if host in self._bad_hosts: - logger.debug('Skipping %s due to bad host %s', url, host) - else: - req = Request(url, headers={'Accept-encoding': 'identity'}) - try: - logger.debug('Fetching %s', url) - resp = self.opener.open(req, timeout=self.timeout) - logger.debug('Fetched %s', url) - headers = resp.info() - content_type = headers.get('Content-Type', '') - if HTML_CONTENT_TYPE.match(content_type): - final_url = resp.geturl() - data = resp.read() - encoding = headers.get('Content-Encoding') - if encoding: - decoder = self.decoders[encoding] # fail if not found - data = decoder(data) - encoding = 'utf-8' - m = CHARSET.search(content_type) - if m: - encoding = m.group(1) - try: - data = data.decode(encoding) - except UnicodeError: # pragma: no cover - data = data.decode('latin-1') # fallback - result = Page(data, final_url) - self._page_cache[final_url] = result - except HTTPError as e: - if e.code != 404: - logger.exception('Fetch failed: %s: %s', url, e) - except URLError as e: # pragma: no cover - logger.exception('Fetch failed: %s: %s', url, e) - with self._lock: - self._bad_hosts.add(host) - except Exception as e: # pragma: no cover - logger.exception('Fetch failed: %s: %s', url, e) - finally: - self._page_cache[url] = result # even if None (failure) - return result - - _distname_re = re.compile(']*>([^<]+)<') - - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - result = set() - page = self.get_page(self.base_url) - if not page: - raise DistlibException('Unable to get %s' % self.base_url) - for match in self._distname_re.finditer(page.data): - result.add(match.group(1)) - return result - -class DirectoryLocator(Locator): - """ - This class locates distributions in a directory tree. - """ - - def __init__(self, path, **kwargs): - """ - Initialise an instance. - :param path: The root of the directory tree to search. - :param kwargs: Passed to the superclass constructor, - except for: - * recursive - if True (the default), subdirectories are - recursed into. If False, only the top-level directory - is searched, - """ - self.recursive = kwargs.pop('recursive', True) - super(DirectoryLocator, self).__init__(**kwargs) - path = os.path.abspath(path) - if not os.path.isdir(path): # pragma: no cover - raise DistlibException('Not a directory: %r' % path) - self.base_dir = path - - def should_include(self, filename, parent): - """ - Should a filename be considered as a candidate for a distribution - archive? As well as the filename, the directory which contains it - is provided, though not used by the current implementation. 
- """ - return filename.endswith(self.downloadable_extensions) - - def _get_project(self, name): - result = {'urls': {}, 'digests': {}} - for root, dirs, files in os.walk(self.base_dir): - for fn in files: - if self.should_include(fn, root): - fn = os.path.join(root, fn) - url = urlunparse(('file', '', - pathname2url(os.path.abspath(fn)), - '', '', '')) - info = self.convert_url_to_download_info(url, name) - if info: - self._update_version_data(result, info) - if not self.recursive: - break - return result - - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - result = set() - for root, dirs, files in os.walk(self.base_dir): - for fn in files: - if self.should_include(fn, root): - fn = os.path.join(root, fn) - url = urlunparse(('file', '', - pathname2url(os.path.abspath(fn)), - '', '', '')) - info = self.convert_url_to_download_info(url, None) - if info: - result.add(info['name']) - if not self.recursive: - break - return result - -class JSONLocator(Locator): - """ - This locator uses special extended metadata (not available on PyPI) and is - the basis of performant dependency resolution in distlib. Other locators - require archive downloads before dependencies can be determined! As you - might imagine, that can be slow. - """ - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - raise NotImplementedError('Not available from this locator') - - def _get_project(self, name): - result = {'urls': {}, 'digests': {}} - data = get_project_data(name) - if data: - for info in data.get('files', []): - if info['ptype'] != 'sdist' or info['pyversion'] != 'source': - continue - # We don't store summary in project metadata as it makes - # the data bigger for no benefit during dependency - # resolution - dist = make_dist(data['name'], info['version'], - summary=data.get('summary', - 'Placeholder for summary'), - scheme=self.scheme) - md = dist.metadata - md.source_url = info['url'] - # TODO SHA256 digest - if 'digest' in info and info['digest']: - dist.digest = ('md5', info['digest']) - md.dependencies = info.get('requirements', {}) - dist.exports = info.get('exports', {}) - result[dist.version] = dist - result['urls'].setdefault(dist.version, set()).add(info['url']) - return result - -class DistPathLocator(Locator): - """ - This locator finds installed distributions in a path. It can be useful for - adding to an :class:`AggregatingLocator`. - """ - def __init__(self, distpath, **kwargs): - """ - Initialise an instance. - - :param distpath: A :class:`DistributionPath` instance to search. - """ - super(DistPathLocator, self).__init__(**kwargs) - assert isinstance(distpath, DistributionPath) - self.distpath = distpath - - def _get_project(self, name): - dist = self.distpath.get_distribution(name) - if dist is None: - result = {'urls': {}, 'digests': {}} - else: - result = { - dist.version: dist, - 'urls': {dist.version: set([dist.source_url])}, - 'digests': {dist.version: set([None])} - } - return result - - -class AggregatingLocator(Locator): - """ - This class allows you to chain and/or merge a list of locators. - """ - def __init__(self, *locators, **kwargs): - """ - Initialise an instance. - - :param locators: The list of locators to search. - :param kwargs: Passed to the superclass constructor, - except for: - * merge - if False (the default), the first successful - search from any of the locators is returned. If True, - the results from all locators are merged (this can be - slow). 
- """ - self.merge = kwargs.pop('merge', False) - self.locators = locators - super(AggregatingLocator, self).__init__(**kwargs) - - def clear_cache(self): - super(AggregatingLocator, self).clear_cache() - for locator in self.locators: - locator.clear_cache() - - def _set_scheme(self, value): - self._scheme = value - for locator in self.locators: - locator.scheme = value - - scheme = property(Locator.scheme.fget, _set_scheme) - - def _get_project(self, name): - result = {} - for locator in self.locators: - d = locator.get_project(name) - if d: - if self.merge: - files = result.get('urls', {}) - digests = result.get('digests', {}) - # next line could overwrite result['urls'], result['digests'] - result.update(d) - df = result.get('urls') - if files and df: - for k, v in files.items(): - if k in df: - df[k] |= v - else: - df[k] = v - dd = result.get('digests') - if digests and dd: - dd.update(digests) - else: - # See issue #18. If any dists are found and we're looking - # for specific constraints, we only return something if - # a match is found. For example, if a DirectoryLocator - # returns just foo (1.0) while we're looking for - # foo (>= 2.0), we'll pretend there was nothing there so - # that subsequent locators can be queried. Otherwise we - # would just return foo (1.0) which would then lead to a - # failure to find foo (>= 2.0), because other locators - # weren't searched. Note that this only matters when - # merge=False. - if self.matcher is None: - found = True - else: - found = False - for k in d: - if self.matcher.match(k): - found = True - break - if found: - result = d - break - return result - - def get_distribution_names(self): - """ - Return all the distribution names known to this locator. - """ - result = set() - for locator in self.locators: - try: - result |= locator.get_distribution_names() - except NotImplementedError: - pass - return result - - -# We use a legacy scheme simply because most of the dists on PyPI use legacy -# versions which don't conform to PEP 426 / PEP 440. -default_locator = AggregatingLocator( - JSONLocator(), - SimpleScrapingLocator('https://pypi.org/simple/', - timeout=3.0), - scheme='legacy') - -locate = default_locator.locate - - -class DependencyFinder(object): - """ - Locate dependencies for distributions. - """ - - def __init__(self, locator=None): - """ - Initialise an instance, using the specified locator - to locate distributions. - """ - self.locator = locator or default_locator - self.scheme = get_scheme(self.locator.scheme) - - def add_distribution(self, dist): - """ - Add a distribution to the finder. This will update internal information - about who provides what. - :param dist: The distribution to add. - """ - logger.debug('adding distribution %s', dist) - name = dist.key - self.dists_by_name[name] = dist - self.dists[(name, dist.version)] = dist - for p in dist.provides: - name, version = parse_name_and_version(p) - logger.debug('Add to provided: %s, %s, %s', name, version, dist) - self.provided.setdefault(name, set()).add((version, dist)) - - def remove_distribution(self, dist): - """ - Remove a distribution from the finder. This will update internal - information about who provides what. - :param dist: The distribution to remove. 
- """ - logger.debug('removing distribution %s', dist) - name = dist.key - del self.dists_by_name[name] - del self.dists[(name, dist.version)] - for p in dist.provides: - name, version = parse_name_and_version(p) - logger.debug('Remove from provided: %s, %s, %s', name, version, dist) - s = self.provided[name] - s.remove((version, dist)) - if not s: - del self.provided[name] - - def get_matcher(self, reqt): - """ - Get a version matcher for a requirement. - :param reqt: The requirement - :type reqt: str - :return: A version matcher (an instance of - :class:`distlib.version.Matcher`). - """ - try: - matcher = self.scheme.matcher(reqt) - except UnsupportedVersionError: # pragma: no cover - # XXX compat-mode if cannot read the version - name = reqt.split()[0] - matcher = self.scheme.matcher(name) - return matcher - - def find_providers(self, reqt): - """ - Find the distributions which can fulfill a requirement. - - :param reqt: The requirement. - :type reqt: str - :return: A set of distribution which can fulfill the requirement. - """ - matcher = self.get_matcher(reqt) - name = matcher.key # case-insensitive - result = set() - provided = self.provided - if name in provided: - for version, provider in provided[name]: - try: - match = matcher.match(version) - except UnsupportedVersionError: - match = False - - if match: - result.add(provider) - break - return result - - def try_to_replace(self, provider, other, problems): - """ - Attempt to replace one provider with another. This is typically used - when resolving dependencies from multiple sources, e.g. A requires - (B >= 1.0) while C requires (B >= 1.1). - - For successful replacement, ``provider`` must meet all the requirements - which ``other`` fulfills. - - :param provider: The provider we are trying to replace with. - :param other: The provider we're trying to replace. - :param problems: If False is returned, this will contain what - problems prevented replacement. This is currently - a tuple of the literal string 'cantreplace', - ``provider``, ``other`` and the set of requirements - that ``provider`` couldn't fulfill. - :return: True if we can replace ``other`` with ``provider``, else - False. - """ - rlist = self.reqts[other] - unmatched = set() - for s in rlist: - matcher = self.get_matcher(s) - if not matcher.match(provider.version): - unmatched.add(s) - if unmatched: - # can't replace other with provider - problems.add(('cantreplace', provider, other, - frozenset(unmatched))) - result = False - else: - # can replace other with provider - self.remove_distribution(other) - del self.reqts[other] - for s in rlist: - self.reqts.setdefault(provider, set()).add(s) - self.add_distribution(provider) - result = True - return result - - def find(self, requirement, meta_extras=None, prereleases=False): - """ - Find a distribution and all distributions it depends on. - - :param requirement: The requirement specifying the distribution to - find, or a Distribution instance. - :param meta_extras: A list of meta extras such as :test:, :build: and - so on. - :param prereleases: If ``True``, allow pre-release versions to be - returned - otherwise, don't return prereleases - unless they're all that's available. - - Return a set of :class:`Distribution` instances and a set of - problems. 
- - The distributions returned should be such that they have the - :attr:`required` attribute set to ``True`` if they were - from the ``requirement`` passed to ``find()``, and they have the - :attr:`build_time_dependency` attribute set to ``True`` unless they - are post-installation dependencies of the ``requirement``. - - The problems should be a tuple consisting of the string - ``'unsatisfied'`` and the requirement which couldn't be satisfied - by any distribution known to the locator. - """ - - self.provided = {} - self.dists = {} - self.dists_by_name = {} - self.reqts = {} - - meta_extras = set(meta_extras or []) - if ':*:' in meta_extras: - meta_extras.remove(':*:') - # :meta: and :run: are implicitly included - meta_extras |= set([':test:', ':build:', ':dev:']) - - if isinstance(requirement, Distribution): - dist = odist = requirement - logger.debug('passed %s as requirement', odist) - else: - dist = odist = self.locator.locate(requirement, - prereleases=prereleases) - if dist is None: - raise DistlibException('Unable to locate %r' % requirement) - logger.debug('located %s', odist) - dist.requested = True - problems = set() - todo = set([dist]) - install_dists = set([odist]) - while todo: - dist = todo.pop() - name = dist.key # case-insensitive - if name not in self.dists_by_name: - self.add_distribution(dist) - else: - #import pdb; pdb.set_trace() - other = self.dists_by_name[name] - if other != dist: - self.try_to_replace(dist, other, problems) - - ireqts = dist.run_requires | dist.meta_requires - sreqts = dist.build_requires - ereqts = set() - if meta_extras and dist in install_dists: - for key in ('test', 'build', 'dev'): - e = ':%s:' % key - if e in meta_extras: - ereqts |= getattr(dist, '%s_requires' % key) - all_reqts = ireqts | sreqts | ereqts - for r in all_reqts: - providers = self.find_providers(r) - if not providers: - logger.debug('No providers found for %r', r) - provider = self.locator.locate(r, prereleases=prereleases) - # If no provider is found and we didn't consider - # prereleases, consider them now. - if provider is None and not prereleases: - provider = self.locator.locate(r, prereleases=True) - if provider is None: - logger.debug('Cannot satisfy %r', r) - problems.add(('unsatisfied', r)) - else: - n, v = provider.key, provider.version - if (n, v) not in self.dists: - todo.add(provider) - providers.add(provider) - if r in ireqts and dist in install_dists: - install_dists.add(provider) - logger.debug('Adding %s to install_dists', - provider.name_and_version) - for p in providers: - name = p.key - if name not in self.dists_by_name: - self.reqts.setdefault(p, set()).add(r) - else: - other = self.dists_by_name[name] - if other != p: - # see if other can be replaced by p - self.try_to_replace(p, other, problems) - - dists = set(self.dists.values()) - for dist in dists: - dist.build_time_dependency = dist not in install_dists - if dist.build_time_dependency: - logger.debug('%s is a build-time dependency only.', - dist.name_and_version) - logger.debug('find done for %s', odist) - return dists, problems diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/manifest.py b/venv/Lib/site-packages/pip/_vendor/distlib/manifest.py deleted file mode 100644 index ca0fe44..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/manifest.py +++ /dev/null @@ -1,393 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012-2013 Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -""" -Class representing the list of files in a distribution. 
- -Equivalent to distutils.filelist, but fixes some problems. -""" -import fnmatch -import logging -import os -import re -import sys - -from . import DistlibException -from .compat import fsdecode -from .util import convert_path - - -__all__ = ['Manifest'] - -logger = logging.getLogger(__name__) - -# a \ followed by some spaces + EOL -_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) -_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) - -# -# Due to the different results returned by fnmatch.translate, we need -# to do slightly different processing for Python 2.7 and 3.2 ... this needed -# to be brought in for Python 3.6 onwards. -# -_PYTHON_VERSION = sys.version_info[:2] - -class Manifest(object): - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. - """ - - def __init__(self, base=None): - """ - Initialise an instance. - - :param base: The base directory to explore under. - """ - self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) - self.prefix = self.base + os.sep - self.allfiles = None - self.files = set() - - # - # Public API - # - - def findall(self): - """Find all files under the base and set ``allfiles`` to the absolute - pathnames of files found. - """ - from stat import S_ISREG, S_ISDIR, S_ISLNK - - self.allfiles = allfiles = [] - root = self.base - stack = [root] - pop = stack.pop - push = stack.append - - while stack: - root = pop() - names = os.listdir(root) - - for name in names: - fullname = os.path.join(root, name) - - # Avoid excess stat calls -- just one will do, thank you! - stat = os.stat(fullname) - mode = stat.st_mode - if S_ISREG(mode): - allfiles.append(fsdecode(fullname)) - elif S_ISDIR(mode) and not S_ISLNK(mode): - push(fullname) - - def add(self, item): - """ - Add a file to the manifest. - - :param item: The pathname to add. This can be relative to the base. - """ - if not item.startswith(self.prefix): - item = os.path.join(self.base, item) - self.files.add(os.path.normpath(item)) - - def add_many(self, items): - """ - Add a list of files to the manifest. - - :param items: The pathnames to add. These can be relative to the base. - """ - for item in items: - self.add(item) - - def sorted(self, wantdirs=False): - """ - Return sorted files in directory order - """ - - def add_dir(dirs, d): - dirs.add(d) - logger.debug('add_dir added %s', d) - if d != self.base: - parent, _ = os.path.split(d) - assert parent not in ('', '/') - add_dir(dirs, parent) - - result = set(self.files) # make a copy! - if wantdirs: - dirs = set() - for f in result: - add_dir(dirs, os.path.dirname(f)) - result |= dirs - return [os.path.join(*path_tuple) for path_tuple in - sorted(os.path.split(path) for path in result)] - - def clear(self): - """Clear all collected files.""" - self.files = set() - self.allfiles = [] - - def process_directive(self, directive): - """ - Process a directive which either adds some files from ``allfiles`` to - ``files``, or removes some files from ``files``. - - :param directive: The directive to process. This should be in a format - compatible with distutils ``MANIFEST.in`` files: - - http://docs.python.org/distutils/sourcedist.html#commands - """ - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dirpattern). 
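As a hedged illustration (not part of the deleted manifest.py), this sketch shows how a few typical MANIFEST.in-style directive lines, using example file and directory names, map onto the (action, patterns, thedir, dir_pattern) tuple returned by _parse_directive further below, assuming a POSIX system where convert_path leaves the patterns unchanged:

# Expected return values of Manifest._parse_directive (defined below) for
# three example directive lines; the dict name is purely illustrative.
expected = {
    'include README.rst *.txt':     ('include', ['README.rst', '*.txt'], None, None),
    'recursive-include docs *.rst': ('recursive-include', ['*.rst'], 'docs', None),
    'prune build':                  ('prune', None, None, 'build'),
}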
-        action, patterns, thedir, dirpattern = self._parse_directive(directive)
-
-        # OK, now we know that the action is valid and we have the
-        # right number of words on the line for that action -- so we
-        # can proceed with minimal error-checking.
-        if action == 'include':
-            for pattern in patterns:
-                if not self._include_pattern(pattern, anchor=True):
-                    logger.warning('no files found matching %r', pattern)
-
-        elif action == 'exclude':
-            for pattern in patterns:
-                found = self._exclude_pattern(pattern, anchor=True)
-                #if not found:
-                #    logger.warning('no previously-included files '
-                #                   'found matching %r', pattern)
-
-        elif action == 'global-include':
-            for pattern in patterns:
-                if not self._include_pattern(pattern, anchor=False):
-                    logger.warning('no files found matching %r '
-                                   'anywhere in distribution', pattern)
-
-        elif action == 'global-exclude':
-            for pattern in patterns:
-                found = self._exclude_pattern(pattern, anchor=False)
-                #if not found:
-                #    logger.warning('no previously-included files '
-                #                   'matching %r found anywhere in '
-                #                   'distribution', pattern)
-
-        elif action == 'recursive-include':
-            for pattern in patterns:
-                if not self._include_pattern(pattern, prefix=thedir):
-                    logger.warning('no files found matching %r '
-                                   'under directory %r', pattern, thedir)
-
-        elif action == 'recursive-exclude':
-            for pattern in patterns:
-                found = self._exclude_pattern(pattern, prefix=thedir)
-                #if not found:
-                #    logger.warning('no previously-included files '
-                #                   'matching %r found under directory %r',
-                #                   pattern, thedir)
-
-        elif action == 'graft':
-            if not self._include_pattern(None, prefix=dirpattern):
-                logger.warning('no directories found matching %r',
-                               dirpattern)
-
-        elif action == 'prune':
-            if not self._exclude_pattern(None, prefix=dirpattern):
-                logger.warning('no previously-included directories found '
-                               'matching %r', dirpattern)
-        else:   # pragma: no cover
-            # This should never happen, as it should be caught in
-            # _parse_template_line
-            raise DistlibException(
-                'invalid action %r' % action)
-
-    #
-    # Private API
-    #
-
-    def _parse_directive(self, directive):
-        """
-        Validate a directive.
-        :param directive: The directive to validate.
-        :return: A tuple of action, patterns, thedir, dir_patterns
-        """
-        words = directive.split()
-        if len(words) == 1 and words[0] not in ('include', 'exclude',
-                                                'global-include',
-                                                'global-exclude',
-                                                'recursive-include',
-                                                'recursive-exclude',
-                                                'graft', 'prune'):
-            # no action given, let's use the default 'include'
-            words.insert(0, 'include')
-
-        action = words[0]
-        patterns = thedir = dir_pattern = None
-
-        if action in ('include', 'exclude',
-                      'global-include', 'global-exclude'):
-            if len(words) < 2:
-                raise DistlibException(
-                    '%r expects <pattern1> <pattern2> ...' % action)
-
-            patterns = [convert_path(word) for word in words[1:]]
-
-        elif action in ('recursive-include', 'recursive-exclude'):
-            if len(words) < 3:
-                raise DistlibException(
-                    '%r expects <dir> <pattern1> <pattern2> ...' % action)
-
-            thedir = convert_path(words[1])
-            patterns = [convert_path(word) for word in words[2:]]
-
-        elif action in ('graft', 'prune'):
-            if len(words) != 2:
-                raise DistlibException(
-                    '%r expects a single <dir_pattern>' % action)
-
-            dir_pattern = convert_path(words[1])
-
-        else:
-            raise DistlibException('unknown action %r' % action)
-
-        return action, patterns, thedir, dir_pattern
-
-    def _include_pattern(self, pattern, anchor=True, prefix=None,
-                         is_regex=False):
-        """Select strings (presumably filenames) from 'self.files' that
-        match 'pattern', a Unix-style wildcard (glob) pattern.
- - Patterns are not quite the same as implemented by the 'fnmatch' - module: '*' and '?' match non-special characters, where "special" - is platform-dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found. - """ - # XXX docstring lying about what the special chars are? - found = False - pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) - - # delayed loading of allfiles list - if self.allfiles is None: - self.findall() - - for name in self.allfiles: - if pattern_re.search(name): - self.files.add(name) - found = True - return found - - def _exclude_pattern(self, pattern, anchor=True, prefix=None, - is_regex=False): - """Remove strings (presumably filenames) from 'files' that match - 'pattern'. - - Other parameters are the same as for 'include_pattern()', above. - The list 'self.files' is modified in place. Return True if files are - found. - - This API is public to allow e.g. exclusion of SCM subdirs, e.g. when - packaging source distributions - """ - found = False - pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) - for f in list(self.files): - if pattern_re.search(f): - self.files.remove(f) - found = True - return found - - def _translate_pattern(self, pattern, anchor=True, prefix=None, - is_regex=False): - """Translate a shell-like wildcard pattern to a compiled regular - expression. - - Return the compiled regex. If 'is_regex' true, - then 'pattern' is directly compiled to a regex (if it's a string) - or just returned as-is (assumes it's a regex object). 
- """ - if is_regex: - if isinstance(pattern, str): - return re.compile(pattern) - else: - return pattern - - if _PYTHON_VERSION > (3, 2): - # ditch start and end characters - start, _, end = self._glob_to_re('_').partition('_') - - if pattern: - pattern_re = self._glob_to_re(pattern) - if _PYTHON_VERSION > (3, 2): - assert pattern_re.startswith(start) and pattern_re.endswith(end) - else: - pattern_re = '' - - base = re.escape(os.path.join(self.base, '')) - if prefix is not None: - # ditch end of pattern character - if _PYTHON_VERSION <= (3, 2): - empty_pattern = self._glob_to_re('') - prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] - else: - prefix_re = self._glob_to_re(prefix) - assert prefix_re.startswith(start) and prefix_re.endswith(end) - prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] - sep = os.sep - if os.sep == '\\': - sep = r'\\' - if _PYTHON_VERSION <= (3, 2): - pattern_re = '^' + base + sep.join((prefix_re, - '.*' + pattern_re)) - else: - pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] - pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, - pattern_re, end) - else: # no prefix -- respect anchor flag - if anchor: - if _PYTHON_VERSION <= (3, 2): - pattern_re = '^' + base + pattern_re - else: - pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) - - return re.compile(pattern_re) - - def _glob_to_re(self, pattern): - """Translate a shell-like glob pattern to a regular expression. - - Return a string containing the regex. Differs from - 'fnmatch.translate()' in that '*' does not match "special characters" - (which are platform-specific). - """ - pattern_re = fnmatch.translate(pattern) - - # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which - # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, - # and by extension they shouldn't match such "special characters" under - # any OS. So change all non-escaped dots in the RE to match any - # character except the special characters (currently: just os.sep). - sep = os.sep - if os.sep == '\\': - # we're using a regex to manipulate a regex, so we need - # to escape the backslash twice - sep = r'\\\\' - escaped = r'\1[^%s]' % sep - pattern_re = re.sub(r'((? y, - '!=': lambda x, y: x != y, - '<': lambda x, y: x < y, - '<=': lambda x, y: x == y or x < y, - '>': lambda x, y: x > y, - '>=': lambda x, y: x == y or x > y, - 'and': lambda x, y: x and y, - 'or': lambda x, y: x or y, - 'in': lambda x, y: x in y, - 'not in': lambda x, y: x not in y, - } - - def evaluate(self, expr, context): - """ - Evaluate a marker expression returned by the :func:`parse_requirement` - function in the specified context. 
- """ - if isinstance(expr, string_types): - if expr[0] in '\'"': - result = expr[1:-1] - else: - if expr not in context: - raise SyntaxError('unknown variable: %s' % expr) - result = context[expr] - else: - assert isinstance(expr, dict) - op = expr['op'] - if op not in self.operations: - raise NotImplementedError('op not implemented: %s' % op) - elhs = expr['lhs'] - erhs = expr['rhs'] - if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): - raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) - - lhs = self.evaluate(elhs, context) - rhs = self.evaluate(erhs, context) - if ((elhs == 'python_version' or erhs == 'python_version') and - op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): - lhs = NV(lhs) - rhs = NV(rhs) - elif elhs == 'python_version' and op in ('in', 'not in'): - lhs = NV(lhs) - rhs = _get_versions(rhs) - result = self.operations[op](lhs, rhs) - return result - -def default_context(): - def format_full_version(info): - version = '%s.%s.%s' % (info.major, info.minor, info.micro) - kind = info.releaselevel - if kind != 'final': - version += kind[0] + str(info.serial) - return version - - if hasattr(sys, 'implementation'): - implementation_version = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - else: - implementation_version = '0' - implementation_name = '' - - result = { - 'implementation_name': implementation_name, - 'implementation_version': implementation_version, - 'os_name': os.name, - 'platform_machine': platform.machine(), - 'platform_python_implementation': platform.python_implementation(), - 'platform_release': platform.release(), - 'platform_system': platform.system(), - 'platform_version': platform.version(), - 'platform_in_venv': str(in_venv()), - 'python_full_version': platform.python_version(), - 'python_version': platform.python_version()[:3], - 'sys_platform': sys.platform, - } - return result - -DEFAULT_CONTEXT = default_context() -del default_context - -evaluator = Evaluator() - -def interpret(marker, execution_context=None): - """ - Interpret a marker and return a result depending on environment. - - :param marker: The marker to interpret. - :type marker: str - :param execution_context: The context used for name lookup. - :type execution_context: mapping - """ - try: - expr, rest = parse_marker(marker) - except Exception as e: - raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) - if rest and rest[0] != '#': - raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) - context = dict(DEFAULT_CONTEXT) - if execution_context: - context.update(execution_context) - return evaluator.evaluate(expr, context) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/metadata.py b/venv/Lib/site-packages/pip/_vendor/distlib/metadata.py deleted file mode 100644 index 6a26b0a..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/metadata.py +++ /dev/null @@ -1,1058 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -"""Implementation of the Metadata for Python packages PEPs. - -Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). -""" -from __future__ import unicode_literals - -import codecs -from email import message_from_file -import json -import logging -import re - - -from . 
import DistlibException, __version__ -from .compat import StringIO, string_types, text_type -from .markers import interpret -from .util import extract_by_key, get_extras -from .version import get_scheme, PEP440_VERSION_RE - -logger = logging.getLogger(__name__) - - -class MetadataMissingError(DistlibException): - """A required metadata is missing""" - - -class MetadataConflictError(DistlibException): - """Attempt to read or write metadata fields that are conflictual.""" - - -class MetadataUnrecognizedVersionError(DistlibException): - """Unknown metadata version number.""" - - -class MetadataInvalidError(DistlibException): - """A metadata value is invalid""" - -# public API of this module -__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] - -# Encoding used for the PKG-INFO files -PKG_INFO_ENCODING = 'utf-8' - -# preferred version. Hopefully will be changed -# to 1.2 once PEP 345 is supported everywhere -PKG_INFO_PREFERRED_VERSION = '1.1' - -_LINE_PREFIX_1_2 = re.compile('\n \\|') -_LINE_PREFIX_PRE_1_2 = re.compile('\n ') -_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License') - -_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License', 'Classifier', 'Download-URL', 'Obsoletes', - 'Provides', 'Requires') - -_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', - 'Download-URL') - -_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', - 'Requires-Python', 'Requires-External') - -_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', - 'Obsoletes-Dist', 'Requires-External', 'Maintainer', - 'Maintainer-email', 'Project-URL') - -_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', - 'Requires-Python', 'Requires-External', 'Private-Version', - 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', - 'Provides-Extra') - -_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', - 'Setup-Requires-Dist', 'Extension') - -# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in -# the metadata. Include them in the tuple literal below to allow them -# (for now). -# Ditto for Obsoletes - see issue #140. 
-_566_FIELDS = _426_FIELDS + ('Description-Content-Type', - 'Requires', 'Provides', 'Obsoletes') - -_566_MARKERS = ('Description-Content-Type',) - -_ALL_FIELDS = set() -_ALL_FIELDS.update(_241_FIELDS) -_ALL_FIELDS.update(_314_FIELDS) -_ALL_FIELDS.update(_345_FIELDS) -_ALL_FIELDS.update(_426_FIELDS) -_ALL_FIELDS.update(_566_FIELDS) - -EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') - - -def _version2fieldlist(version): - if version == '1.0': - return _241_FIELDS - elif version == '1.1': - return _314_FIELDS - elif version == '1.2': - return _345_FIELDS - elif version in ('1.3', '2.1'): - # avoid adding field names if already there - return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) - elif version == '2.0': - return _426_FIELDS - raise MetadataUnrecognizedVersionError(version) - - -def _best_version(fields): - """Detect the best version depending on the fields used.""" - def _has_marker(keys, markers): - for marker in markers: - if marker in keys: - return True - return False - - keys = [] - for key, value in fields.items(): - if value in ([], 'UNKNOWN', None): - continue - keys.append(key) - - possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] - - # first let's try to see if a field is not part of one of the version - for key in keys: - if key not in _241_FIELDS and '1.0' in possible_versions: - possible_versions.remove('1.0') - logger.debug('Removed 1.0 due to %s', key) - if key not in _314_FIELDS and '1.1' in possible_versions: - possible_versions.remove('1.1') - logger.debug('Removed 1.1 due to %s', key) - if key not in _345_FIELDS and '1.2' in possible_versions: - possible_versions.remove('1.2') - logger.debug('Removed 1.2 due to %s', key) - if key not in _566_FIELDS and '1.3' in possible_versions: - possible_versions.remove('1.3') - logger.debug('Removed 1.3 due to %s', key) - if key not in _566_FIELDS and '2.1' in possible_versions: - if key != 'Description': # In 2.1, description allowed after headers - possible_versions.remove('2.1') - logger.debug('Removed 2.1 due to %s', key) - if key not in _426_FIELDS and '2.0' in possible_versions: - possible_versions.remove('2.0') - logger.debug('Removed 2.0 due to %s', key) - - # possible_version contains qualified versions - if len(possible_versions) == 1: - return possible_versions[0] # found ! 
- elif len(possible_versions) == 0: - logger.debug('Out of options - unknown metadata set: %s', fields) - raise MetadataConflictError('Unknown metadata set') - - # let's see if one unique marker is found - is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) - is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) - is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) - is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) - if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: - raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') - - # we have the choice, 1.0, or 1.2, or 2.0 - # - 1.0 has a broken Summary field but works with all tools - # - 1.1 is to avoid - # - 1.2 fixes Summary but has little adoption - # - 2.0 adds more features and is very new - if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: - # we couldn't find any specific marker - if PKG_INFO_PREFERRED_VERSION in possible_versions: - return PKG_INFO_PREFERRED_VERSION - if is_1_1: - return '1.1' - if is_1_2: - return '1.2' - if is_2_1: - return '2.1' - - return '2.0' - -# This follows the rules about transforming keys as described in -# https://www.python.org/dev/peps/pep-0566/#id17 -_ATTR2FIELD = { - name.lower().replace("-", "_"): name for name in _ALL_FIELDS -} -_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} - -_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') -_VERSIONS_FIELDS = ('Requires-Python',) -_VERSION_FIELDS = ('Version',) -_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', - 'Requires', 'Provides', 'Obsoletes-Dist', - 'Provides-Dist', 'Requires-Dist', 'Requires-External', - 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', - 'Provides-Extra', 'Extension') -_LISTTUPLEFIELDS = ('Project-URL',) - -_ELEMENTSFIELD = ('Keywords',) - -_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') - -_MISSING = object() - -_FILESAFE = re.compile('[^A-Za-z0-9.]+') - - -def _get_name_and_version(name, version, for_filename=False): - """Return the distribution name with version. - - If for_filename is true, return a filename-escaped form.""" - if for_filename: - # For both name and version any runs of non-alphanumeric or '.' - # characters are replaced with a single '-'. Additionally any - # spaces in the version string become '.' - name = _FILESAFE.sub('-', name) - version = _FILESAFE.sub('-', version.replace(' ', '.')) - return '%s-%s' % (name, version) - - -class LegacyMetadata(object): - """The legacy metadata of a release. - - Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can - instantiate the class with one of these arguments (or none): - - *path*, the path to a metadata file - - *fileobj* give a file-like object with metadata as content - - *mapping* is a dict-like object - - *scheme* is a version scheme name - """ - # TODO document the mapping API and UNKNOWN default key - - def __init__(self, path=None, fileobj=None, mapping=None, - scheme='default'): - if [path, fileobj, mapping].count(None) < 2: - raise TypeError('path, fileobj and mapping are exclusive') - self._fields = {} - self.requires_files = [] - self._dependencies = None - self.scheme = scheme - if path is not None: - self.read(path) - elif fileobj is not None: - self.read_file(fileobj) - elif mapping is not None: - self.update(mapping) - self.set_metadata_version() - - def set_metadata_version(self): - self._fields['Metadata-Version'] = _best_version(self._fields) - - def _write_field(self, fileobj, name, value): - fileobj.write('%s: %s\n' % (name, value)) - - def __getitem__(self, name): - return self.get(name) - - def __setitem__(self, name, value): - return self.set(name, value) - - def __delitem__(self, name): - field_name = self._convert_name(name) - try: - del self._fields[field_name] - except KeyError: - raise KeyError(name) - - def __contains__(self, name): - return (name in self._fields or - self._convert_name(name) in self._fields) - - def _convert_name(self, name): - if name in _ALL_FIELDS: - return name - name = name.replace('-', '_').lower() - return _ATTR2FIELD.get(name, name) - - def _default_value(self, name): - if name in _LISTFIELDS or name in _ELEMENTSFIELD: - return [] - return 'UNKNOWN' - - def _remove_line_prefix(self, value): - if self.metadata_version in ('1.0', '1.1'): - return _LINE_PREFIX_PRE_1_2.sub('\n', value) - else: - return _LINE_PREFIX_1_2.sub('\n', value) - - def __getattr__(self, name): - if name in _ATTR2FIELD: - return self[name] - raise AttributeError(name) - - # - # Public API - # - -# dependencies = property(_get_dependencies, _set_dependencies) - - def get_fullname(self, filesafe=False): - """Return the distribution name with version. 
- - If filesafe is true, return a filename-escaped form.""" - return _get_name_and_version(self['Name'], self['Version'], filesafe) - - def is_field(self, name): - """return True if name is a valid metadata key""" - name = self._convert_name(name) - return name in _ALL_FIELDS - - def is_multi_field(self, name): - name = self._convert_name(name) - return name in _LISTFIELDS - - def read(self, filepath): - """Read the metadata values from a file path.""" - fp = codecs.open(filepath, 'r', encoding='utf-8') - try: - self.read_file(fp) - finally: - fp.close() - - def read_file(self, fileob): - """Read the metadata values from a file object.""" - msg = message_from_file(fileob) - self._fields['Metadata-Version'] = msg['metadata-version'] - - # When reading, get all the fields we can - for field in _ALL_FIELDS: - if field not in msg: - continue - if field in _LISTFIELDS: - # we can have multiple lines - values = msg.get_all(field) - if field in _LISTTUPLEFIELDS and values is not None: - values = [tuple(value.split(',')) for value in values] - self.set(field, values) - else: - # single line - value = msg[field] - if value is not None and value != 'UNKNOWN': - self.set(field, value) - - # PEP 566 specifies that the body be used for the description, if - # available - body = msg.get_payload() - self["Description"] = body if body else self["Description"] - # logger.debug('Attempting to set metadata for %s', self) - # self.set_metadata_version() - - def write(self, filepath, skip_unknown=False): - """Write the metadata fields to filepath.""" - fp = codecs.open(filepath, 'w', encoding='utf-8') - try: - self.write_file(fp, skip_unknown) - finally: - fp.close() - - def write_file(self, fileobject, skip_unknown=False): - """Write the PKG-INFO format data to a file object.""" - self.set_metadata_version() - - for field in _version2fieldlist(self['Metadata-Version']): - values = self.get(field) - if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): - continue - if field in _ELEMENTSFIELD: - self._write_field(fileobject, field, ','.join(values)) - continue - if field not in _LISTFIELDS: - if field == 'Description': - if self.metadata_version in ('1.0', '1.1'): - values = values.replace('\n', '\n ') - else: - values = values.replace('\n', '\n |') - values = [values] - - if field in _LISTTUPLEFIELDS: - values = [','.join(value) for value in values] - - for value in values: - self._write_field(fileobject, field, value) - - def update(self, other=None, **kwargs): - """Set metadata values from the given iterable `other` and kwargs. - - Behavior is like `dict.update`: If `other` has a ``keys`` method, - they are looped over and ``self[key]`` is assigned ``other[key]``. - Else, ``other`` is an iterable of ``(key, value)`` iterables. - - Keys that don't match a metadata field or that have an empty value are - dropped. 
- """ - def _set(key, value): - if key in _ATTR2FIELD and value: - self.set(self._convert_name(key), value) - - if not other: - # other is None or empty container - pass - elif hasattr(other, 'keys'): - for k in other.keys(): - _set(k, other[k]) - else: - for k, v in other: - _set(k, v) - - if kwargs: - for k, v in kwargs.items(): - _set(k, v) - - def set(self, name, value): - """Control then set a metadata field.""" - name = self._convert_name(name) - - if ((name in _ELEMENTSFIELD or name == 'Platform') and - not isinstance(value, (list, tuple))): - if isinstance(value, string_types): - value = [v.strip() for v in value.split(',')] - else: - value = [] - elif (name in _LISTFIELDS and - not isinstance(value, (list, tuple))): - if isinstance(value, string_types): - value = [value] - else: - value = [] - - if logger.isEnabledFor(logging.WARNING): - project_name = self['Name'] - - scheme = get_scheme(self.scheme) - if name in _PREDICATE_FIELDS and value is not None: - for v in value: - # check that the values are valid - if not scheme.is_valid_matcher(v.split(';')[0]): - logger.warning( - "'%s': '%s' is not valid (field '%s')", - project_name, v, name) - # FIXME this rejects UNKNOWN, is that right? - elif name in _VERSIONS_FIELDS and value is not None: - if not scheme.is_valid_constraint_list(value): - logger.warning("'%s': '%s' is not a valid version (field '%s')", - project_name, value, name) - elif name in _VERSION_FIELDS and value is not None: - if not scheme.is_valid_version(value): - logger.warning("'%s': '%s' is not a valid version (field '%s')", - project_name, value, name) - - if name in _UNICODEFIELDS: - if name == 'Description': - value = self._remove_line_prefix(value) - - self._fields[name] = value - - def get(self, name, default=_MISSING): - """Get a metadata field.""" - name = self._convert_name(name) - if name not in self._fields: - if default is _MISSING: - default = self._default_value(name) - return default - if name in _UNICODEFIELDS: - value = self._fields[name] - return value - elif name in _LISTFIELDS: - value = self._fields[name] - if value is None: - return [] - res = [] - for val in value: - if name not in _LISTTUPLEFIELDS: - res.append(val) - else: - # That's for Project-URL - res.append((val[0], val[1])) - return res - - elif name in _ELEMENTSFIELD: - value = self._fields[name] - if isinstance(value, string_types): - return value.split(',') - return self._fields[name] - - def check(self, strict=False): - """Check if the metadata is compliant. 
If strict is True then raise if - no Name or Version are provided""" - self.set_metadata_version() - - # XXX should check the versions (if the file was loaded) - missing, warnings = [], [] - - for attr in ('Name', 'Version'): # required by PEP 345 - if attr not in self: - missing.append(attr) - - if strict and missing != []: - msg = 'missing required metadata: %s' % ', '.join(missing) - raise MetadataMissingError(msg) - - for attr in ('Home-page', 'Author'): - if attr not in self: - missing.append(attr) - - # checking metadata 1.2 (XXX needs to check 1.1, 1.0) - if self['Metadata-Version'] != '1.2': - return missing, warnings - - scheme = get_scheme(self.scheme) - - def are_valid_constraints(value): - for v in value: - if not scheme.is_valid_matcher(v.split(';')[0]): - return False - return True - - for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), - (_VERSIONS_FIELDS, - scheme.is_valid_constraint_list), - (_VERSION_FIELDS, - scheme.is_valid_version)): - for field in fields: - value = self.get(field, None) - if value is not None and not controller(value): - warnings.append("Wrong value for '%s': %s" % (field, value)) - - return missing, warnings - - def todict(self, skip_missing=False): - """Return fields as a dict. - - Field names will be converted to use the underscore-lowercase style - instead of hyphen-mixed case (i.e. home_page instead of Home-page). - This is as per https://www.python.org/dev/peps/pep-0566/#id17. - """ - self.set_metadata_version() - - fields = _version2fieldlist(self['Metadata-Version']) - - data = {} - - for field_name in fields: - if not skip_missing or field_name in self._fields: - key = _FIELD2ATTR[field_name] - if key != 'project_url': - data[key] = self[field_name] - else: - data[key] = [','.join(u) for u in self[field_name]] - - return data - - def add_requirements(self, requirements): - if self['Metadata-Version'] == '1.1': - # we can't have 1.1 metadata *and* Setuptools requires - for field in ('Obsoletes', 'Requires', 'Provides'): - if field in self: - del self[field] - self['Requires-Dist'] += requirements - - # Mapping API - # TODO could add iter* variants - - def keys(self): - return list(_version2fieldlist(self['Metadata-Version'])) - - def __iter__(self): - for key in self.keys(): - yield key - - def values(self): - return [self[key] for key in self.keys()] - - def items(self): - return [(key, self[key]) for key in self.keys()] - - def __repr__(self): - return '<%s %s %s>' % (self.__class__.__name__, self.name, - self.version) - - -METADATA_FILENAME = 'pydist.json' -WHEEL_METADATA_FILENAME = 'metadata.json' -LEGACY_METADATA_FILENAME = 'METADATA' - - -class Metadata(object): - """ - The metadata of a release. This implementation uses 2.0 (JSON) - metadata where possible. If not possible, it wraps a LegacyMetadata - instance which handles the key-value metadata format. 
- """ - - METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') - - NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) - - VERSION_MATCHER = PEP440_VERSION_RE - - SUMMARY_MATCHER = re.compile('.{1,2047}') - - METADATA_VERSION = '2.0' - - GENERATOR = 'distlib (%s)' % __version__ - - MANDATORY_KEYS = { - 'name': (), - 'version': (), - 'summary': ('legacy',), - } - - INDEX_KEYS = ('name version license summary description author ' - 'author_email keywords platform home_page classifiers ' - 'download_url') - - DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' - 'dev_requires provides meta_requires obsoleted_by ' - 'supports_environments') - - SYNTAX_VALIDATORS = { - 'metadata_version': (METADATA_VERSION_MATCHER, ()), - 'name': (NAME_MATCHER, ('legacy',)), - 'version': (VERSION_MATCHER, ('legacy',)), - 'summary': (SUMMARY_MATCHER, ('legacy',)), - } - - __slots__ = ('_legacy', '_data', 'scheme') - - def __init__(self, path=None, fileobj=None, mapping=None, - scheme='default'): - if [path, fileobj, mapping].count(None) < 2: - raise TypeError('path, fileobj and mapping are exclusive') - self._legacy = None - self._data = None - self.scheme = scheme - #import pdb; pdb.set_trace() - if mapping is not None: - try: - self._validate_mapping(mapping, scheme) - self._data = mapping - except MetadataUnrecognizedVersionError: - self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) - self.validate() - else: - data = None - if path: - with open(path, 'rb') as f: - data = f.read() - elif fileobj: - data = fileobj.read() - if data is None: - # Initialised with no args - to be added - self._data = { - 'metadata_version': self.METADATA_VERSION, - 'generator': self.GENERATOR, - } - else: - if not isinstance(data, text_type): - data = data.decode('utf-8') - try: - self._data = json.loads(data) - self._validate_mapping(self._data, scheme) - except ValueError: - # Note: MetadataUnrecognizedVersionError does not - # inherit from ValueError (it's a DistlibException, - # which should not inherit from ValueError). 
- # The ValueError comes from the json.load - if that - # succeeds and we get a validation error, we want - # that to propagate - self._legacy = LegacyMetadata(fileobj=StringIO(data), - scheme=scheme) - self.validate() - - common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) - - none_list = (None, list) - none_dict = (None, dict) - - mapped_keys = { - 'run_requires': ('Requires-Dist', list), - 'build_requires': ('Setup-Requires-Dist', list), - 'dev_requires': none_list, - 'test_requires': none_list, - 'meta_requires': none_list, - 'extras': ('Provides-Extra', list), - 'modules': none_list, - 'namespaces': none_list, - 'exports': none_dict, - 'commands': none_dict, - 'classifiers': ('Classifier', list), - 'source_url': ('Download-URL', None), - 'metadata_version': ('Metadata-Version', None), - } - - del none_list, none_dict - - def __getattribute__(self, key): - common = object.__getattribute__(self, 'common_keys') - mapped = object.__getattribute__(self, 'mapped_keys') - if key in mapped: - lk, maker = mapped[key] - if self._legacy: - if lk is None: - result = None if maker is None else maker() - else: - result = self._legacy.get(lk) - else: - value = None if maker is None else maker() - if key not in ('commands', 'exports', 'modules', 'namespaces', - 'classifiers'): - result = self._data.get(key, value) - else: - # special cases for PEP 459 - sentinel = object() - result = sentinel - d = self._data.get('extensions') - if d: - if key == 'commands': - result = d.get('python.commands', value) - elif key == 'classifiers': - d = d.get('python.details') - if d: - result = d.get(key, value) - else: - d = d.get('python.exports') - if not d: - d = self._data.get('python.exports') - if d: - result = d.get(key, value) - if result is sentinel: - result = value - elif key not in common: - result = object.__getattribute__(self, key) - elif self._legacy: - result = self._legacy.get(key) - else: - result = self._data.get(key) - return result - - def _validate_value(self, key, value, scheme=None): - if key in self.SYNTAX_VALIDATORS: - pattern, exclusions = self.SYNTAX_VALIDATORS[key] - if (scheme or self.scheme) not in exclusions: - m = pattern.match(value) - if not m: - raise MetadataInvalidError("'%s' is an invalid value for " - "the '%s' property" % (value, - key)) - - def __setattr__(self, key, value): - self._validate_value(key, value) - common = object.__getattribute__(self, 'common_keys') - mapped = object.__getattribute__(self, 'mapped_keys') - if key in mapped: - lk, _ = mapped[key] - if self._legacy: - if lk is None: - raise NotImplementedError - self._legacy[lk] = value - elif key not in ('commands', 'exports', 'modules', 'namespaces', - 'classifiers'): - self._data[key] = value - else: - # special cases for PEP 459 - d = self._data.setdefault('extensions', {}) - if key == 'commands': - d['python.commands'] = value - elif key == 'classifiers': - d = d.setdefault('python.details', {}) - d[key] = value - else: - d = d.setdefault('python.exports', {}) - d[key] = value - elif key not in common: - object.__setattr__(self, key, value) - else: - if key == 'keywords': - if isinstance(value, string_types): - value = value.strip() - if value: - value = value.split() - else: - value = [] - if self._legacy: - self._legacy[key] = value - else: - self._data[key] = value - - @property - def name_and_version(self): - return _get_name_and_version(self.name, self.version, True) - - @property - def provides(self): - if self._legacy: - result = self._legacy['Provides-Dist'] - else: - result = 
self._data.setdefault('provides', []) - s = '%s (%s)' % (self.name, self.version) - if s not in result: - result.append(s) - return result - - @provides.setter - def provides(self, value): - if self._legacy: - self._legacy['Provides-Dist'] = value - else: - self._data['provides'] = value - - def get_requirements(self, reqts, extras=None, env=None): - """ - Base method to get dependencies, given a set of extras - to satisfy and an optional environment context. - :param reqts: A list of sometimes-wanted dependencies, - perhaps dependent on extras and environment. - :param extras: A list of optional components being requested. - :param env: An optional environment for marker evaluation. - """ - if self._legacy: - result = reqts - else: - result = [] - extras = get_extras(extras or [], self.extras) - for d in reqts: - if 'extra' not in d and 'environment' not in d: - # unconditional - include = True - else: - if 'extra' not in d: - # Not extra-dependent - only environment-dependent - include = True - else: - include = d.get('extra') in extras - if include: - # Not excluded because of extras, check environment - marker = d.get('environment') - if marker: - include = interpret(marker, env) - if include: - result.extend(d['requires']) - for key in ('build', 'dev', 'test'): - e = ':%s:' % key - if e in extras: - extras.remove(e) - # A recursive call, but it should terminate since 'test' - # has been removed from the extras - reqts = self._data.get('%s_requires' % key, []) - result.extend(self.get_requirements(reqts, extras=extras, - env=env)) - return result - - @property - def dictionary(self): - if self._legacy: - return self._from_legacy() - return self._data - - @property - def dependencies(self): - if self._legacy: - raise NotImplementedError - else: - return extract_by_key(self._data, self.DEPENDENCY_KEYS) - - @dependencies.setter - def dependencies(self, value): - if self._legacy: - raise NotImplementedError - else: - self._data.update(value) - - def _validate_mapping(self, mapping, scheme): - if mapping.get('metadata_version') != self.METADATA_VERSION: - raise MetadataUnrecognizedVersionError() - missing = [] - for key, exclusions in self.MANDATORY_KEYS.items(): - if key not in mapping: - if scheme not in exclusions: - missing.append(key) - if missing: - msg = 'Missing metadata items: %s' % ', '.join(missing) - raise MetadataMissingError(msg) - for k, v in mapping.items(): - self._validate_value(k, v, scheme) - - def validate(self): - if self._legacy: - missing, warnings = self._legacy.check(True) - if missing or warnings: - logger.warning('Metadata: missing: %s, warnings: %s', - missing, warnings) - else: - self._validate_mapping(self._data, self.scheme) - - def todict(self): - if self._legacy: - return self._legacy.todict(True) - else: - result = extract_by_key(self._data, self.INDEX_KEYS) - return result - - def _from_legacy(self): - assert self._legacy and not self._data - result = { - 'metadata_version': self.METADATA_VERSION, - 'generator': self.GENERATOR, - } - lmd = self._legacy.todict(True) # skip missing ones - for k in ('name', 'version', 'license', 'summary', 'description', - 'classifier'): - if k in lmd: - if k == 'classifier': - nk = 'classifiers' - else: - nk = k - result[nk] = lmd[k] - kw = lmd.get('Keywords', []) - if kw == ['']: - kw = [] - result['keywords'] = kw - keys = (('requires_dist', 'run_requires'), - ('setup_requires_dist', 'build_requires')) - for ok, nk in keys: - if ok in lmd and lmd[ok]: - result[nk] = [{'requires': lmd[ok]}] - result['provides'] = 
self.provides - author = {} - maintainer = {} - return result - - LEGACY_MAPPING = { - 'name': 'Name', - 'version': 'Version', - ('extensions', 'python.details', 'license'): 'License', - 'summary': 'Summary', - 'description': 'Description', - ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', - ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', - ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', - 'source_url': 'Download-URL', - ('extensions', 'python.details', 'classifiers'): 'Classifier', - } - - def _to_legacy(self): - def process_entries(entries): - reqts = set() - for e in entries: - extra = e.get('extra') - env = e.get('environment') - rlist = e['requires'] - for r in rlist: - if not env and not extra: - reqts.add(r) - else: - marker = '' - if extra: - marker = 'extra == "%s"' % extra - if env: - if marker: - marker = '(%s) and %s' % (env, marker) - else: - marker = env - reqts.add(';'.join((r, marker))) - return reqts - - assert self._data and not self._legacy - result = LegacyMetadata() - nmd = self._data - # import pdb; pdb.set_trace() - for nk, ok in self.LEGACY_MAPPING.items(): - if not isinstance(nk, tuple): - if nk in nmd: - result[ok] = nmd[nk] - else: - d = nmd - found = True - for k in nk: - try: - d = d[k] - except (KeyError, IndexError): - found = False - break - if found: - result[ok] = d - r1 = process_entries(self.run_requires + self.meta_requires) - r2 = process_entries(self.build_requires + self.dev_requires) - if self.extras: - result['Provides-Extra'] = sorted(self.extras) - result['Requires-Dist'] = sorted(r1) - result['Setup-Requires-Dist'] = sorted(r2) - # TODO: any other fields wanted - return result - - def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): - if [path, fileobj].count(None) != 1: - raise ValueError('Exactly one of path and fileobj is needed') - self.validate() - if legacy: - if self._legacy: - legacy_md = self._legacy - else: - legacy_md = self._to_legacy() - if path: - legacy_md.write(path, skip_unknown=skip_unknown) - else: - legacy_md.write_file(fileobj, skip_unknown=skip_unknown) - else: - if self._legacy: - d = self._from_legacy() - else: - d = self._data - if fileobj: - json.dump(d, fileobj, ensure_ascii=True, indent=2, - sort_keys=True) - else: - with codecs.open(path, 'w', 'utf-8') as f: - json.dump(d, f, ensure_ascii=True, indent=2, - sort_keys=True) - - def add_requirements(self, requirements): - if self._legacy: - self._legacy.add_requirements(requirements) - else: - run_requires = self._data.setdefault('run_requires', []) - always = None - for entry in run_requires: - if 'environment' not in entry and 'extra' not in entry: - always = entry - break - if always is None: - always = { 'requires': requirements } - run_requires.insert(0, always) - else: - rset = set(always['requires']) | set(requirements) - always['requires'] = sorted(rset) - - def __repr__(self): - name = self.name or '(no name)' - version = self.version or 'no version' - return '<%s %s %s (%s)>' % (self.__class__.__name__, - self.metadata_version, name, version) diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/resources.py b/venv/Lib/site-packages/pip/_vendor/distlib/resources.py deleted file mode 100644 index fef52aa..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/resources.py +++ /dev/null @@ -1,358 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013-2017 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. 
-# See LICENSE.txt and CONTRIBUTORS.txt. -# -from __future__ import unicode_literals - -import bisect -import io -import logging -import os -import pkgutil -import sys -import types -import zipimport - -from . import DistlibException -from .util import cached_property, get_cache_base, Cache - -logger = logging.getLogger(__name__) - - -cache = None # created when needed - - -class ResourceCache(Cache): - def __init__(self, base=None): - if base is None: - # Use native string to avoid issues on 2.x: see Python #20140. - base = os.path.join(get_cache_base(), str('resource-cache')) - super(ResourceCache, self).__init__(base) - - def is_stale(self, resource, path): - """ - Is the cache stale for the given resource? - - :param resource: The :class:`Resource` being cached. - :param path: The path of the resource in the cache. - :return: True if the cache is stale. - """ - # Cache invalidation is a hard problem :-) - return True - - def get(self, resource): - """ - Get a resource into the cache, - - :param resource: A :class:`Resource` instance. - :return: The pathname of the resource in the cache. - """ - prefix, path = resource.finder.get_cache_info(resource) - if prefix is None: - result = path - else: - result = os.path.join(self.base, self.prefix_to_dir(prefix), path) - dirname = os.path.dirname(result) - if not os.path.isdir(dirname): - os.makedirs(dirname) - if not os.path.exists(result): - stale = True - else: - stale = self.is_stale(resource, path) - if stale: - # write the bytes of the resource to the cache location - with open(result, 'wb') as f: - f.write(resource.bytes) - return result - - -class ResourceBase(object): - def __init__(self, finder, name): - self.finder = finder - self.name = name - - -class Resource(ResourceBase): - """ - A class representing an in-package resource, such as a data file. This is - not normally instantiated by user code, but rather by a - :class:`ResourceFinder` which manages the resource. - """ - is_container = False # Backwards compatibility - - def as_stream(self): - """ - Get the resource as a stream. - - This is not a property to make it obvious that it returns a new stream - each time. - """ - return self.finder.get_stream(self) - - @cached_property - def file_path(self): - global cache - if cache is None: - cache = ResourceCache() - return cache.get(self) - - @cached_property - def bytes(self): - return self.finder.get_bytes(self) - - @cached_property - def size(self): - return self.finder.get_size(self) - - -class ResourceContainer(ResourceBase): - is_container = True # Backwards compatibility - - @cached_property - def resources(self): - return self.finder.get_resources(self) - - -class ResourceFinder(object): - """ - Resource finder for file system resources. 
- """ - - if sys.platform.startswith('java'): - skipped_extensions = ('.pyc', '.pyo', '.class') - else: - skipped_extensions = ('.pyc', '.pyo') - - def __init__(self, module): - self.module = module - self.loader = getattr(module, '__loader__', None) - self.base = os.path.dirname(getattr(module, '__file__', '')) - - def _adjust_path(self, path): - return os.path.realpath(path) - - def _make_path(self, resource_name): - # Issue #50: need to preserve type of path on Python 2.x - # like os.path._get_sep - if isinstance(resource_name, bytes): # should only happen on 2.x - sep = b'/' - else: - sep = '/' - parts = resource_name.split(sep) - parts.insert(0, self.base) - result = os.path.join(*parts) - return self._adjust_path(result) - - def _find(self, path): - return os.path.exists(path) - - def get_cache_info(self, resource): - return None, resource.path - - def find(self, resource_name): - path = self._make_path(resource_name) - if not self._find(path): - result = None - else: - if self._is_directory(path): - result = ResourceContainer(self, resource_name) - else: - result = Resource(self, resource_name) - result.path = path - return result - - def get_stream(self, resource): - return open(resource.path, 'rb') - - def get_bytes(self, resource): - with open(resource.path, 'rb') as f: - return f.read() - - def get_size(self, resource): - return os.path.getsize(resource.path) - - def get_resources(self, resource): - def allowed(f): - return (f != '__pycache__' and not - f.endswith(self.skipped_extensions)) - return set([f for f in os.listdir(resource.path) if allowed(f)]) - - def is_container(self, resource): - return self._is_directory(resource.path) - - _is_directory = staticmethod(os.path.isdir) - - def iterator(self, resource_name): - resource = self.find(resource_name) - if resource is not None: - todo = [resource] - while todo: - resource = todo.pop(0) - yield resource - if resource.is_container: - rname = resource.name - for name in resource.resources: - if not rname: - new_name = name - else: - new_name = '/'.join([rname, name]) - child = self.find(new_name) - if child.is_container: - todo.append(child) - else: - yield child - - -class ZipResourceFinder(ResourceFinder): - """ - Resource finder for resources in .zip files. 
- """ - def __init__(self, module): - super(ZipResourceFinder, self).__init__(module) - archive = self.loader.archive - self.prefix_len = 1 + len(archive) - # PyPy doesn't have a _files attr on zipimporter, and you can't set one - if hasattr(self.loader, '_files'): - self._files = self.loader._files - else: - self._files = zipimport._zip_directory_cache[archive] - self.index = sorted(self._files) - - def _adjust_path(self, path): - return path - - def _find(self, path): - path = path[self.prefix_len:] - if path in self._files: - result = True - else: - if path and path[-1] != os.sep: - path = path + os.sep - i = bisect.bisect(self.index, path) - try: - result = self.index[i].startswith(path) - except IndexError: - result = False - if not result: - logger.debug('_find failed: %r %r', path, self.loader.prefix) - else: - logger.debug('_find worked: %r %r', path, self.loader.prefix) - return result - - def get_cache_info(self, resource): - prefix = self.loader.archive - path = resource.path[1 + len(prefix):] - return prefix, path - - def get_bytes(self, resource): - return self.loader.get_data(resource.path) - - def get_stream(self, resource): - return io.BytesIO(self.get_bytes(resource)) - - def get_size(self, resource): - path = resource.path[self.prefix_len:] - return self._files[path][3] - - def get_resources(self, resource): - path = resource.path[self.prefix_len:] - if path and path[-1] != os.sep: - path += os.sep - plen = len(path) - result = set() - i = bisect.bisect(self.index, path) - while i < len(self.index): - if not self.index[i].startswith(path): - break - s = self.index[i][plen:] - result.add(s.split(os.sep, 1)[0]) # only immediate children - i += 1 - return result - - def _is_directory(self, path): - path = path[self.prefix_len:] - if path and path[-1] != os.sep: - path += os.sep - i = bisect.bisect(self.index, path) - try: - result = self.index[i].startswith(path) - except IndexError: - result = False - return result - - -_finder_registry = { - type(None): ResourceFinder, - zipimport.zipimporter: ZipResourceFinder -} - -try: - # In Python 3.6, _frozen_importlib -> _frozen_importlib_external - try: - import _frozen_importlib_external as _fi - except ImportError: - import _frozen_importlib as _fi - _finder_registry[_fi.SourceFileLoader] = ResourceFinder - _finder_registry[_fi.FileFinder] = ResourceFinder - # See issue #146 - _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder - del _fi -except (ImportError, AttributeError): - pass - - -def register_finder(loader, finder_maker): - _finder_registry[type(loader)] = finder_maker - - -_finder_cache = {} - - -def finder(package): - """ - Return a resource finder for a package. - :param package: The name of the package. - :return: A :class:`ResourceFinder` instance for the package. 
- """ - if package in _finder_cache: - result = _finder_cache[package] - else: - if package not in sys.modules: - __import__(package) - module = sys.modules[package] - path = getattr(module, '__path__', None) - if path is None: - raise DistlibException('You cannot get a finder for a module, ' - 'only for a package') - loader = getattr(module, '__loader__', None) - finder_maker = _finder_registry.get(type(loader)) - if finder_maker is None: - raise DistlibException('Unable to locate finder for %r' % package) - result = finder_maker(module) - _finder_cache[package] = result - return result - - -_dummy_module = types.ModuleType(str('__dummy__')) - - -def finder_for_path(path): - """ - Return a resource finder for a path, which should represent a container. - - :param path: The path. - :return: A :class:`ResourceFinder` instance for the path. - """ - result = None - # calls any path hooks, gets importer into cache - pkgutil.get_importer(path) - loader = sys.path_importer_cache.get(path) - finder = _finder_registry.get(type(loader)) - if finder: - module = _dummy_module - module.__file__ = os.path.join(path, '') - module.__loader__ = loader - result = finder(module) - return result diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/scripts.py b/venv/Lib/site-packages/pip/_vendor/distlib/scripts.py deleted file mode 100644 index 913912c..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/scripts.py +++ /dev/null @@ -1,429 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013-2015 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -from io import BytesIO -import logging -import os -import re -import struct -import sys - -from .compat import sysconfig, detect_encoding, ZipFile -from .resources import finder -from .util import (FileOperator, get_export_entry, convert_path, - get_executable, get_platform, in_venv) - -logger = logging.getLogger(__name__) - -_DEFAULT_MANIFEST = ''' - - - - - - - - - - - - -'''.strip() - -# check if Python is called on the first line with this expression -FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') -SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- -import re -import sys -from %(module)s import %(import_name)s -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(%(func)s()) -''' - - -def enquote_executable(executable): - if ' ' in executable: - # make sure we quote only the executable in case of env - # for example /usr/bin/env "/dir with spaces/bin/jython" - # instead of "/usr/bin/env /dir with spaces/bin/jython" - # otherwise whole - if executable.startswith('/usr/bin/env '): - env, _executable = executable.split(' ', 1) - if ' ' in _executable and not _executable.startswith('"'): - executable = '%s "%s"' % (env, _executable) - else: - if not executable.startswith('"'): - executable = '"%s"' % executable - return executable - -# Keep the old name around (for now), as there is at least one project using it! -_enquote_executable = enquote_executable - -class ScriptMaker(object): - """ - A class to copy or create scripts from source scripts or callable - specifications. 
- """ - script_template = SCRIPT_TEMPLATE - - executable = None # for shebangs - - def __init__(self, source_dir, target_dir, add_launchers=True, - dry_run=False, fileop=None): - self.source_dir = source_dir - self.target_dir = target_dir - self.add_launchers = add_launchers - self.force = False - self.clobber = False - # It only makes sense to set mode bits on POSIX. - self.set_mode = (os.name == 'posix') or (os.name == 'java' and - os._name == 'posix') - self.variants = set(('', 'X.Y')) - self._fileop = fileop or FileOperator(dry_run) - - self._is_nt = os.name == 'nt' or ( - os.name == 'java' and os._name == 'nt') - self.version_info = sys.version_info - - def _get_alternate_executable(self, executable, options): - if options.get('gui', False) and self._is_nt: # pragma: no cover - dn, fn = os.path.split(executable) - fn = fn.replace('python', 'pythonw') - executable = os.path.join(dn, fn) - return executable - - if sys.platform.startswith('java'): # pragma: no cover - def _is_shell(self, executable): - """ - Determine if the specified executable is a script - (contains a #! line) - """ - try: - with open(executable) as fp: - return fp.read(2) == '#!' - except (OSError, IOError): - logger.warning('Failed to open %s', executable) - return False - - def _fix_jython_executable(self, executable): - if self._is_shell(executable): - # Workaround for Jython is not needed on Linux systems. - import java - - if java.lang.System.getProperty('os.name') == 'Linux': - return executable - elif executable.lower().endswith('jython.exe'): - # Use wrapper exe for Jython on Windows - return executable - return '/usr/bin/env %s' % executable - - def _build_shebang(self, executable, post_interp): - """ - Build a shebang line. In the simple case (on Windows, or a shebang line - which is not too long or contains spaces) use a simple formulation for - the shebang. Otherwise, use /bin/sh as the executable, with a contrived - shebang which allows the script to run either under Python or sh, using - suitable quoting. Thanks to Harald Nordgren for his input. - - See also: http://www.in-ulm.de/~mascheck/various/shebang/#length - https://hg.mozilla.org/mozilla-central/file/tip/mach - """ - if os.name != 'posix': - simple_shebang = True - else: - # Add 3 for '#!' prefix and newline suffix. - shebang_length = len(executable) + len(post_interp) + 3 - if sys.platform == 'darwin': - max_shebang_length = 512 - else: - max_shebang_length = 127 - simple_shebang = ((b' ' not in executable) and - (shebang_length <= max_shebang_length)) - - if simple_shebang: - result = b'#!' 
+ executable + post_interp + b'\n' - else: - result = b'#!/bin/sh\n' - result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' - result += b"' '''" - return result - - def _get_shebang(self, encoding, post_interp=b'', options=None): - enquote = True - if self.executable: - executable = self.executable - enquote = False # assume this will be taken care of - elif not sysconfig.is_python_build(): - executable = get_executable() - elif in_venv(): # pragma: no cover - executable = os.path.join(sysconfig.get_path('scripts'), - 'python%s' % sysconfig.get_config_var('EXE')) - else: # pragma: no cover - executable = os.path.join( - sysconfig.get_config_var('BINDIR'), - 'python%s%s' % (sysconfig.get_config_var('VERSION'), - sysconfig.get_config_var('EXE'))) - if not os.path.isfile(executable): - # for Python builds from source on Windows, no Python executables with - # a version suffix are created, so we use python.exe - executable = os.path.join(sysconfig.get_config_var('BINDIR'), - 'python%s' % (sysconfig.get_config_var('EXE'))) - if options: - executable = self._get_alternate_executable(executable, options) - - if sys.platform.startswith('java'): # pragma: no cover - executable = self._fix_jython_executable(executable) - - # Normalise case for Windows - COMMENTED OUT - # executable = os.path.normcase(executable) - # N.B. The normalising operation above has been commented out: See - # issue #124. Although paths in Windows are generally case-insensitive, - # they aren't always. For example, a path containing a ẞ (which is a - # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a - # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by - # Windows as equivalent in path names. - - # If the user didn't specify an executable, it may be necessary to - # cater for executable paths with spaces (not uncommon on Windows) - if enquote: - executable = enquote_executable(executable) - # Issue #51: don't use fsencode, since we later try to - # check that the shebang is decodable using utf-8. - executable = executable.encode('utf-8') - # in case of IronPython, play safe and enable frames support - if (sys.platform == 'cli' and '-X:Frames' not in post_interp - and '-X:FullFrames' not in post_interp): # pragma: no cover - post_interp += b' -X:Frames' - shebang = self._build_shebang(executable, post_interp) - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: # pragma: no cover - raise ValueError( - 'The shebang (%r) is not decodable from utf-8' % shebang) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. 
- if encoding != 'utf-8': - try: - shebang.decode(encoding) - except UnicodeDecodeError: # pragma: no cover - raise ValueError( - 'The shebang (%r) is not decodable ' - 'from the script encoding (%r)' % (shebang, encoding)) - return shebang - - def _get_script_text(self, entry): - return self.script_template % dict(module=entry.prefix, - import_name=entry.suffix.split('.')[0], - func=entry.suffix) - - manifest = _DEFAULT_MANIFEST - - def get_manifest(self, exename): - base = os.path.basename(exename) - return self.manifest % base - - def _write_script(self, names, shebang, script_bytes, filenames, ext): - use_launcher = self.add_launchers and self._is_nt - linesep = os.linesep.encode('utf-8') - if not shebang.endswith(linesep): - shebang += linesep - if not use_launcher: - script_bytes = shebang + script_bytes - else: # pragma: no cover - if ext == 'py': - launcher = self._get_launcher('t') - else: - launcher = self._get_launcher('w') - stream = BytesIO() - with ZipFile(stream, 'w') as zf: - zf.writestr('__main__.py', script_bytes) - zip_data = stream.getvalue() - script_bytes = launcher + shebang + zip_data - for name in names: - outname = os.path.join(self.target_dir, name) - if use_launcher: # pragma: no cover - n, e = os.path.splitext(outname) - if e.startswith('.py'): - outname = n - outname = '%s.exe' % outname - try: - self._fileop.write_binary_file(outname, script_bytes) - except Exception: - # Failed writing an executable - it might be in use. - logger.warning('Failed to write executable - trying to ' - 'use .deleteme logic') - dfname = '%s.deleteme' % outname - if os.path.exists(dfname): - os.remove(dfname) # Not allowed to fail here - os.rename(outname, dfname) # nor here - self._fileop.write_binary_file(outname, script_bytes) - logger.debug('Able to replace executable using ' - '.deleteme logic') - try: - os.remove(dfname) - except Exception: - pass # still in use - ignore error - else: - if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover - outname = '%s.%s' % (outname, ext) - if os.path.exists(outname) and not self.clobber: - logger.warning('Skipping existing file %s', outname) - continue - self._fileop.write_binary_file(outname, script_bytes) - if self.set_mode: - self._fileop.set_executable_mode([outname]) - filenames.append(outname) - - variant_separator = '-' - - def get_script_filenames(self, name): - result = set() - if '' in self.variants: - result.add(name) - if 'X' in self.variants: - result.add('%s%s' % (name, self.version_info[0])) - if 'X.Y' in self.variants: - result.add('%s%s%s.%s' % (name, self.variant_separator, - self.version_info[0], self.version_info[1])) - return result - - def _make_script(self, entry, filenames, options=None): - post_interp = b'' - if options: - args = options.get('interpreter_args', []) - if args: - args = ' %s' % ' '.join(args) - post_interp = args.encode('utf-8') - shebang = self._get_shebang('utf-8', post_interp, options=options) - script = self._get_script_text(entry).encode('utf-8') - scriptnames = self.get_script_filenames(entry.name) - if options and options.get('gui', False): - ext = 'pyw' - else: - ext = 'py' - self._write_script(scriptnames, shebang, script, filenames, ext) - - def _copy_script(self, script, filenames): - adjust = False - script = os.path.join(self.source_dir, convert_path(script)) - outname = os.path.join(self.target_dir, os.path.basename(script)) - if not self.force and not self._fileop.newer(script, outname): - logger.debug('not copying %s (up-to-date)', script) - return - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, 'rb') - except IOError: # pragma: no cover - if not self.dry_run: - raise - f = None - else: - first_line = f.readline() - if not first_line: # pragma: no cover - logger.warning('%s is an empty file (skipping)', script) - return - - match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if not adjust: - if f: - f.close() - self._fileop.copy_file(script, outname) - if self.set_mode: - self._fileop.set_executable_mode([outname]) - filenames.append(outname) - else: - logger.info('copying and adjusting %s -> %s', script, - self.target_dir) - if not self._fileop.dry_run: - encoding, lines = detect_encoding(f.readline) - f.seek(0) - shebang = self._get_shebang(encoding, post_interp) - if b'pythonw' in first_line: # pragma: no cover - ext = 'pyw' - else: - ext = 'py' - n = os.path.basename(outname) - self._write_script([n], shebang, f.read(), filenames, ext) - if f: - f.close() - - @property - def dry_run(self): - return self._fileop.dry_run - - @dry_run.setter - def dry_run(self, value): - self._fileop.dry_run = value - - if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover - # Executable launcher support. 
- # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ - - def _get_launcher(self, kind): - if struct.calcsize('P') == 8: # 64-bit - bits = '64' - else: - bits = '32' - platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' - name = '%s%s%s.exe' % (kind, bits, platform_suffix) - # Issue 31: don't hardcode an absolute package name, but - # determine it relative to the current package - distlib_package = __name__.rsplit('.', 1)[0] - resource = finder(distlib_package).find(name) - if not resource: - msg = ('Unable to find resource %s in package %s' % (name, - distlib_package)) - raise ValueError(msg) - return resource.bytes - - # Public API follows - - def make(self, specification, options=None): - """ - Make a script. - - :param specification: The specification, which is either a valid export - entry specification (to make a script from a - callable) or a filename (to make a script by - copying from a source location). - :param options: A dictionary of options controlling script generation. - :return: A list of all absolute pathnames written to. - """ - filenames = [] - entry = get_export_entry(specification) - if entry is None: - self._copy_script(specification, filenames) - else: - self._make_script(entry, filenames, options=options) - return filenames - - def make_multiple(self, specifications, options=None): - """ - Take a list of specifications and make scripts from them, - :param specifications: A list of specifications. - :return: A list of all absolute pathnames written to, - """ - filenames = [] - for specification in specifications: - filenames.extend(self.make(specification, options)) - return filenames diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/t32.exe b/venv/Lib/site-packages/pip/_vendor/distlib/t32.exe deleted file mode 100644 index 8932a18..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/t32.exe and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/t64-arm.exe b/venv/Lib/site-packages/pip/_vendor/distlib/t64-arm.exe deleted file mode 100644 index c5df486..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/t64-arm.exe and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/t64.exe b/venv/Lib/site-packages/pip/_vendor/distlib/t64.exe deleted file mode 100644 index 325b805..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/t64.exe and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/util.py b/venv/Lib/site-packages/pip/_vendor/distlib/util.py deleted file mode 100644 index 80bfc86..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/util.py +++ /dev/null @@ -1,1969 +0,0 @@ -# -# Copyright (C) 2012-2021 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -import codecs -from collections import deque -import contextlib -import csv -from glob import iglob as std_iglob -import io -import json -import logging -import os -import py_compile -import re -import socket -try: - import ssl -except ImportError: # pragma: no cover - ssl = None -import subprocess -import sys -import tarfile -import tempfile -import textwrap - -try: - import threading -except ImportError: # pragma: no cover - import dummy_threading as threading -import time - -from . 
import DistlibException -from .compat import (string_types, text_type, shutil, raw_input, StringIO, - cache_from_source, urlopen, urljoin, httplib, xmlrpclib, - splittype, HTTPHandler, BaseConfigurator, valid_ident, - Container, configparser, URLError, ZipFile, fsdecode, - unquote, urlparse) - -logger = logging.getLogger(__name__) - -# -# Requirement parsing code as per PEP 508 -# - -IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') -VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') -COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') -MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') -OR = re.compile(r'^or\b\s*') -AND = re.compile(r'^and\b\s*') -NON_SPACE = re.compile(r'(\S+)\s*') -STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') - - -def parse_marker(marker_string): - """ - Parse a marker string and return a dictionary containing a marker expression. - - The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in - the expression grammar, or strings. A string contained in quotes is to be - interpreted as a literal string, and a string not contained in quotes is a - variable (such as os_name). - """ - def marker_var(remaining): - # either identifier, or literal string - m = IDENTIFIER.match(remaining) - if m: - result = m.groups()[0] - remaining = remaining[m.end():] - elif not remaining: - raise SyntaxError('unexpected end of input') - else: - q = remaining[0] - if q not in '\'"': - raise SyntaxError('invalid expression: %s' % remaining) - oq = '\'"'.replace(q, '') - remaining = remaining[1:] - parts = [q] - while remaining: - # either a string chunk, or oq, or q to terminate - if remaining[0] == q: - break - elif remaining[0] == oq: - parts.append(oq) - remaining = remaining[1:] - else: - m = STRING_CHUNK.match(remaining) - if not m: - raise SyntaxError('error in string literal: %s' % remaining) - parts.append(m.groups()[0]) - remaining = remaining[m.end():] - else: - s = ''.join(parts) - raise SyntaxError('unterminated string: %s' % s) - parts.append(q) - result = ''.join(parts) - remaining = remaining[1:].lstrip() # skip past closing quote - return result, remaining - - def marker_expr(remaining): - if remaining and remaining[0] == '(': - result, remaining = marker(remaining[1:].lstrip()) - if remaining[0] != ')': - raise SyntaxError('unterminated parenthesis: %s' % remaining) - remaining = remaining[1:].lstrip() - else: - lhs, remaining = marker_var(remaining) - while remaining: - m = MARKER_OP.match(remaining) - if not m: - break - op = m.groups()[0] - remaining = remaining[m.end():] - rhs, remaining = marker_var(remaining) - lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} - result = lhs - return result, remaining - - def marker_and(remaining): - lhs, remaining = marker_expr(remaining) - while remaining: - m = AND.match(remaining) - if not m: - break - remaining = remaining[m.end():] - rhs, remaining = marker_expr(remaining) - lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} - return lhs, remaining - - def marker(remaining): - lhs, remaining = marker_and(remaining) - while remaining: - m = OR.match(remaining) - if not m: - break - remaining = remaining[m.end():] - rhs, remaining = marker_and(remaining) - lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} - return lhs, remaining - - return marker(marker_string) - - -def parse_requirement(req): - """ - Parse a requirement passed in as a string. Return a Container - whose attributes contain the various parts of the requirement. 
- """ - remaining = req.strip() - if not remaining or remaining.startswith('#'): - return None - m = IDENTIFIER.match(remaining) - if not m: - raise SyntaxError('name expected: %s' % remaining) - distname = m.groups()[0] - remaining = remaining[m.end():] - extras = mark_expr = versions = uri = None - if remaining and remaining[0] == '[': - i = remaining.find(']', 1) - if i < 0: - raise SyntaxError('unterminated extra: %s' % remaining) - s = remaining[1:i] - remaining = remaining[i + 1:].lstrip() - extras = [] - while s: - m = IDENTIFIER.match(s) - if not m: - raise SyntaxError('malformed extra: %s' % s) - extras.append(m.groups()[0]) - s = s[m.end():] - if not s: - break - if s[0] != ',': - raise SyntaxError('comma expected in extras: %s' % s) - s = s[1:].lstrip() - if not extras: - extras = None - if remaining: - if remaining[0] == '@': - # it's a URI - remaining = remaining[1:].lstrip() - m = NON_SPACE.match(remaining) - if not m: - raise SyntaxError('invalid URI: %s' % remaining) - uri = m.groups()[0] - t = urlparse(uri) - # there are issues with Python and URL parsing, so this test - # is a bit crude. See bpo-20271, bpo-23505. Python doesn't - # always parse invalid URLs correctly - it should raise - # exceptions for malformed URLs - if not (t.scheme and t.netloc): - raise SyntaxError('Invalid URL: %s' % uri) - remaining = remaining[m.end():].lstrip() - else: - - def get_versions(ver_remaining): - """ - Return a list of operator, version tuples if any are - specified, else None. - """ - m = COMPARE_OP.match(ver_remaining) - versions = None - if m: - versions = [] - while True: - op = m.groups()[0] - ver_remaining = ver_remaining[m.end():] - m = VERSION_IDENTIFIER.match(ver_remaining) - if not m: - raise SyntaxError('invalid version: %s' % ver_remaining) - v = m.groups()[0] - versions.append((op, v)) - ver_remaining = ver_remaining[m.end():] - if not ver_remaining or ver_remaining[0] != ',': - break - ver_remaining = ver_remaining[1:].lstrip() - # Some packages have a trailing comma which would break things - # See issue #148 - if not ver_remaining: - break - m = COMPARE_OP.match(ver_remaining) - if not m: - raise SyntaxError('invalid constraint: %s' % ver_remaining) - if not versions: - versions = None - return versions, ver_remaining - - if remaining[0] != '(': - versions, remaining = get_versions(remaining) - else: - i = remaining.find(')', 1) - if i < 0: - raise SyntaxError('unterminated parenthesis: %s' % remaining) - s = remaining[1:i] - remaining = remaining[i + 1:].lstrip() - # As a special diversion from PEP 508, allow a version number - # a.b.c in parentheses as a synonym for ~= a.b.c (because this - # is allowed in earlier PEPs) - if COMPARE_OP.match(s): - versions, _ = get_versions(s) - else: - m = VERSION_IDENTIFIER.match(s) - if not m: - raise SyntaxError('invalid constraint: %s' % s) - v = m.groups()[0] - s = s[m.end():].lstrip() - if s: - raise SyntaxError('invalid constraint: %s' % s) - versions = [('~=', v)] - - if remaining: - if remaining[0] != ';': - raise SyntaxError('invalid requirement: %s' % remaining) - remaining = remaining[1:].lstrip() - - mark_expr, remaining = parse_marker(remaining) - - if remaining and remaining[0] != '#': - raise SyntaxError('unexpected trailing data: %s' % remaining) - - if not versions: - rs = distname - else: - rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) - return Container(name=distname, extras=extras, constraints=versions, - marker=mark_expr, url=uri, requirement=rs) - - -def 
get_resources_dests(resources_root, rules): - """Find destinations for resources files""" - - def get_rel_path(root, path): - # normalizes and returns a lstripped-/-separated path - root = root.replace(os.path.sep, '/') - path = path.replace(os.path.sep, '/') - assert path.startswith(root) - return path[len(root):].lstrip('/') - - destinations = {} - for base, suffix, dest in rules: - prefix = os.path.join(resources_root, base) - for abs_base in iglob(prefix): - abs_glob = os.path.join(abs_base, suffix) - for abs_path in iglob(abs_glob): - resource_file = get_rel_path(resources_root, abs_path) - if dest is None: # remove the entry if it was here - destinations.pop(resource_file, None) - else: - rel_path = get_rel_path(abs_base, abs_path) - rel_dest = dest.replace(os.path.sep, '/').rstrip('/') - destinations[resource_file] = rel_dest + '/' + rel_path - return destinations - - -def in_venv(): - if hasattr(sys, 'real_prefix'): - # virtualenv venvs - result = True - else: - # PEP 405 venvs - result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) - return result - - -def get_executable(): -# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as -# changes to the stub launcher mean that sys.executable always points -# to the stub on OS X -# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' -# in os.environ): -# result = os.environ['__PYVENV_LAUNCHER__'] -# else: -# result = sys.executable -# return result - # Avoid normcasing: see issue #143 - # result = os.path.normcase(sys.executable) - result = sys.executable - if not isinstance(result, text_type): - result = fsdecode(result) - return result - - -def proceed(prompt, allowed_chars, error_prompt=None, default=None): - p = prompt - while True: - s = raw_input(p) - p = prompt - if not s and default: - s = default - if s: - c = s[0].lower() - if c in allowed_chars: - break - if error_prompt: - p = '%c: %s\n%s' % (c, error_prompt, prompt) - return c - - -def extract_by_key(d, keys): - if isinstance(keys, string_types): - keys = keys.split() - result = {} - for key in keys: - if key in d: - result[key] = d[key] - return result - -def read_exports(stream): - if sys.version_info[0] >= 3: - # needs to be a text stream - stream = codecs.getreader('utf-8')(stream) - # Try to load as JSON, falling back on legacy format - data = stream.read() - stream = StringIO(data) - try: - jdata = json.load(stream) - result = jdata['extensions']['python.exports']['exports'] - for group, entries in result.items(): - for k, v in entries.items(): - s = '%s = %s' % (k, v) - entry = get_export_entry(s) - assert entry is not None - entries[k] = entry - return result - except Exception: - stream.seek(0, 0) - - def read_stream(cp, stream): - if hasattr(cp, 'read_file'): - cp.read_file(stream) - else: - cp.readfp(stream) - - cp = configparser.ConfigParser() - try: - read_stream(cp, stream) - except configparser.MissingSectionHeaderError: - stream.close() - data = textwrap.dedent(data) - stream = StringIO(data) - read_stream(cp, stream) - - result = {} - for key in cp.sections(): - result[key] = entries = {} - for name, value in cp.items(key): - s = '%s = %s' % (name, value) - entry = get_export_entry(s) - assert entry is not None - #entry.dist = self - entries[name] = entry - return result - - -def write_exports(exports, stream): - if sys.version_info[0] >= 3: - # needs to be a text stream - stream = codecs.getwriter('utf-8')(stream) - cp = configparser.ConfigParser() - for k, v in exports.items(): - # TODO check k, v for valid values - cp.add_section(k) 
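The PEP 508 requirement and marker parser deleted a little earlier in this hunk (parse_marker / parse_requirement) returns a plain attribute container. A minimal usage sketch, assuming the standalone distlib distribution is installed (pip install distlib), which ships this same util module:

    from distlib.util import parse_requirement

    r = parse_requirement('requests[security] >= 2.8.1, < 3.0 ; python_version < "3.6"')
    print(r.name)         # 'requests'
    print(r.extras)       # ['security']
    print(r.constraints)  # [('>=', '2.8.1'), ('<', '3.0')]
    print(r.marker)       # {'op': '<', 'lhs': 'python_version', 'rhs': '"3.6"'}  (string literals keep their quotes)
    print(r.requirement)  # 'requests >= 2.8.1, < 3.0'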
- for entry in v.values(): - if entry.suffix is None: - s = entry.prefix - else: - s = '%s:%s' % (entry.prefix, entry.suffix) - if entry.flags: - s = '%s [%s]' % (s, ', '.join(entry.flags)) - cp.set(k, entry.name, s) - cp.write(stream) - - -@contextlib.contextmanager -def tempdir(): - td = tempfile.mkdtemp() - try: - yield td - finally: - shutil.rmtree(td) - -@contextlib.contextmanager -def chdir(d): - cwd = os.getcwd() - try: - os.chdir(d) - yield - finally: - os.chdir(cwd) - - -@contextlib.contextmanager -def socket_timeout(seconds=15): - cto = socket.getdefaulttimeout() - try: - socket.setdefaulttimeout(seconds) - yield - finally: - socket.setdefaulttimeout(cto) - - -class cached_property(object): - def __init__(self, func): - self.func = func - #for attr in ('__name__', '__module__', '__doc__'): - # setattr(self, attr, getattr(func, attr, None)) - - def __get__(self, obj, cls=None): - if obj is None: - return self - value = self.func(obj) - object.__setattr__(obj, self.func.__name__, value) - #obj.__dict__[self.func.__name__] = value = self.func(obj) - return value - -def convert_path(pathname): - """Return 'pathname' as a name that will work on the native filesystem. - - The path is split on '/' and put back together again using the current - directory separator. Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - if os.sep == '/': - return pathname - if not pathname: - return pathname - if pathname[0] == '/': - raise ValueError("path '%s' cannot be absolute" % pathname) - if pathname[-1] == '/': - raise ValueError("path '%s' cannot end with '/'" % pathname) - - paths = pathname.split('/') - while os.curdir in paths: - paths.remove(os.curdir) - if not paths: - return os.curdir - return os.path.join(*paths) - - -class FileOperator(object): - def __init__(self, dry_run=False): - self.dry_run = dry_run - self.ensured = set() - self._init_record() - - def _init_record(self): - self.record = False - self.files_written = set() - self.dirs_created = set() - - def record_as_written(self, path): - if self.record: - self.files_written.add(path) - - def newer(self, source, target): - """Tell if the target is newer than the source. - - Returns true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. - - Returns false if both exist and 'target' is the same age or younger - than 'source'. Raise PackagingFileError if 'source' does not exist. - - Note that this test is not very accurate: files created in the same - second will have the same "age". - """ - if not os.path.exists(source): - raise DistlibException("file '%r' does not exist" % - os.path.abspath(source)) - if not os.path.exists(target): - return True - - return os.stat(source).st_mtime > os.stat(target).st_mtime - - def copy_file(self, infile, outfile, check=True): - """Copy a file respecting dry-run and force flags. 
- """ - self.ensure_dir(os.path.dirname(outfile)) - logger.info('Copying %s to %s', infile, outfile) - if not self.dry_run: - msg = None - if check: - if os.path.islink(outfile): - msg = '%s is a symlink' % outfile - elif os.path.exists(outfile) and not os.path.isfile(outfile): - msg = '%s is a non-regular file' % outfile - if msg: - raise ValueError(msg + ' which would be overwritten') - shutil.copyfile(infile, outfile) - self.record_as_written(outfile) - - def copy_stream(self, instream, outfile, encoding=None): - assert not os.path.isdir(outfile) - self.ensure_dir(os.path.dirname(outfile)) - logger.info('Copying stream %s to %s', instream, outfile) - if not self.dry_run: - if encoding is None: - outstream = open(outfile, 'wb') - else: - outstream = codecs.open(outfile, 'w', encoding=encoding) - try: - shutil.copyfileobj(instream, outstream) - finally: - outstream.close() - self.record_as_written(outfile) - - def write_binary_file(self, path, data): - self.ensure_dir(os.path.dirname(path)) - if not self.dry_run: - if os.path.exists(path): - os.remove(path) - with open(path, 'wb') as f: - f.write(data) - self.record_as_written(path) - - def write_text_file(self, path, data, encoding): - self.write_binary_file(path, data.encode(encoding)) - - def set_mode(self, bits, mask, files): - if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): - # Set the executable bits (owner, group, and world) on - # all the files specified. - for f in files: - if self.dry_run: - logger.info("changing mode of %s", f) - else: - mode = (os.stat(f).st_mode | bits) & mask - logger.info("changing mode of %s to %o", f, mode) - os.chmod(f, mode) - - set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) - - def ensure_dir(self, path): - path = os.path.abspath(path) - if path not in self.ensured and not os.path.exists(path): - self.ensured.add(path) - d, f = os.path.split(path) - self.ensure_dir(d) - logger.info('Creating %s' % path) - if not self.dry_run: - os.mkdir(path) - if self.record: - self.dirs_created.add(path) - - def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): - dpath = cache_from_source(path, not optimize) - logger.info('Byte-compiling %s to %s', path, dpath) - if not self.dry_run: - if force or self.newer(path, dpath): - if not prefix: - diagpath = None - else: - assert path.startswith(prefix) - diagpath = path[len(prefix):] - compile_kwargs = {} - if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): - compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH - py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error - self.record_as_written(dpath) - return dpath - - def ensure_removed(self, path): - if os.path.exists(path): - if os.path.isdir(path) and not os.path.islink(path): - logger.debug('Removing directory tree at %s', path) - if not self.dry_run: - shutil.rmtree(path) - if self.record: - if path in self.dirs_created: - self.dirs_created.remove(path) - else: - if os.path.islink(path): - s = 'link' - else: - s = 'file' - logger.debug('Removing %s %s', s, path) - if not self.dry_run: - os.remove(path) - if self.record: - if path in self.files_written: - self.files_written.remove(path) - - def is_writable(self, path): - result = False - while not result: - if os.path.exists(path): - result = os.access(path, os.W_OK) - break - parent = os.path.dirname(path) - if parent == path: - break - path = parent - return result - - def commit(self): - """ - Commit recorded 
changes, turn off recording, return - changes. - """ - assert self.record - result = self.files_written, self.dirs_created - self._init_record() - return result - - def rollback(self): - if not self.dry_run: - for f in list(self.files_written): - if os.path.exists(f): - os.remove(f) - # dirs should all be empty now, except perhaps for - # __pycache__ subdirs - # reverse so that subdirs appear before their parents - dirs = sorted(self.dirs_created, reverse=True) - for d in dirs: - flist = os.listdir(d) - if flist: - assert flist == ['__pycache__'] - sd = os.path.join(d, flist[0]) - os.rmdir(sd) - os.rmdir(d) # should fail if non-empty - self._init_record() - -def resolve(module_name, dotted_path): - if module_name in sys.modules: - mod = sys.modules[module_name] - else: - mod = __import__(module_name) - if dotted_path is None: - result = mod - else: - parts = dotted_path.split('.') - result = getattr(mod, parts.pop(0)) - for p in parts: - result = getattr(result, p) - return result - - -class ExportEntry(object): - def __init__(self, name, prefix, suffix, flags): - self.name = name - self.prefix = prefix - self.suffix = suffix - self.flags = flags - - @cached_property - def value(self): - return resolve(self.prefix, self.suffix) - - def __repr__(self): # pragma: no cover - return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, - self.suffix, self.flags) - - def __eq__(self, other): - if not isinstance(other, ExportEntry): - result = False - else: - result = (self.name == other.name and - self.prefix == other.prefix and - self.suffix == other.suffix and - self.flags == other.flags) - return result - - __hash__ = object.__hash__ - - -ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) - \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) - \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? - ''', re.VERBOSE) - -def get_export_entry(specification): - m = ENTRY_RE.search(specification) - if not m: - result = None - if '[' in specification or ']' in specification: - raise DistlibException("Invalid specification " - "'%s'" % specification) - else: - d = m.groupdict() - name = d['name'] - path = d['callable'] - colons = path.count(':') - if colons == 0: - prefix, suffix = path, None - else: - if colons != 1: - raise DistlibException("Invalid specification " - "'%s'" % specification) - prefix, suffix = path.split(':') - flags = d['flags'] - if flags is None: - if '[' in specification or ']' in specification: - raise DistlibException("Invalid specification " - "'%s'" % specification) - flags = [] - else: - flags = [f.strip() for f in flags.split(',')] - result = ExportEntry(name, prefix, suffix, flags) - return result - - -def get_cache_base(suffix=None): - """ - Return the default base location for distlib caches. If the directory does - not exist, it is created. Use the suffix provided for the base directory, - and default to '.distlib' if it isn't provided. - - On Windows, if LOCALAPPDATA is defined in the environment, then it is - assumed to be a directory, and will be the parent directory of the result. - On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home - directory - using os.expanduser('~') - will be the parent directory of - the result. - - The result is just the directory '.distlib' in the parent directory as - determined above, or with the name specified with ``suffix``.
- """ - if suffix is None: - suffix = '.distlib' - if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: - result = os.path.expandvars('$localappdata') - else: - # Assume posix, or old Windows - result = os.path.expanduser('~') - # we use 'isdir' instead of 'exists', because we want to - # fail if there's a file with that name - if os.path.isdir(result): - usable = os.access(result, os.W_OK) - if not usable: - logger.warning('Directory exists but is not writable: %s', result) - else: - try: - os.makedirs(result) - usable = True - except OSError: - logger.warning('Unable to create %s', result, exc_info=True) - usable = False - if not usable: - result = tempfile.mkdtemp() - logger.warning('Default location unusable, using %s', result) - return os.path.join(result, suffix) - - -def path_to_cache_dir(path): - """ - Convert an absolute path to a directory name for use in a cache. - - The algorithm used is: - - #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. - #. Any occurrence of ``os.sep`` is replaced with ``'--'``. - #. ``'.cache'`` is appended. - """ - d, p = os.path.splitdrive(os.path.abspath(path)) - if d: - d = d.replace(':', '---') - p = p.replace(os.sep, '--') - return d + p + '.cache' - - -def ensure_slash(s): - if not s.endswith('/'): - return s + '/' - return s - - -def parse_credentials(netloc): - username = password = None - if '@' in netloc: - prefix, netloc = netloc.rsplit('@', 1) - if ':' not in prefix: - username = prefix - else: - username, password = prefix.split(':', 1) - if username: - username = unquote(username) - if password: - password = unquote(password) - return username, password, netloc - - -def get_process_umask(): - result = os.umask(0o22) - os.umask(result) - return result - -def is_string_sequence(seq): - result = True - i = None - for i, s in enumerate(seq): - if not isinstance(s, string_types): - result = False - break - assert i is not None - return result - -PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' - '([a-z0-9_.+-]+)', re.I) -PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') - - -def split_filename(filename, project_name=None): - """ - Extract name, version, python version from a filename (no extension) - - Return name, version, pyver or None - """ - result = None - pyver = None - filename = unquote(filename).replace(' ', '-') - m = PYTHON_VERSION.search(filename) - if m: - pyver = m.group(1) - filename = filename[:m.start()] - if project_name and len(filename) > len(project_name) + 1: - m = re.match(re.escape(project_name) + r'\b', filename) - if m: - n = m.end() - result = filename[:n], filename[n + 1:], pyver - if result is None: - m = PROJECT_NAME_AND_VERSION.match(filename) - if m: - result = m.group(1), m.group(3), pyver - return result - -# Allow spaces in name because of legacy dists like "Twisted Core" -NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' - r'\(\s*(?P[^\s)]+)\)$') - -def parse_name_and_version(p): - """ - A utility method used to get name and version from a string. - - From e.g. a Provides-Dist value. - - :param p: A value in a form 'foo (1.0)' - :return: The name and version as a tuple. 
- """ - m = NAME_VERSION_RE.match(p) - if not m: - raise DistlibException('Ill-formed name/version string: \'%s\'' % p) - d = m.groupdict() - return d['name'].strip().lower(), d['ver'] - -def get_extras(requested, available): - result = set() - requested = set(requested or []) - available = set(available or []) - if '*' in requested: - requested.remove('*') - result |= available - for r in requested: - if r == '-': - result.add(r) - elif r.startswith('-'): - unwanted = r[1:] - if unwanted not in available: - logger.warning('undeclared extra: %s' % unwanted) - if unwanted in result: - result.remove(unwanted) - else: - if r not in available: - logger.warning('undeclared extra: %s' % r) - result.add(r) - return result -# -# Extended metadata functionality -# - -def _get_external_data(url): - result = {} - try: - # urlopen might fail if it runs into redirections, - # because of Python issue #13696. Fixed in locators - # using a custom redirect handler. - resp = urlopen(url) - headers = resp.info() - ct = headers.get('Content-Type') - if not ct.startswith('application/json'): - logger.debug('Unexpected response for JSON request: %s', ct) - else: - reader = codecs.getreader('utf-8')(resp) - #data = reader.read().decode('utf-8') - #result = json.loads(data) - result = json.load(reader) - except Exception as e: - logger.exception('Failed to get external data for %s: %s', url, e) - return result - -_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' - -def get_project_data(name): - url = '%s/%s/project.json' % (name[0].upper(), name) - url = urljoin(_external_data_base_url, url) - result = _get_external_data(url) - return result - -def get_package_data(name, version): - url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) - url = urljoin(_external_data_base_url, url) - return _get_external_data(url) - - -class Cache(object): - """ - A class implementing a cache for resources that need to live in the file system - e.g. shared libraries. This class was moved from resources to here because it - could be used by other modules, e.g. the wheel module. - """ - - def __init__(self, base): - """ - Initialise an instance. - - :param base: The base directory where the cache should be located. - """ - # we use 'isdir' instead of 'exists', because we want to - # fail if there's a file with that name - if not os.path.isdir(base): # pragma: no cover - os.makedirs(base) - if (os.stat(base).st_mode & 0o77) != 0: - logger.warning('Directory \'%s\' is not private', base) - self.base = os.path.abspath(os.path.normpath(base)) - - def prefix_to_dir(self, prefix): - """ - Converts a resource prefix to a directory name in the cache. - """ - return path_to_cache_dir(prefix) - - def clear(self): - """ - Clear the cache. - """ - not_removed = [] - for fn in os.listdir(self.base): - fn = os.path.join(self.base, fn) - try: - if os.path.islink(fn) or os.path.isfile(fn): - os.remove(fn) - elif os.path.isdir(fn): - shutil.rmtree(fn) - except Exception: - not_removed.append(fn) - return not_removed - - -class EventMixin(object): - """ - A very simple publish/subscribe system. - """ - def __init__(self): - self._subscribers = {} - - def add(self, event, subscriber, append=True): - """ - Add a subscriber for an event. - - :param event: The name of an event. - :param subscriber: The subscriber to be added (and called when the - event is published). - :param append: Whether to append or prepend the subscriber to an - existing subscriber list for the event. 
- """ - subs = self._subscribers - if event not in subs: - subs[event] = deque([subscriber]) - else: - sq = subs[event] - if append: - sq.append(subscriber) - else: - sq.appendleft(subscriber) - - def remove(self, event, subscriber): - """ - Remove a subscriber for an event. - - :param event: The name of an event. - :param subscriber: The subscriber to be removed. - """ - subs = self._subscribers - if event not in subs: - raise ValueError('No subscribers: %r' % event) - subs[event].remove(subscriber) - - def get_subscribers(self, event): - """ - Return an iterator for the subscribers for an event. - :param event: The event to return subscribers for. - """ - return iter(self._subscribers.get(event, ())) - - def publish(self, event, *args, **kwargs): - """ - Publish a event and return a list of values returned by its - subscribers. - - :param event: The event to publish. - :param args: The positional arguments to pass to the event's - subscribers. - :param kwargs: The keyword arguments to pass to the event's - subscribers. - """ - result = [] - for subscriber in self.get_subscribers(event): - try: - value = subscriber(event, *args, **kwargs) - except Exception: - logger.exception('Exception during event publication') - value = None - result.append(value) - logger.debug('publish %s: args = %s, kwargs = %s, result = %s', - event, args, kwargs, result) - return result - -# -# Simple sequencing -# -class Sequencer(object): - def __init__(self): - self._preds = {} - self._succs = {} - self._nodes = set() # nodes with no preds/succs - - def add_node(self, node): - self._nodes.add(node) - - def remove_node(self, node, edges=False): - if node in self._nodes: - self._nodes.remove(node) - if edges: - for p in set(self._preds.get(node, ())): - self.remove(p, node) - for s in set(self._succs.get(node, ())): - self.remove(node, s) - # Remove empties - for k, v in list(self._preds.items()): - if not v: - del self._preds[k] - for k, v in list(self._succs.items()): - if not v: - del self._succs[k] - - def add(self, pred, succ): - assert pred != succ - self._preds.setdefault(succ, set()).add(pred) - self._succs.setdefault(pred, set()).add(succ) - - def remove(self, pred, succ): - assert pred != succ - try: - preds = self._preds[succ] - succs = self._succs[pred] - except KeyError: # pragma: no cover - raise ValueError('%r not a successor of anything' % succ) - try: - preds.remove(pred) - succs.remove(succ) - except KeyError: # pragma: no cover - raise ValueError('%r not a successor of %r' % (succ, pred)) - - def is_step(self, step): - return (step in self._preds or step in self._succs or - step in self._nodes) - - def get_steps(self, final): - if not self.is_step(final): - raise ValueError('Unknown: %r' % final) - result = [] - todo = [] - seen = set() - todo.append(final) - while todo: - step = todo.pop(0) - if step in seen: - # if a step was already seen, - # move it to the end (so it will appear earlier - # when reversed on return) ... 
but not for the - # final step, as that would be confusing for - # users - if step != final: - result.remove(step) - result.append(step) - else: - seen.add(step) - result.append(step) - preds = self._preds.get(step, ()) - todo.extend(preds) - return reversed(result) - - @property - def strong_connections(self): - #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm - index_counter = [0] - stack = [] - lowlinks = {} - index = {} - result = [] - - graph = self._succs - - def strongconnect(node): - # set the depth index for this node to the smallest unused index - index[node] = index_counter[0] - lowlinks[node] = index_counter[0] - index_counter[0] += 1 - stack.append(node) - - # Consider successors - try: - successors = graph[node] - except Exception: - successors = [] - for successor in successors: - if successor not in lowlinks: - # Successor has not yet been visited - strongconnect(successor) - lowlinks[node] = min(lowlinks[node],lowlinks[successor]) - elif successor in stack: - # the successor is in the stack and hence in the current - # strongly connected component (SCC) - lowlinks[node] = min(lowlinks[node],index[successor]) - - # If `node` is a root node, pop the stack and generate an SCC - if lowlinks[node] == index[node]: - connected_component = [] - - while True: - successor = stack.pop() - connected_component.append(successor) - if successor == node: break - component = tuple(connected_component) - # storing the result - result.append(component) - - for node in graph: - if node not in lowlinks: - strongconnect(node) - - return result - - @property - def dot(self): - result = ['digraph G {'] - for succ in self._preds: - preds = self._preds[succ] - for pred in preds: - result.append(' %s -> %s;' % (pred, succ)) - for node in self._nodes: - result.append(' %s;' % node) - result.append('}') - return '\n'.join(result) - -# -# Unarchiving functionality for zip, tar, tgz, tbz, whl -# - -ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', - '.tgz', '.tbz', '.whl') - -def unarchive(archive_filename, dest_dir, format=None, check=True): - - def check_path(path): - if not isinstance(path, text_type): - path = path.decode('utf-8') - p = os.path.abspath(os.path.join(dest_dir, path)) - if not p.startswith(dest_dir) or p[plen] != os.sep: - raise ValueError('path outside destination: %r' % p) - - dest_dir = os.path.abspath(dest_dir) - plen = len(dest_dir) - archive = None - if format is None: - if archive_filename.endswith(('.zip', '.whl')): - format = 'zip' - elif archive_filename.endswith(('.tar.gz', '.tgz')): - format = 'tgz' - mode = 'r:gz' - elif archive_filename.endswith(('.tar.bz2', '.tbz')): - format = 'tbz' - mode = 'r:bz2' - elif archive_filename.endswith('.tar'): - format = 'tar' - mode = 'r' - else: # pragma: no cover - raise ValueError('Unknown format for %r' % archive_filename) - try: - if format == 'zip': - archive = ZipFile(archive_filename, 'r') - if check: - names = archive.namelist() - for name in names: - check_path(name) - else: - archive = tarfile.open(archive_filename, mode) - if check: - names = archive.getnames() - for name in names: - check_path(name) - if format != 'zip' and sys.version_info[0] < 3: - # See Python issue 17153. If the dest path contains Unicode, - # tarfile extraction fails on Python 2.x if a member path name - # contains non-ASCII characters - it leads to an implicit - # bytes -> unicode conversion using ASCII to decode. 
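The Sequencer deleted above is a small dependency-ordering helper: get_steps() walks predecessors to produce an execution order, and strong_connections applies Tarjan's algorithm so cycles can be detected. A minimal sketch, again assuming the standalone distlib distribution:

    from distlib.util import Sequencer

    s = Sequencer()
    s.add('compile', 'test')      # 'compile' must come before 'test'
    s.add('test', 'package')
    s.add('package', 'upload')

    print(list(s.get_steps('upload')))
    # ['compile', 'test', 'package', 'upload']

    # With no cycles, every strongly connected component is a single node,
    # e.g. [('upload',), ('package',), ('test',), ('compile',)]
    print(s.strong_connections)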
- for tarinfo in archive.getmembers(): - if not isinstance(tarinfo.name, text_type): - tarinfo.name = tarinfo.name.decode('utf-8') - archive.extractall(dest_dir) - - finally: - if archive: - archive.close() - - -def zip_dir(directory): - """zip a directory tree into a BytesIO object""" - result = io.BytesIO() - dlen = len(directory) - with ZipFile(result, "w") as zf: - for root, dirs, files in os.walk(directory): - for name in files: - full = os.path.join(root, name) - rel = root[dlen:] - dest = os.path.join(rel, name) - zf.write(full, dest) - return result - -# -# Simple progress bar -# - -UNITS = ('', 'K', 'M', 'G','T','P') - - -class Progress(object): - unknown = 'UNKNOWN' - - def __init__(self, minval=0, maxval=100): - assert maxval is None or maxval >= minval - self.min = self.cur = minval - self.max = maxval - self.started = None - self.elapsed = 0 - self.done = False - - def update(self, curval): - assert self.min <= curval - assert self.max is None or curval <= self.max - self.cur = curval - now = time.time() - if self.started is None: - self.started = now - else: - self.elapsed = now - self.started - - def increment(self, incr): - assert incr >= 0 - self.update(self.cur + incr) - - def start(self): - self.update(self.min) - return self - - def stop(self): - if self.max is not None: - self.update(self.max) - self.done = True - - @property - def maximum(self): - return self.unknown if self.max is None else self.max - - @property - def percentage(self): - if self.done: - result = '100 %' - elif self.max is None: - result = ' ?? %' - else: - v = 100.0 * (self.cur - self.min) / (self.max - self.min) - result = '%3d %%' % v - return result - - def format_duration(self, duration): - if (duration <= 0) and self.max is None or self.cur == self.min: - result = '??:??:??' 
- #elif duration < 1: - # result = '--:--:--' - else: - result = time.strftime('%H:%M:%S', time.gmtime(duration)) - return result - - @property - def ETA(self): - if self.done: - prefix = 'Done' - t = self.elapsed - #import pdb; pdb.set_trace() - else: - prefix = 'ETA ' - if self.max is None: - t = -1 - elif self.elapsed == 0 or (self.cur == self.min): - t = 0 - else: - #import pdb; pdb.set_trace() - t = float(self.max - self.min) - t /= self.cur - self.min - t = (t - 1) * self.elapsed - return '%s: %s' % (prefix, self.format_duration(t)) - - @property - def speed(self): - if self.elapsed == 0: - result = 0.0 - else: - result = (self.cur - self.min) / self.elapsed - for unit in UNITS: - if result < 1000: - break - result /= 1000.0 - return '%d %sB/s' % (result, unit) - -# -# Glob functionality -# - -RICH_GLOB = re.compile(r'\{([^}]*)\}') -_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') -_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') - - -def iglob(path_glob): - """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" - if _CHECK_RECURSIVE_GLOB.search(path_glob): - msg = """invalid glob %r: recursive glob "**" must be used alone""" - raise ValueError(msg % path_glob) - if _CHECK_MISMATCH_SET.search(path_glob): - msg = """invalid glob %r: mismatching set marker '{' or '}'""" - raise ValueError(msg % path_glob) - return _iglob(path_glob) - - -def _iglob(path_glob): - rich_path_glob = RICH_GLOB.split(path_glob, 1) - if len(rich_path_glob) > 1: - assert len(rich_path_glob) == 3, rich_path_glob - prefix, set, suffix = rich_path_glob - for item in set.split(','): - for path in _iglob(''.join((prefix, item, suffix))): - yield path - else: - if '**' not in path_glob: - for item in std_iglob(path_glob): - yield item - else: - prefix, radical = path_glob.split('**', 1) - if prefix == '': - prefix = '.' 
- if radical == '': - radical = '*' - else: - # we support both - radical = radical.lstrip('/') - radical = radical.lstrip('\\') - for path, dir, files in os.walk(prefix): - path = os.path.normpath(path) - for fn in _iglob(os.path.join(path, radical)): - yield fn - -if ssl: - from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, - CertificateError) - - -# -# HTTPSConnection which verifies certificates/matches domains -# - - class HTTPSConnection(httplib.HTTPSConnection): - ca_certs = None # set this to the path to the certs file (.pem) - check_domain = True # only used if ca_certs is not None - - # noinspection PyPropertyAccess - def connect(self): - sock = socket.create_connection((self.host, self.port), self.timeout) - if getattr(self, '_tunnel_host', False): - self.sock = sock - self._tunnel() - - if not hasattr(ssl, 'SSLContext'): - # For 2.x - if self.ca_certs: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, - cert_reqs=cert_reqs, - ssl_version=ssl.PROTOCOL_SSLv23, - ca_certs=self.ca_certs) - else: # pragma: no cover - context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - if hasattr(ssl, 'OP_NO_SSLv2'): - context.options |= ssl.OP_NO_SSLv2 - if self.cert_file: - context.load_cert_chain(self.cert_file, self.key_file) - kwargs = {} - if self.ca_certs: - context.verify_mode = ssl.CERT_REQUIRED - context.load_verify_locations(cafile=self.ca_certs) - if getattr(ssl, 'HAS_SNI', False): - kwargs['server_hostname'] = self.host - self.sock = context.wrap_socket(sock, **kwargs) - if self.ca_certs and self.check_domain: - try: - match_hostname(self.sock.getpeercert(), self.host) - logger.debug('Host verified: %s', self.host) - except CertificateError: # pragma: no cover - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - - class HTTPSHandler(BaseHTTPSHandler): - def __init__(self, ca_certs, check_domain=True): - BaseHTTPSHandler.__init__(self) - self.ca_certs = ca_certs - self.check_domain = check_domain - - def _conn_maker(self, *args, **kwargs): - """ - This is called to create a connection instance. Normally you'd - pass a connection class to do_open, but it doesn't actually check for - a class, and just expects a callable. As long as we behave just as a - constructor would have, we should be OK. If it ever changes so that - we *must* pass a class, we'll create an UnsafeHTTPSConnection class - which just sets check_domain to False in the class definition, and - choose which one to pass to do_open. - """ - result = HTTPSConnection(*args, **kwargs) - if self.ca_certs: - result.ca_certs = self.ca_certs - result.check_domain = self.check_domain - return result - - def https_open(self, req): - try: - return self.do_open(self._conn_maker, req) - except URLError as e: - if 'certificate verify failed' in str(e.reason): - raise CertificateError('Unable to verify server certificate ' - 'for %s' % req.host) - else: - raise - - # - # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- - # Middle proxy using HTTP listens on port 443, or an index mistakenly serves - # HTML containing a http://xyz link when it should be https://xyz), - # you can use the following handler class, which does not allow HTTP traffic. - # - # It works by inheriting from HTTPHandler - so build_opener won't add a - # handler for HTTP itself. 
- # - class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): - def http_open(self, req): - raise URLError('Unexpected HTTP request on what should be a secure ' - 'connection: %s' % req) - -# -# XML-RPC with timeouts -# - -_ver_info = sys.version_info[:2] - -if _ver_info == (2, 6): - class HTTP(httplib.HTTP): - def __init__(self, host='', port=None, **kwargs): - if port == 0: # 0 means use port 0, not the default port - port = None - self._setup(self._connection_class(host, port, **kwargs)) - - - if ssl: - class HTTPS(httplib.HTTPS): - def __init__(self, host='', port=None, **kwargs): - if port == 0: # 0 means use port 0, not the default port - port = None - self._setup(self._connection_class(host, port, **kwargs)) - - -class Transport(xmlrpclib.Transport): - def __init__(self, timeout, use_datetime=0): - self.timeout = timeout - xmlrpclib.Transport.__init__(self, use_datetime) - - def make_connection(self, host): - h, eh, x509 = self.get_host_info(host) - if _ver_info == (2, 6): - result = HTTP(h, timeout=self.timeout) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPConnection(h) - result = self._connection[1] - return result - -if ssl: - class SafeTransport(xmlrpclib.SafeTransport): - def __init__(self, timeout, use_datetime=0): - self.timeout = timeout - xmlrpclib.SafeTransport.__init__(self, use_datetime) - - def make_connection(self, host): - h, eh, kwargs = self.get_host_info(host) - if not kwargs: - kwargs = {} - kwargs['timeout'] = self.timeout - if _ver_info == (2, 6): - result = HTTPS(host, None, **kwargs) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection(h, None, - **kwargs) - result = self._connection[1] - return result - - -class ServerProxy(xmlrpclib.ServerProxy): - def __init__(self, uri, **kwargs): - self.timeout = timeout = kwargs.pop('timeout', None) - # The above classes only come into play if a timeout - # is specified - if timeout is not None: - # scheme = splittype(uri) # deprecated as of Python 3.8 - scheme = urlparse(uri)[0] - use_datetime = kwargs.get('use_datetime', 0) - if scheme == 'https': - tcls = SafeTransport - else: - tcls = Transport - kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) - self.transport = t - xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) - -# -# CSV functionality. This is provided because on 2.x, the csv module can't -# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. -# - -def _csv_open(fn, mode, **kwargs): - if sys.version_info[0] < 3: - mode += 'b' - else: - kwargs['newline'] = '' - # Python 3 determines encoding from locale. 
Force 'utf-8' - # file encoding to match other forced utf-8 encoding - kwargs['encoding'] = 'utf-8' - return open(fn, mode, **kwargs) - - -class CSVBase(object): - defaults = { - 'delimiter': str(','), # The strs are used because we need native - 'quotechar': str('"'), # str in the csv API (2.x won't take - 'lineterminator': str('\n') # Unicode) - } - - def __enter__(self): - return self - - def __exit__(self, *exc_info): - self.stream.close() - - -class CSVReader(CSVBase): - def __init__(self, **kwargs): - if 'stream' in kwargs: - stream = kwargs['stream'] - if sys.version_info[0] >= 3: - # needs to be a text stream - stream = codecs.getreader('utf-8')(stream) - self.stream = stream - else: - self.stream = _csv_open(kwargs['path'], 'r') - self.reader = csv.reader(self.stream, **self.defaults) - - def __iter__(self): - return self - - def next(self): - result = next(self.reader) - if sys.version_info[0] < 3: - for i, item in enumerate(result): - if not isinstance(item, text_type): - result[i] = item.decode('utf-8') - return result - - __next__ = next - -class CSVWriter(CSVBase): - def __init__(self, fn, **kwargs): - self.stream = _csv_open(fn, 'w') - self.writer = csv.writer(self.stream, **self.defaults) - - def writerow(self, row): - if sys.version_info[0] < 3: - r = [] - for item in row: - if isinstance(item, text_type): - item = item.encode('utf-8') - r.append(item) - row = r - self.writer.writerow(row) - -# -# Configurator functionality -# - -class Configurator(BaseConfigurator): - - value_converters = dict(BaseConfigurator.value_converters) - value_converters['inc'] = 'inc_convert' - - def __init__(self, config, base=None): - super(Configurator, self).__init__(config) - self.base = base or os.getcwd() - - def configure_custom(self, config): - def convert(o): - if isinstance(o, (list, tuple)): - result = type(o)([convert(i) for i in o]) - elif isinstance(o, dict): - if '()' in o: - result = self.configure_custom(o) - else: - result = {} - for k in o: - result[k] = convert(o[k]) - else: - result = self.convert(o) - return result - - c = config.pop('()') - if not callable(c): - c = self.resolve(c) - props = config.pop('.', None) - # Check for valid identifiers - args = config.pop('[]', ()) - if args: - args = tuple([convert(o) for o in args]) - items = [(k, convert(config[k])) for k in config if valid_ident(k)] - kwargs = dict(items) - result = c(*args, **kwargs) - if props: - for n, v in props.items(): - setattr(result, n, convert(v)) - return result - - def __getitem__(self, key): - result = self.config[key] - if isinstance(result, dict) and '()' in result: - self.config[key] = result = self.configure_custom(result) - return result - - def inc_convert(self, value): - """Default converter for the inc:// protocol.""" - if not os.path.isabs(value): - value = os.path.join(self.base, value) - with codecs.open(value, 'r', encoding='utf-8') as f: - result = json.load(f) - return result - - -class SubprocessMixin(object): - """ - Mixin for running subprocesses and capturing their output - """ - def __init__(self, verbose=False, progress=None): - self.verbose = verbose - self.progress = progress - - def reader(self, stream, context): - """ - Read lines from a subprocess' output stream and either pass to a progress - callable (if specified) or write progress information to sys.stderr. 
- """ - progress = self.progress - verbose = self.verbose - while True: - s = stream.readline() - if not s: - break - if progress is not None: - progress(s, context) - else: - if not verbose: - sys.stderr.write('.') - else: - sys.stderr.write(s.decode('utf-8')) - sys.stderr.flush() - stream.close() - - def run_command(self, cmd, **kwargs): - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, **kwargs) - t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) - t1.start() - t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) - t2.start() - p.wait() - t1.join() - t2.join() - if self.progress is not None: - self.progress('done.', 'main') - elif self.verbose: - sys.stderr.write('done.\n') - return p - - -def normalize_name(name): - """Normalize a python package name a la PEP 503""" - # https://www.python.org/dev/peps/pep-0503/#normalized-names - return re.sub('[-_.]+', '-', name).lower() - -# def _get_pypirc_command(): - # """ - # Get the distutils command for interacting with PyPI configurations. - # :return: the command. - # """ - # from distutils.core import Distribution - # from distutils.config import PyPIRCCommand - # d = Distribution() - # return PyPIRCCommand(d) - -class PyPIRCFile(object): - - DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' - DEFAULT_REALM = 'pypi' - - def __init__(self, fn=None, url=None): - if fn is None: - fn = os.path.join(os.path.expanduser('~'), '.pypirc') - self.filename = fn - self.url = url - - def read(self): - result = {} - - if os.path.exists(self.filename): - repository = self.url or self.DEFAULT_REPOSITORY - - config = configparser.RawConfigParser() - config.read(self.filename) - sections = config.sections() - if 'distutils' in sections: - # let's get the list of servers - index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] - if _servers == []: - # nothing set, let's try to get the default pypi - if 'pypi' in sections: - _servers = ['pypi'] - else: - for server in _servers: - result = {'server': server} - result['username'] = config.get(server, 'username') - - # optional params - for key, default in (('repository', self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): - if config.has_option(server, key): - result[key] = config.get(server, key) - else: - result[key] = default - - # work around people having "repository" for the "pypi" - # section of their config set to the HTTP (rather than - # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): - result['repository'] = self.DEFAULT_REPOSITORY - elif (result['server'] != repository and - result['repository'] != repository): - result = {} - elif 'server-login' in sections: - # old format - server = 'server-login' - if config.has_option(server, 'repository'): - repository = config.get(server, 'repository') - else: - repository = self.DEFAULT_REPOSITORY - result = { - 'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM - } - return result - - def update(self, username, password): - # import pdb; pdb.set_trace() - config = configparser.RawConfigParser() - fn = self.filename - config.read(fn) - if not config.has_section('pypi'): - config.add_section('pypi') - config.set('pypi', 'username', username) - config.set('pypi', 'password', password) - with open(fn, 'w') as f: - 
config.write(f) - -def _load_pypirc(index): - """ - Read the PyPI access configuration as supported by distutils. - """ - return PyPIRCFile(url=index.url).read() - -def _store_pypirc(index): - PyPIRCFile().update(index.username, index.password) - -# -# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor -# tweaks -# - -def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) - solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - - """ - if os.name == 'nt': - if 'amd64' in sys.version.lower(): - return 'win-amd64' - if '(arm)' in sys.version.lower(): - return 'win-arm32' - if '(arm64)' in sys.version.lower(): - return 'win-arm64' - return sys.platform - - # Set for cross builds explicitly - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - - if os.name != 'posix' or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - - (osname, host, release, version, machine) = os.uname() - - # Convert the OS name to lowercase, remove '/' characters, and translate - # spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_').replace('/', '-') - - if osname[:5] == 'linux': - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - - elif osname[:5] == 'sunos': - if release[0] >= '5': # SunOS 5 == Solaris 2 - osname = 'solaris' - release = '%d.%s' % (int(release[0]) - 3, release[2:]) - # We can't use 'platform.architecture()[0]' because a - # bootstrap problem. We use a dict to get an error - # if some suspicious happens. 
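normalize_name() deleted above is the PEP 503 normalization rule in a single regular expression; since it only needs the standard library, the sketch below simply restates it:

    import re

    def normalize_name(name):
        # PEP 503: runs of '-', '_' and '.' collapse to one '-', lower-cased.
        return re.sub('[-_.]+', '-', name).lower()

    assert normalize_name('Flask_SQLAlchemy') == 'flask-sqlalchemy'
    assert normalize_name('zope.interface') == 'zope-interface'
    assert normalize_name('ruamel.yaml.clib') == 'ruamel-yaml-clib'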
- bitness = {2147483647:'32bit', 9223372036854775807:'64bit'} - machine += '.%s' % bitness[sys.maxsize] - # fall through to standard osname-release-machine representation - elif osname[:3] == 'aix': - from _aix_support import aix_platform - return aix_platform() - elif osname[:6] == 'cygwin': - osname = 'cygwin' - rel_re = re.compile (r'[\d.]+', re.ASCII) - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == 'darwin': - import _osx_support, distutils.sysconfig - osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), - osname, release, machine) - - return '%s-%s-%s' % (osname, release, machine) - - -_TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', -} - - -def get_platform(): - if os.name != 'nt': - return get_host_platform() - cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') - if cross_compilation_target not in _TARGET_TO_PLAT: - return get_host_platform() - return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/version.py b/venv/Lib/site-packages/pip/_vendor/distlib/version.py deleted file mode 100644 index c7c8bb6..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/version.py +++ /dev/null @@ -1,739 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2012-2017 The Python Software Foundation. -# See LICENSE.txt and CONTRIBUTORS.txt. -# -""" -Implementation of a flexible versioning scheme providing support for PEP-440, -setuptools-compatible and semantic versioning. -""" - -import logging -import re - -from .compat import string_types -from .util import parse_requirement - -__all__ = ['NormalizedVersion', 'NormalizedMatcher', - 'LegacyVersion', 'LegacyMatcher', - 'SemanticVersion', 'SemanticMatcher', - 'UnsupportedVersionError', 'get_scheme'] - -logger = logging.getLogger(__name__) - - -class UnsupportedVersionError(ValueError): - """This is an unsupported version.""" - pass - - -class Version(object): - def __init__(self, s): - self._string = s = s.strip() - self._parts = parts = self.parse(s) - assert isinstance(parts, tuple) - assert len(parts) > 0 - - def parse(self, s): - raise NotImplementedError('please implement in a subclass') - - def _check_compatible(self, other): - if type(self) != type(other): - raise TypeError('cannot compare %r and %r' % (self, other)) - - def __eq__(self, other): - self._check_compatible(other) - return self._parts == other._parts - - def __ne__(self, other): - return not self.__eq__(other) - - def __lt__(self, other): - self._check_compatible(other) - return self._parts < other._parts - - def __gt__(self, other): - return not (self.__lt__(other) or self.__eq__(other)) - - def __le__(self, other): - return self.__lt__(other) or self.__eq__(other) - - def __ge__(self, other): - return self.__gt__(other) or self.__eq__(other) - - # See http://docs.python.org/reference/datamodel#object.__hash__ - def __hash__(self): - return hash(self._parts) - - def __repr__(self): - return "%s('%s')" % (self.__class__.__name__, self._string) - - def __str__(self): - return self._string - - @property - def is_prerelease(self): - raise NotImplementedError('Please implement in subclasses.') - - -class Matcher(object): - version_class = None - - # value is either a callable or the name of a method - _operators = { - '<': lambda v, c, p: v < c, - '>': lambda v, c, p: v > c, - '<=': lambda v, c, p: v == c or v < c, - '>=': lambda v, c, p: v == c or v > c, - '==': lambda v, c, p: v == c, - '===': lambda v, c, 
p: v == c, - # by default, compatible => >=. - '~=': lambda v, c, p: v == c or v > c, - '!=': lambda v, c, p: v != c, - } - - # this is a method only to support alternative implementations - # via overriding - def parse_requirement(self, s): - return parse_requirement(s) - - def __init__(self, s): - if self.version_class is None: - raise ValueError('Please specify a version class') - self._string = s = s.strip() - r = self.parse_requirement(s) - if not r: - raise ValueError('Not valid: %r' % s) - self.name = r.name - self.key = self.name.lower() # for case-insensitive comparisons - clist = [] - if r.constraints: - # import pdb; pdb.set_trace() - for op, s in r.constraints: - if s.endswith('.*'): - if op not in ('==', '!='): - raise ValueError('\'.*\' not allowed for ' - '%r constraints' % op) - # Could be a partial version (e.g. for '2.*') which - # won't parse as a version, so keep it as a string - vn, prefix = s[:-2], True - # Just to check that vn is a valid version - self.version_class(vn) - else: - # Should parse as a version, so we can create an - # instance for the comparison - vn, prefix = self.version_class(s), False - clist.append((op, vn, prefix)) - self._parts = tuple(clist) - - def match(self, version): - """ - Check if the provided version matches the constraints. - - :param version: The version to match against this instance. - :type version: String or :class:`Version` instance. - """ - if isinstance(version, string_types): - version = self.version_class(version) - for operator, constraint, prefix in self._parts: - f = self._operators.get(operator) - if isinstance(f, string_types): - f = getattr(self, f) - if not f: - msg = ('%r not implemented ' - 'for %s' % (operator, self.__class__.__name__)) - raise NotImplementedError(msg) - if not f(version, constraint, prefix): - return False - return True - - @property - def exact_version(self): - result = None - if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): - result = self._parts[0][1] - return result - - def _check_compatible(self, other): - if type(self) != type(other) or self.name != other.name: - raise TypeError('cannot compare %s and %s' % (self, other)) - - def __eq__(self, other): - self._check_compatible(other) - return self.key == other.key and self._parts == other._parts - - def __ne__(self, other): - return not self.__eq__(other) - - # See http://docs.python.org/reference/datamodel#object.__hash__ - def __hash__(self): - return hash(self.key) + hash(self._parts) - - def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self._string) - - def __str__(self): - return self._string - - -PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' - r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
- r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') - - -def _pep_440_key(s): - s = s.strip() - m = PEP440_VERSION_RE.match(s) - if not m: - raise UnsupportedVersionError('Not a valid version: %s' % s) - groups = m.groups() - nums = tuple(int(v) for v in groups[1].split('.')) - while len(nums) > 1 and nums[-1] == 0: - nums = nums[:-1] - - if not groups[0]: - epoch = 0 - else: - epoch = int(groups[0][:-1]) - pre = groups[4:6] - post = groups[7:9] - dev = groups[10:12] - local = groups[13] - if pre == (None, None): - pre = () - else: - pre = pre[0], int(pre[1]) - if post == (None, None): - post = () - else: - post = post[0], int(post[1]) - if dev == (None, None): - dev = () - else: - dev = dev[0], int(dev[1]) - if local is None: - local = () - else: - parts = [] - for part in local.split('.'): - # to ensure that numeric compares as > lexicographic, avoid - # comparing them directly, but encode a tuple which ensures - # correct sorting - if part.isdigit(): - part = (1, int(part)) - else: - part = (0, part) - parts.append(part) - local = tuple(parts) - if not pre: - # either before pre-release, or final release and after - if not post and dev: - # before pre-release - pre = ('a', -1) # to sort before a0 - else: - pre = ('z',) # to sort after all pre-releases - # now look at the state of post and dev. - if not post: - post = ('_',) # sort before 'a' - if not dev: - dev = ('final',) - - #print('%s -> %s' % (s, m.groups())) - return epoch, nums, pre, post, dev, local - - -_normalized_key = _pep_440_key - - -class NormalizedVersion(Version): - """A rational version. - - Good: - 1.2 # equivalent to "1.2.0" - 1.2.0 - 1.2a1 - 1.2.3a2 - 1.2.3b1 - 1.2.3c1 - 1.2.3.4 - TODO: fill this out - - Bad: - 1 # minimum two numbers - 1.2a # release level must have a release serial - 1.2.3b - """ - def parse(self, s): - result = _normalized_key(s) - # _normalized_key loses trailing zeroes in the release - # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 - # However, PEP 440 prefix matching needs it: for example, - # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). - m = PEP440_VERSION_RE.match(s) # must succeed - groups = m.groups() - self._release_clause = tuple(int(v) for v in groups[1].split('.')) - return result - - PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) - - @property - def is_prerelease(self): - return any(t[0] in self.PREREL_TAGS for t in self._parts if t) - - -def _match_prefix(x, y): - x = str(x) - y = str(y) - if x == y: - return True - if not x.startswith(y): - return False - n = len(y) - return x[n] == '.' - - -class NormalizedMatcher(Matcher): - version_class = NormalizedVersion - - # value is either a callable or the name of a method - _operators = { - '~=': '_match_compatible', - '<': '_match_lt', - '>': '_match_gt', - '<=': '_match_le', - '>=': '_match_ge', - '==': '_match_eq', - '===': '_match_arbitrary', - '!=': '_match_ne', - } - - def _adjust_local(self, version, constraint, prefix): - if prefix: - strip_local = '+' not in constraint and version._parts[-1] - else: - # both constraint and version are - # NormalizedVersion instances. - # If constraint does not have a local component, - # ensure the version doesn't, either. 
- strip_local = not constraint._parts[-1] and version._parts[-1] - if strip_local: - s = version._string.split('+', 1)[0] - version = self.version_class(s) - return version, constraint - - def _match_lt(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - if version >= constraint: - return False - release_clause = constraint._release_clause - pfx = '.'.join([str(i) for i in release_clause]) - return not _match_prefix(version, pfx) - - def _match_gt(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - if version <= constraint: - return False - release_clause = constraint._release_clause - pfx = '.'.join([str(i) for i in release_clause]) - return not _match_prefix(version, pfx) - - def _match_le(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - return version <= constraint - - def _match_ge(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - return version >= constraint - - def _match_eq(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - if not prefix: - result = (version == constraint) - else: - result = _match_prefix(version, constraint) - return result - - def _match_arbitrary(self, version, constraint, prefix): - return str(version) == str(constraint) - - def _match_ne(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - if not prefix: - result = (version != constraint) - else: - result = not _match_prefix(version, constraint) - return result - - def _match_compatible(self, version, constraint, prefix): - version, constraint = self._adjust_local(version, constraint, prefix) - if version == constraint: - return True - if version < constraint: - return False -# if not prefix: -# return True - release_clause = constraint._release_clause - if len(release_clause) > 1: - release_clause = release_clause[:-1] - pfx = '.'.join([str(i) for i in release_clause]) - return _match_prefix(version, pfx) - -_REPLACEMENTS = ( - (re.compile('[.+-]$'), ''), # remove trailing puncts - (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start - (re.compile('^[.-]'), ''), # remove leading puncts - (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses - (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) - (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) - (re.compile('[.]{2,}'), '.'), # multiple runs of '.' - (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha - (re.compile(r'\b(pre-alpha|prealpha)\b'), - 'pre.alpha'), # standardise - (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses -) - -_SUFFIX_REPLACEMENTS = ( - (re.compile('^[:~._+-]+'), ''), # remove leading puncts - (re.compile('[,*")([\\]]'), ''), # remove unwanted chars - (re.compile('[~:+_ -]'), '.'), # replace illegal chars - (re.compile('[.]{2,}'), '.'), # multiple runs of '.' - (re.compile(r'\.$'), ''), # trailing '.' -) - -_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') - - -def _suggest_semantic_version(s): - """ - Try to suggest a semantic form for a version for which - _suggest_normalized_version couldn't come up with anything. - """ - result = s.strip().lower() - for pat, repl in _REPLACEMENTS: - result = pat.sub(repl, result) - if not result: - result = '0.0.0' - - # Now look for numeric prefix, and separate it out from - # the rest. 
- #import pdb; pdb.set_trace() - m = _NUMERIC_PREFIX.match(result) - if not m: - prefix = '0.0.0' - suffix = result - else: - prefix = m.groups()[0].split('.') - prefix = [int(i) for i in prefix] - while len(prefix) < 3: - prefix.append(0) - if len(prefix) == 3: - suffix = result[m.end():] - else: - suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] - prefix = prefix[:3] - prefix = '.'.join([str(i) for i in prefix]) - suffix = suffix.strip() - if suffix: - #import pdb; pdb.set_trace() - # massage the suffix. - for pat, repl in _SUFFIX_REPLACEMENTS: - suffix = pat.sub(repl, suffix) - - if not suffix: - result = prefix - else: - sep = '-' if 'dev' in suffix else '+' - result = prefix + sep + suffix - if not is_semver(result): - result = None - return result - - -def _suggest_normalized_version(s): - """Suggest a normalized version close to the given version string. - - If you have a version string that isn't rational (i.e. NormalizedVersion - doesn't like it) then you might be able to get an equivalent (or close) - rational version from this function. - - This does a number of simple normalizations to the given string, based - on observation of versions currently in use on PyPI. Given a dump of - those version during PyCon 2009, 4287 of them: - - 2312 (53.93%) match NormalizedVersion without change - with the automatic suggestion - - 3474 (81.04%) match when using this suggestion method - - @param s {str} An irrational version string. - @returns A rational version string, or None, if couldn't determine one. - """ - try: - _normalized_key(s) - return s # already rational - except UnsupportedVersionError: - pass - - rs = s.lower() - - # part of this could use maketrans - for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), - ('beta', 'b'), ('rc', 'c'), ('-final', ''), - ('-pre', 'c'), - ('-release', ''), ('.release', ''), ('-stable', ''), - ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), - ('final', '')): - rs = rs.replace(orig, repl) - - # if something ends with dev or pre, we add a 0 - rs = re.sub(r"pre$", r"pre0", rs) - rs = re.sub(r"dev$", r"dev0", rs) - - # if we have something like "b-2" or "a.2" at the end of the - # version, that is probably beta, alpha, etc - # let's remove the dash or dot - rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) - - # 1.0-dev-r371 -> 1.0.dev371 - # 0.1-dev-r79 -> 0.1.dev79 - rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) - - # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 - rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) - - # Clean: v0.3, v1.0 - if rs.startswith('v'): - rs = rs[1:] - - # Clean leading '0's on numbers. - #TODO: unintended side-effect on, e.g., "2003.05.09" - # PyPI stats: 77 (~2%) better - rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) - - # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers - # zero. 
- # PyPI stats: 245 (7.56%) better - rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) - - # the 'dev-rNNN' tag is a dev tag - rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) - - # clean the - when used as a pre delimiter - rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) - - # a terminal "dev" or "devel" can be changed into ".dev0" - rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) - - # a terminal "dev" can be changed into ".dev0" - rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) - - # a terminal "final" or "stable" can be removed - rs = re.sub(r"(final|stable)$", "", rs) - - # The 'r' and the '-' tags are post release tags - # 0.4a1.r10 -> 0.4a1.post10 - # 0.9.33-17222 -> 0.9.33.post17222 - # 0.9.33-r17222 -> 0.9.33.post17222 - rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) - - # Clean 'r' instead of 'dev' usage: - # 0.9.33+r17222 -> 0.9.33.dev17222 - # 1.0dev123 -> 1.0.dev123 - # 1.0.git123 -> 1.0.dev123 - # 1.0.bzr123 -> 1.0.dev123 - # 0.1a0dev.123 -> 0.1a0.dev123 - # PyPI stats: ~150 (~4%) better - rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) - - # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: - # 0.2.pre1 -> 0.2c1 - # 0.2-c1 -> 0.2c1 - # 1.0preview123 -> 1.0c123 - # PyPI stats: ~21 (0.62%) better - rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) - - # Tcl/Tk uses "px" for their post release markers - rs = re.sub(r"p(\d+)$", r".post\1", rs) - - try: - _normalized_key(rs) - except UnsupportedVersionError: - rs = None - return rs - -# -# Legacy version processing (distribute-compatible) -# - -_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) -_VERSION_REPLACE = { - 'pre': 'c', - 'preview': 'c', - '-': 'final-', - 'rc': 'c', - 'dev': '@', - '': None, - '.': None, -} - - -def _legacy_key(s): - def get_parts(s): - result = [] - for p in _VERSION_PART.split(s.lower()): - p = _VERSION_REPLACE.get(p, p) - if p: - if '0' <= p[:1] <= '9': - p = p.zfill(8) - else: - p = '*' + p - result.append(p) - result.append('*final') - return result - - result = [] - for p in get_parts(s): - if p.startswith('*'): - if p < '*final': - while result and result[-1] == '*final-': - result.pop() - while result and result[-1] == '00000000': - result.pop() - result.append(p) - return tuple(result) - - -class LegacyVersion(Version): - def parse(self, s): - return _legacy_key(s) - - @property - def is_prerelease(self): - result = False - for x in self._parts: - if (isinstance(x, string_types) and x.startswith('*') and - x < '*final'): - result = True - break - return result - - -class LegacyMatcher(Matcher): - version_class = LegacyVersion - - _operators = dict(Matcher._operators) - _operators['~='] = '_match_compatible' - - numeric_re = re.compile(r'^(\d+(\.\d+)*)') - - def _match_compatible(self, version, constraint, prefix): - if version < constraint: - return False - m = self.numeric_re.match(str(constraint)) - if not m: - logger.warning('Cannot compute compatible match for version %s ' - ' and constraint %s', version, constraint) - return True - s = m.groups()[0] - if '.' in s: - s = s.rsplit('.', 1)[0] - return _match_prefix(version, s) - -# -# Semantic versioning -# - -_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' - r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
- r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) - - -def is_semver(s): - return _SEMVER_RE.match(s) - - -def _semantic_key(s): - def make_tuple(s, absent): - if s is None: - result = (absent,) - else: - parts = s[1:].split('.') - # We can't compare ints and strings on Python 3, so fudge it - # by zero-filling numeric values so simulate a numeric comparison - result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) - return result - - m = is_semver(s) - if not m: - raise UnsupportedVersionError(s) - groups = m.groups() - major, minor, patch = [int(i) for i in groups[:3]] - # choose the '|' and '*' so that versions sort correctly - pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') - return (major, minor, patch), pre, build - - -class SemanticVersion(Version): - def parse(self, s): - return _semantic_key(s) - - @property - def is_prerelease(self): - return self._parts[1][0] != '|' - - -class SemanticMatcher(Matcher): - version_class = SemanticVersion - - -class VersionScheme(object): - def __init__(self, key, matcher, suggester=None): - self.key = key - self.matcher = matcher - self.suggester = suggester - - def is_valid_version(self, s): - try: - self.matcher.version_class(s) - result = True - except UnsupportedVersionError: - result = False - return result - - def is_valid_matcher(self, s): - try: - self.matcher(s) - result = True - except UnsupportedVersionError: - result = False - return result - - def is_valid_constraint_list(self, s): - """ - Used for processing some metadata fields - """ - # See issue #140. Be tolerant of a single trailing comma. - if s.endswith(','): - s = s[:-1] - return self.is_valid_matcher('dummy_name (%s)' % s) - - def suggest(self, s): - if self.suggester is None: - result = None - else: - result = self.suggester(s) - return result - -_SCHEMES = { - 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, - _suggest_normalized_version), - 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), - 'semantic': VersionScheme(_semantic_key, SemanticMatcher, - _suggest_semantic_version), -} - -_SCHEMES['default'] = _SCHEMES['normalized'] - - -def get_scheme(name): - if name not in _SCHEMES: - raise ValueError('unknown scheme name: %r' % name) - return _SCHEMES[name] diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/w32.exe b/venv/Lib/site-packages/pip/_vendor/distlib/w32.exe deleted file mode 100644 index e6439e9..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/w32.exe and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/w64-arm.exe b/venv/Lib/site-packages/pip/_vendor/distlib/w64-arm.exe deleted file mode 100644 index 70a2ec2..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/w64-arm.exe and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/w64.exe b/venv/Lib/site-packages/pip/_vendor/distlib/w64.exe deleted file mode 100644 index 46139db..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/distlib/w64.exe and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/distlib/wheel.py b/venv/Lib/site-packages/pip/_vendor/distlib/wheel.py deleted file mode 100644 index 48abfde..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distlib/wheel.py +++ /dev/null @@ -1,1053 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2013-2020 Vinay Sajip. -# Licensed to the Python Software Foundation under a contributor agreement. -# See LICENSE.txt and CONTRIBUTORS.txt. 
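The version.py module deleted above is consumed through a small public surface: Version subclasses for ordering, Matcher subclasses for constraint checking, and get_scheme() to pick a scheme by name. A minimal usage sketch follows, assuming the vendored copy is importable as pip._vendor.distlib.version; the package name and constraint string are illustrative only.

from pip._vendor.distlib.version import NormalizedVersion, NormalizedMatcher, get_scheme

# PEP 440 ordering: pre-releases sort before the corresponding final release,
# and trailing zeros in the release clause do not affect equality.
assert NormalizedVersion('1.2.0a1') < NormalizedVersion('1.2.0')
assert NormalizedVersion('1.2') == NormalizedVersion('1.2.0')

# Constraint matching, using the "name (constraints)" requirement form that the
# module itself uses in is_valid_constraint_list().
matcher = NormalizedMatcher('example-pkg (>= 1.4, < 2.0)')
assert matcher.match('1.5.2')
assert not matcher.match('2.0.0')

# get_scheme() bundles a key function, a matcher class and a suggester.
scheme = get_scheme('normalized')
assert scheme.is_valid_version('1.0.post1')
assert scheme.suggest('1.0-final') == '1.0'   # best-effort normalisation of a legacy string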
-# -from __future__ import unicode_literals - -import base64 -import codecs -import datetime -from email import message_from_file -import hashlib -import imp -import json -import logging -import os -import posixpath -import re -import shutil -import sys -import tempfile -import zipfile - -from . import __version__, DistlibException -from .compat import sysconfig, ZipFile, fsdecode, text_type, filter -from .database import InstalledDistribution -from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME) -from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, - cached_property, get_cache_base, read_exports, tempdir, - get_platform) -from .version import NormalizedVersion, UnsupportedVersionError - -logger = logging.getLogger(__name__) - -cache = None # created when needed - -if hasattr(sys, 'pypy_version_info'): # pragma: no cover - IMP_PREFIX = 'pp' -elif sys.platform.startswith('java'): # pragma: no cover - IMP_PREFIX = 'jy' -elif sys.platform == 'cli': # pragma: no cover - IMP_PREFIX = 'ip' -else: - IMP_PREFIX = 'cp' - -VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') -if not VER_SUFFIX: # pragma: no cover - VER_SUFFIX = '%s%s' % sys.version_info[:2] -PYVER = 'py' + VER_SUFFIX -IMPVER = IMP_PREFIX + VER_SUFFIX - -ARCH = get_platform().replace('-', '_').replace('.', '_') - -ABI = sysconfig.get_config_var('SOABI') -if ABI and ABI.startswith('cpython-'): - ABI = ABI.replace('cpython-', 'cp').split('-')[0] -else: - def _derive_abi(): - parts = ['cp', VER_SUFFIX] - if sysconfig.get_config_var('Py_DEBUG'): - parts.append('d') - if sysconfig.get_config_var('WITH_PYMALLOC'): - parts.append('m') - if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: - parts.append('u') - return ''.join(parts) - ABI = _derive_abi() - del _derive_abi - -FILENAME_RE = re.compile(r''' -(?P[^-]+) --(?P\d+[^-]*) -(-(?P\d+[^-]*))? --(?P\w+\d+(\.\w+\d+)*) --(?P\w+) --(?P\w+(\.\w+)*) -\.whl$ -''', re.IGNORECASE | re.VERBOSE) - -NAME_VERSION_RE = re.compile(r''' -(?P[^-]+) --(?P\d+[^-]*) -(-(?P\d+[^-]*))?$ -''', re.IGNORECASE | re.VERBOSE) - -SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') -SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') -SHEBANG_PYTHON = b'#!python' -SHEBANG_PYTHONW = b'#!pythonw' - -if os.sep == '/': - to_posix = lambda o: o -else: - to_posix = lambda o: o.replace(os.sep, '/') - - -class Mounter(object): - def __init__(self): - self.impure_wheels = {} - self.libs = {} - - def add(self, pathname, extensions): - self.impure_wheels[pathname] = extensions - self.libs.update(extensions) - - def remove(self, pathname): - extensions = self.impure_wheels.pop(pathname) - for k, v in extensions: - if k in self.libs: - del self.libs[k] - - def find_module(self, fullname, path=None): - if fullname in self.libs: - result = self - else: - result = None - return result - - def load_module(self, fullname): - if fullname in sys.modules: - result = sys.modules[fullname] - else: - if fullname not in self.libs: - raise ImportError('unable to find extension for %s' % fullname) - result = imp.load_dynamic(fullname, self.libs[fullname]) - result.__loader__ = self - parts = fullname.rsplit('.', 1) - if len(parts) > 1: - result.__package__ = parts[0] - return result - -_hook = Mounter() - - -class Wheel(object): - """ - Class to build and install from Wheel files (PEP 427). 
- """ - - wheel_version = (1, 1) - hash_kind = 'sha256' - - def __init__(self, filename=None, sign=False, verify=False): - """ - Initialise an instance using a (valid) filename. - """ - self.sign = sign - self.should_verify = verify - self.buildver = '' - self.pyver = [PYVER] - self.abi = ['none'] - self.arch = ['any'] - self.dirname = os.getcwd() - if filename is None: - self.name = 'dummy' - self.version = '0.1' - self._filename = self.filename - else: - m = NAME_VERSION_RE.match(filename) - if m: - info = m.groupdict('') - self.name = info['nm'] - # Reinstate the local version separator - self.version = info['vn'].replace('_', '-') - self.buildver = info['bn'] - self._filename = self.filename - else: - dirname, filename = os.path.split(filename) - m = FILENAME_RE.match(filename) - if not m: - raise DistlibException('Invalid name or ' - 'filename: %r' % filename) - if dirname: - self.dirname = os.path.abspath(dirname) - self._filename = filename - info = m.groupdict('') - self.name = info['nm'] - self.version = info['vn'] - self.buildver = info['bn'] - self.pyver = info['py'].split('.') - self.abi = info['bi'].split('.') - self.arch = info['ar'].split('.') - - @property - def filename(self): - """ - Build and return a filename from the various components. - """ - if self.buildver: - buildver = '-' + self.buildver - else: - buildver = '' - pyver = '.'.join(self.pyver) - abi = '.'.join(self.abi) - arch = '.'.join(self.arch) - # replace - with _ as a local version separator - version = self.version.replace('-', '_') - return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, - pyver, abi, arch) - - @property - def exists(self): - path = os.path.join(self.dirname, self.filename) - return os.path.isfile(path) - - @property - def tags(self): - for pyver in self.pyver: - for abi in self.abi: - for arch in self.arch: - yield pyver, abi, arch - - @cached_property - def metadata(self): - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - wrapper = codecs.getreader('utf-8') - with ZipFile(pathname, 'r') as zf: - wheel_metadata = self.get_wheel_metadata(zf) - wv = wheel_metadata['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) - # if file_version < (1, 1): - # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, - # LEGACY_METADATA_FILENAME] - # else: - # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] - fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] - result = None - for fn in fns: - try: - metadata_filename = posixpath.join(info_dir, fn) - with zf.open(metadata_filename) as bf: - wf = wrapper(bf) - result = Metadata(fileobj=wf) - if result: - break - except KeyError: - pass - if not result: - raise ValueError('Invalid wheel, because metadata is ' - 'missing: looked in %s' % ', '.join(fns)) - return result - - def get_wheel_metadata(self, zf): - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - metadata_filename = posixpath.join(info_dir, 'WHEEL') - with zf.open(metadata_filename) as bf: - wf = codecs.getreader('utf-8')(bf) - message = message_from_file(wf) - return dict(message) - - @cached_property - def info(self): - pathname = os.path.join(self.dirname, self.filename) - with ZipFile(pathname, 'r') as zf: - result = self.get_wheel_metadata(zf) - return result - - def process_shebang(self, data): - m = SHEBANG_RE.match(data) - if m: - end = m.end() - shebang, data_after_shebang = data[:end], data[end:] - # Preserve any 
arguments after the interpreter - if b'pythonw' in shebang.lower(): - shebang_python = SHEBANG_PYTHONW - else: - shebang_python = SHEBANG_PYTHON - m = SHEBANG_DETAIL_RE.match(shebang) - if m: - args = b' ' + m.groups()[-1] - else: - args = b'' - shebang = shebang_python + args - data = shebang + data_after_shebang - else: - cr = data.find(b'\r') - lf = data.find(b'\n') - if cr < 0 or cr > lf: - term = b'\n' - else: - if data[cr:cr + 2] == b'\r\n': - term = b'\r\n' - else: - term = b'\r' - data = SHEBANG_PYTHON + term + data - return data - - def get_hash(self, data, hash_kind=None): - if hash_kind is None: - hash_kind = self.hash_kind - try: - hasher = getattr(hashlib, hash_kind) - except AttributeError: - raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) - result = hasher(data).digest() - result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') - return hash_kind, result - - def write_record(self, records, record_path, base): - records = list(records) # make a copy, as mutated - p = to_posix(os.path.relpath(record_path, base)) - records.append((p, '', '')) - with CSVWriter(record_path) as writer: - for row in records: - writer.writerow(row) - - def write_records(self, info, libdir, archive_paths): - records = [] - distinfo, info_dir = info - hasher = getattr(hashlib, self.hash_kind) - for ap, p in archive_paths: - with open(p, 'rb') as f: - data = f.read() - digest = '%s=%s' % self.get_hash(data) - size = os.path.getsize(p) - records.append((ap, digest, size)) - - p = os.path.join(distinfo, 'RECORD') - self.write_record(records, p, libdir) - ap = to_posix(os.path.join(info_dir, 'RECORD')) - archive_paths.append((ap, p)) - - def build_zip(self, pathname, archive_paths): - with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: - for ap, p in archive_paths: - logger.debug('Wrote %s to %s in wheel', p, ap) - zf.write(p, ap) - - def build(self, paths, tags=None, wheel_version=None): - """ - Build a wheel from files in specified paths, and use any specified tags - when determining the name of the wheel. - """ - if tags is None: - tags = {} - - libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] - if libkey == 'platlib': - is_pure = 'false' - default_pyver = [IMPVER] - default_abi = [ABI] - default_arch = [ARCH] - else: - is_pure = 'true' - default_pyver = [PYVER] - default_abi = ['none'] - default_arch = ['any'] - - self.pyver = tags.get('pyver', default_pyver) - self.abi = tags.get('abi', default_abi) - self.arch = tags.get('arch', default_arch) - - libdir = paths[libkey] - - name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver - info_dir = '%s.dist-info' % name_ver - - archive_paths = [] - - # First, stuff which is not in site-packages - for key in ('data', 'headers', 'scripts'): - if key not in paths: - continue - path = paths[key] - if os.path.isdir(path): - for root, dirs, files in os.walk(path): - for fn in files: - p = fsdecode(os.path.join(root, fn)) - rp = os.path.relpath(p, path) - ap = to_posix(os.path.join(data_dir, key, rp)) - archive_paths.append((ap, p)) - if key == 'scripts' and not p.endswith('.exe'): - with open(p, 'rb') as f: - data = f.read() - data = self.process_shebang(data) - with open(p, 'wb') as f: - f.write(data) - - # Now, stuff which is in site-packages, other than the - # distinfo stuff. 
- path = libdir - distinfo = None - for root, dirs, files in os.walk(path): - if root == path: - # At the top level only, save distinfo for later - # and skip it for now - for i, dn in enumerate(dirs): - dn = fsdecode(dn) - if dn.endswith('.dist-info'): - distinfo = os.path.join(root, dn) - del dirs[i] - break - assert distinfo, '.dist-info directory expected, not found' - - for fn in files: - # comment out next suite to leave .pyc files in - if fsdecode(fn).endswith(('.pyc', '.pyo')): - continue - p = os.path.join(root, fn) - rp = to_posix(os.path.relpath(p, path)) - archive_paths.append((rp, p)) - - # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. - files = os.listdir(distinfo) - for fn in files: - if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): - p = fsdecode(os.path.join(distinfo, fn)) - ap = to_posix(os.path.join(info_dir, fn)) - archive_paths.append((ap, p)) - - wheel_metadata = [ - 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), - 'Generator: distlib %s' % __version__, - 'Root-Is-Purelib: %s' % is_pure, - ] - for pyver, abi, arch in self.tags: - wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) - p = os.path.join(distinfo, 'WHEEL') - with open(p, 'w') as f: - f.write('\n'.join(wheel_metadata)) - ap = to_posix(os.path.join(info_dir, 'WHEEL')) - archive_paths.append((ap, p)) - - # sort the entries by archive path. Not needed by any spec, but it - # keeps the archive listing and RECORD tidier than they would otherwise - # be. Use the number of path segments to keep directory entries together, - # and keep the dist-info stuff at the end. - def sorter(t): - ap = t[0] - n = ap.count('/') - if '.dist-info' in ap: - n += 10000 - return (n, ap) - archive_paths = sorted(archive_paths, key=sorter) - - # Now, at last, RECORD. - # Paths in here are archive paths - nothing else makes sense. - self.write_records((distinfo, info_dir), libdir, archive_paths) - # Now, ready to build the zip file - pathname = os.path.join(self.dirname, self.filename) - self.build_zip(pathname, archive_paths) - return pathname - - def skip_entry(self, arcname): - """ - Determine whether an archive entry should be skipped when verifying - or installing. - """ - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - # We also skip directories, as they won't be in RECORD - # either. See: - # - # https://github.com/pypa/wheel/issues/294 - # https://github.com/pypa/wheel/issues/287 - # https://github.com/pypa/wheel/pull/289 - # - return arcname.endswith(('/', '/RECORD.jws')) - - def install(self, paths, maker, **kwargs): - """ - Install a wheel to the specified paths. If kwarg ``warner`` is - specified, it should be a callable, which will be called with two - tuples indicating the wheel version of this software and the wheel - version in the file, if there is a discrepancy in the versions. - This can be used to issue any warnings to raise any exceptions. - If kwarg ``lib_only`` is True, only the purelib/platlib files are - installed, and the headers, scripts, data and dist-info metadata are - not written. If kwarg ``bytecode_hashed_invalidation`` is True, written - bytecode will try to use file-hash based invalidation (PEP-552) on - supported interpreter versions (CPython 2.7+). - - The return value is a :class:`InstalledDistribution` instance unless - ``options.lib_only`` is True, in which case the return value is ``None``. 
- """ - - dry_run = maker.dry_run - warner = kwargs.get('warner') - lib_only = kwargs.get('lib_only', False) - bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) - - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver - info_dir = '%s.dist-info' % name_ver - - metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) - wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') - record_name = posixpath.join(info_dir, 'RECORD') - - wrapper = codecs.getreader('utf-8') - - with ZipFile(pathname, 'r') as zf: - with zf.open(wheel_metadata_name) as bwf: - wf = wrapper(bwf) - message = message_from_file(wf) - wv = message['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) - if (file_version != self.wheel_version) and warner: - warner(self.wheel_version, file_version) - - if message['Root-Is-Purelib'] == 'true': - libdir = paths['purelib'] - else: - libdir = paths['platlib'] - - records = {} - with zf.open(record_name) as bf: - with CSVReader(stream=bf) as reader: - for row in reader: - p = row[0] - records[p] = row - - data_pfx = posixpath.join(data_dir, '') - info_pfx = posixpath.join(info_dir, '') - script_pfx = posixpath.join(data_dir, 'scripts', '') - - # make a new instance rather than a copy of maker's, - # as we mutate it - fileop = FileOperator(dry_run=dry_run) - fileop.record = True # so we can rollback if needed - - bc = not sys.dont_write_bytecode # Double negatives. Lovely! - - outfiles = [] # for RECORD writing - - # for script copying/shebang processing - workdir = tempfile.mkdtemp() - # set target dir later - # we default add_launchers to False, as the - # Python Launcher should be used instead - maker.source_dir = workdir - maker.target_dir = None - try: - for zinfo in zf.infolist(): - arcname = zinfo.filename - if isinstance(arcname, text_type): - u_arcname = arcname - else: - u_arcname = arcname.decode('utf-8') - if self.skip_entry(u_arcname): - continue - row = records[u_arcname] - if row[2] and str(zinfo.file_size) != row[2]: - raise DistlibException('size mismatch for ' - '%s' % u_arcname) - if row[1]: - kind, value = row[1].split('=', 1) - with zf.open(arcname) as bf: - data = bf.read() - _, digest = self.get_hash(data, kind) - if digest != value: - raise DistlibException('digest mismatch for ' - '%s' % arcname) - - if lib_only and u_arcname.startswith((info_pfx, data_pfx)): - logger.debug('lib_only: skipping %s', u_arcname) - continue - is_script = (u_arcname.startswith(script_pfx) - and not u_arcname.endswith('.exe')) - - if u_arcname.startswith(data_pfx): - _, where, rp = u_arcname.split('/', 2) - outfile = os.path.join(paths[where], convert_path(rp)) - else: - # meant for site-packages. - if u_arcname in (wheel_metadata_name, record_name): - continue - outfile = os.path.join(libdir, convert_path(u_arcname)) - if not is_script: - with zf.open(arcname) as bf: - fileop.copy_stream(bf, outfile) - # Issue #147: permission bits aren't preserved. Using - # zf.extract(zinfo, libdir) should have worked, but didn't, - # see https://www.thetopsites.net/article/53834422.shtml - # So ... 
manually preserve permission bits as given in zinfo - if os.name == 'posix': - # just set the normal permission bits - os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) - outfiles.append(outfile) - # Double check the digest of the written file - if not dry_run and row[1]: - with open(outfile, 'rb') as bf: - data = bf.read() - _, newdigest = self.get_hash(data, kind) - if newdigest != digest: - raise DistlibException('digest mismatch ' - 'on write for ' - '%s' % outfile) - if bc and outfile.endswith('.py'): - try: - pyc = fileop.byte_compile(outfile, - hashed_invalidation=bc_hashed_invalidation) - outfiles.append(pyc) - except Exception: - # Don't give up if byte-compilation fails, - # but log it and perhaps warn the user - logger.warning('Byte-compilation failed', - exc_info=True) - else: - fn = os.path.basename(convert_path(arcname)) - workname = os.path.join(workdir, fn) - with zf.open(arcname) as bf: - fileop.copy_stream(bf, workname) - - dn, fn = os.path.split(outfile) - maker.target_dir = dn - filenames = maker.make(fn) - fileop.set_executable_mode(filenames) - outfiles.extend(filenames) - - if lib_only: - logger.debug('lib_only: returning None') - dist = None - else: - # Generate scripts - - # Try to get pydist.json so we can see if there are - # any commands to generate. If this fails (e.g. because - # of a legacy wheel), log a warning but don't give up. - commands = None - file_version = self.info['Wheel-Version'] - if file_version == '1.0': - # Use legacy info - ep = posixpath.join(info_dir, 'entry_points.txt') - try: - with zf.open(ep) as bwf: - epdata = read_exports(bwf) - commands = {} - for key in ('console', 'gui'): - k = '%s_scripts' % key - if k in epdata: - commands['wrap_%s' % key] = d = {} - for v in epdata[k].values(): - s = '%s:%s' % (v.prefix, v.suffix) - if v.flags: - s += ' [%s]' % ','.join(v.flags) - d[v.name] = s - except Exception: - logger.warning('Unable to read legacy script ' - 'metadata, so cannot generate ' - 'scripts') - else: - try: - with zf.open(metadata_name) as bwf: - wf = wrapper(bwf) - commands = json.load(wf).get('extensions') - if commands: - commands = commands.get('python.commands') - except Exception: - logger.warning('Unable to read JSON metadata, so ' - 'cannot generate scripts') - if commands: - console_scripts = commands.get('wrap_console', {}) - gui_scripts = commands.get('wrap_gui', {}) - if console_scripts or gui_scripts: - script_dir = paths.get('scripts', '') - if not os.path.isdir(script_dir): - raise ValueError('Valid script path not ' - 'specified') - maker.target_dir = script_dir - for k, v in console_scripts.items(): - script = '%s = %s' % (k, v) - filenames = maker.make(script) - fileop.set_executable_mode(filenames) - - if gui_scripts: - options = {'gui': True } - for k, v in gui_scripts.items(): - script = '%s = %s' % (k, v) - filenames = maker.make(script, options) - fileop.set_executable_mode(filenames) - - p = os.path.join(libdir, info_dir) - dist = InstalledDistribution(p) - - # Write SHARED - paths = dict(paths) # don't change passed in dict - del paths['purelib'] - del paths['platlib'] - paths['lib'] = libdir - p = dist.write_shared_locations(paths, dry_run) - if p: - outfiles.append(p) - - # Write RECORD - dist.write_installed_files(outfiles, paths['prefix'], - dry_run) - return dist - except Exception: # pragma: no cover - logger.exception('installation failed.') - fileop.rollback() - raise - finally: - shutil.rmtree(workdir) - - def _get_dylib_cache(self): - global cache - if cache is None: - # Use native string 
to avoid issues on 2.x: see Python #20140. - base = os.path.join(get_cache_base(), str('dylib-cache'), - '%s.%s' % sys.version_info[:2]) - cache = Cache(base) - return cache - - def _get_extensions(self): - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - arcname = posixpath.join(info_dir, 'EXTENSIONS') - wrapper = codecs.getreader('utf-8') - result = [] - with ZipFile(pathname, 'r') as zf: - try: - with zf.open(arcname) as bf: - wf = wrapper(bf) - extensions = json.load(wf) - cache = self._get_dylib_cache() - prefix = cache.prefix_to_dir(pathname) - cache_base = os.path.join(cache.base, prefix) - if not os.path.isdir(cache_base): - os.makedirs(cache_base) - for name, relpath in extensions.items(): - dest = os.path.join(cache_base, convert_path(relpath)) - if not os.path.exists(dest): - extract = True - else: - file_time = os.stat(dest).st_mtime - file_time = datetime.datetime.fromtimestamp(file_time) - info = zf.getinfo(relpath) - wheel_time = datetime.datetime(*info.date_time) - extract = wheel_time > file_time - if extract: - zf.extract(relpath, cache_base) - result.append((name, dest)) - except KeyError: - pass - return result - - def is_compatible(self): - """ - Determine if a wheel is compatible with the running system. - """ - return is_compatible(self) - - def is_mountable(self): - """ - Determine if a wheel is asserted as mountable by its metadata. - """ - return True # for now - metadata details TBD - - def mount(self, append=False): - pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) - if not self.is_compatible(): - msg = 'Wheel %s not compatible with this Python.' % pathname - raise DistlibException(msg) - if not self.is_mountable(): - msg = 'Wheel %s is marked as not mountable.' 
% pathname - raise DistlibException(msg) - if pathname in sys.path: - logger.debug('%s already in path', pathname) - else: - if append: - sys.path.append(pathname) - else: - sys.path.insert(0, pathname) - extensions = self._get_extensions() - if extensions: - if _hook not in sys.meta_path: - sys.meta_path.append(_hook) - _hook.add(pathname, extensions) - - def unmount(self): - pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) - if pathname not in sys.path: - logger.debug('%s not in path', pathname) - else: - sys.path.remove(pathname) - if pathname in _hook.impure_wheels: - _hook.remove(pathname) - if not _hook.impure_wheels: - if _hook in sys.meta_path: - sys.meta_path.remove(_hook) - - def verify(self): - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver - info_dir = '%s.dist-info' % name_ver - - metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) - wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') - record_name = posixpath.join(info_dir, 'RECORD') - - wrapper = codecs.getreader('utf-8') - - with ZipFile(pathname, 'r') as zf: - with zf.open(wheel_metadata_name) as bwf: - wf = wrapper(bwf) - message = message_from_file(wf) - wv = message['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) - # TODO version verification - - records = {} - with zf.open(record_name) as bf: - with CSVReader(stream=bf) as reader: - for row in reader: - p = row[0] - records[p] = row - - for zinfo in zf.infolist(): - arcname = zinfo.filename - if isinstance(arcname, text_type): - u_arcname = arcname - else: - u_arcname = arcname.decode('utf-8') - # See issue #115: some wheels have .. in their entries, but - # in the filename ... e.g. __main__..py ! So the check is - # updated to look for .. in the directory portions - p = u_arcname.split('/') - if '..' in p: - raise DistlibException('invalid entry in ' - 'wheel: %r' % u_arcname) - - if self.skip_entry(u_arcname): - continue - row = records[u_arcname] - if row[2] and str(zinfo.file_size) != row[2]: - raise DistlibException('size mismatch for ' - '%s' % u_arcname) - if row[1]: - kind, value = row[1].split('=', 1) - with zf.open(arcname) as bf: - data = bf.read() - _, digest = self.get_hash(data, kind) - if digest != value: - raise DistlibException('digest mismatch for ' - '%s' % arcname) - - def update(self, modifier, dest_dir=None, **kwargs): - """ - Update the contents of a wheel in a generic way. The modifier should - be a callable which expects a dictionary argument: its keys are - archive-entry paths, and its values are absolute filesystem paths - where the contents the corresponding archive entries can be found. The - modifier is free to change the contents of the files pointed to, add - new entries and remove entries, before returning. This method will - extract the entire contents of the wheel to a temporary location, call - the modifier, and then use the passed (and possibly updated) - dictionary to write a new wheel. If ``dest_dir`` is specified, the new - wheel is written there -- otherwise, the original wheel is overwritten. - - The modifier should return True if it updated the wheel, else False. - This method returns the same value the modifier returns. 
- """ - - def get_version(path_map, info_dir): - version = path = None - key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) - if key not in path_map: - key = '%s/PKG-INFO' % info_dir - if key in path_map: - path = path_map[key] - version = Metadata(path=path).version - return version, path - - def update_version(version, path): - updated = None - try: - v = NormalizedVersion(version) - i = version.find('-') - if i < 0: - updated = '%s+1' % version - else: - parts = [int(s) for s in version[i + 1:].split('.')] - parts[-1] += 1 - updated = '%s+%s' % (version[:i], - '.'.join(str(i) for i in parts)) - except UnsupportedVersionError: - logger.debug('Cannot update non-compliant (PEP-440) ' - 'version %r', version) - if updated: - md = Metadata(path=path) - md.version = updated - legacy = path.endswith(LEGACY_METADATA_FILENAME) - md.write(path=path, legacy=legacy) - logger.debug('Version updated from %r to %r', version, - updated) - - pathname = os.path.join(self.dirname, self.filename) - name_ver = '%s-%s' % (self.name, self.version) - info_dir = '%s.dist-info' % name_ver - record_name = posixpath.join(info_dir, 'RECORD') - with tempdir() as workdir: - with ZipFile(pathname, 'r') as zf: - path_map = {} - for zinfo in zf.infolist(): - arcname = zinfo.filename - if isinstance(arcname, text_type): - u_arcname = arcname - else: - u_arcname = arcname.decode('utf-8') - if u_arcname == record_name: - continue - if '..' in u_arcname: - raise DistlibException('invalid entry in ' - 'wheel: %r' % u_arcname) - zf.extract(zinfo, workdir) - path = os.path.join(workdir, convert_path(u_arcname)) - path_map[u_arcname] = path - - # Remember the version. - original_version, _ = get_version(path_map, info_dir) - # Files extracted. Call the modifier. - modified = modifier(path_map, **kwargs) - if modified: - # Something changed - need to build a new wheel. - current_version, path = get_version(path_map, info_dir) - if current_version and (current_version == original_version): - # Add or update local version to signify changes. - update_version(current_version, path) - # Decide where the new wheel goes. - if dest_dir is None: - fd, newpath = tempfile.mkstemp(suffix='.whl', - prefix='wheel-update-', - dir=workdir) - os.close(fd) - else: - if not os.path.isdir(dest_dir): - raise DistlibException('Not a directory: %r' % dest_dir) - newpath = os.path.join(dest_dir, self.filename) - archive_paths = list(path_map.items()) - distinfo = os.path.join(workdir, info_dir) - info = distinfo, info_dir - self.write_records(info, workdir, archive_paths) - self.build_zip(newpath, archive_paths) - if dest_dir is None: - shutil.copyfile(newpath, pathname) - return modified - -def _get_glibc_version(): - import platform - ver = platform.libc_ver() - result = [] - if ver[0] == 'glibc': - for s in ver[1].split('.'): - result.append(int(s) if s.isdigit() else 0) - result = tuple(result) - return result - -def compatible_tags(): - """ - Return (pyver, abi, arch) tuples compatible with this Python. 
- """ - versions = [VER_SUFFIX] - major = VER_SUFFIX[0] - for minor in range(sys.version_info[1] - 1, - 1, -1): - versions.append(''.join([major, str(minor)])) - - abis = [] - for suffix, _, _ in imp.get_suffixes(): - if suffix.startswith('.abi'): - abis.append(suffix.split('.', 2)[1]) - abis.sort() - if ABI != 'none': - abis.insert(0, ABI) - abis.append('none') - result = [] - - arches = [ARCH] - if sys.platform == 'darwin': - m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) - if m: - name, major, minor, arch = m.groups() - minor = int(minor) - matches = [arch] - if arch in ('i386', 'ppc'): - matches.append('fat') - if arch in ('i386', 'ppc', 'x86_64'): - matches.append('fat3') - if arch in ('ppc64', 'x86_64'): - matches.append('fat64') - if arch in ('i386', 'x86_64'): - matches.append('intel') - if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): - matches.append('universal') - while minor >= 0: - for match in matches: - s = '%s_%s_%s_%s' % (name, major, minor, match) - if s != ARCH: # already there - arches.append(s) - minor -= 1 - - # Most specific - our Python version, ABI and arch - for abi in abis: - for arch in arches: - result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) - # manylinux - if abi != 'none' and sys.platform.startswith('linux'): - arch = arch.replace('linux_', '') - parts = _get_glibc_version() - if len(parts) == 2: - if parts >= (2, 5): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux1_%s' % arch)) - if parts >= (2, 12): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux2010_%s' % arch)) - if parts >= (2, 17): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux2014_%s' % arch)) - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux_%s_%s_%s' % (parts[0], parts[1], - arch))) - - # where no ABI / arch dependency, but IMP_PREFIX dependency - for i, version in enumerate(versions): - result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) - if i == 0: - result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) - - # no IMP_PREFIX, ABI or arch dependency - for i, version in enumerate(versions): - result.append((''.join(('py', version)), 'none', 'any')) - if i == 0: - result.append((''.join(('py', version[0])), 'none', 'any')) - - return set(result) - - -COMPATIBLE_TAGS = compatible_tags() - -del compatible_tags - - -def is_compatible(wheel, tags=None): - if not isinstance(wheel, Wheel): - wheel = Wheel(wheel) # assume it's a filename - result = False - if tags is None: - tags = COMPATIBLE_TAGS - for ver, abi, arch in tags: - if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: - result = True - break - return result diff --git a/venv/Lib/site-packages/pip/_vendor/distro.py b/venv/Lib/site-packages/pip/_vendor/distro.py deleted file mode 100644 index 7892741..0000000 --- a/venv/Lib/site-packages/pip/_vendor/distro.py +++ /dev/null @@ -1,1386 +0,0 @@ -# Copyright 2015,2016,2017 Nir Cohen -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -""" -The ``distro`` package (``distro`` stands for Linux Distribution) provides -information about the Linux distribution it runs on, such as a reliable -machine-readable distro ID, or version information. - -It is the recommended replacement for Python's original -:py:func:`platform.linux_distribution` function, but it provides much more -functionality. An alternative implementation became necessary because Python -3.5 deprecated this function, and Python 3.8 removed it altogether. Its -predecessor function :py:func:`platform.dist` was already deprecated since -Python 2.6 and removed in Python 3.8. Still, there are many cases in which -access to OS distribution information is needed. See `Python issue 1322 -`_ for more information. -""" - -import argparse -import json -import logging -import os -import re -import shlex -import subprocess -import sys -import warnings - -__version__ = "1.6.0" - -# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2 -# support, can use typing.TYPE_CHECKING instead. See: -# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING -if False: # pragma: nocover - from typing import ( - Any, - Callable, - Dict, - Iterable, - Optional, - Sequence, - TextIO, - Tuple, - Type, - TypedDict, - Union, - ) - - VersionDict = TypedDict( - "VersionDict", {"major": str, "minor": str, "build_number": str} - ) - InfoDict = TypedDict( - "InfoDict", - { - "id": str, - "version": str, - "version_parts": VersionDict, - "like": str, - "codename": str, - }, - ) - - -_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") -_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") -_OS_RELEASE_BASENAME = "os-release" - -#: Translation table for normalizing the "ID" attribute defined in os-release -#: files, for use by the :func:`distro.id` method. -#: -#: * Key: Value as defined in the os-release file, translated to lower case, -#: with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_OS_ID = { - "ol": "oracle", # Oracle Linux -} - -#: Translation table for normalizing the "Distributor ID" attribute returned by -#: the lsb_release command, for use by the :func:`distro.id` method. -#: -#: * Key: Value as returned by the lsb_release command, translated to lower -#: case, with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_LSB_ID = { - "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 - "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 - "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation - "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server - "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode -} - -#: Translation table for normalizing the distro ID derived from the file name -#: of distro release files, for use by the :func:`distro.id` method. -#: -#: * Key: Value as derived from the file name of a distro release file, -#: translated to lower case, with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_DISTRO_ID = { - "redhat": "rhel", # RHEL 6.x, 7.x -} - -# Pattern for content of distro release file (reversed) -_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( - r"(?:[^)]*\)(.*)\()? 
*(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" -) - -# Pattern for base file name of distro release file -_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") - -# Base file names to be ignored when searching for distro release file -_DISTRO_RELEASE_IGNORE_BASENAMES = ( - "debian_version", - "lsb-release", - "oem-release", - _OS_RELEASE_BASENAME, - "system-release", - "plesk-release", - "iredmail-release", -) - - -def linux_distribution(full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] - """ - .. deprecated:: 1.6.0 - - :func:`distro.linux_distribution()` is deprecated. It should only be - used as a compatibility shim with Python's - :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, - :func:`distro.version` and :func:`distro.name` instead. - - Return information about the current OS distribution as a tuple - ``(id_name, version, codename)`` with items as follows: - - * ``id_name``: If *full_distribution_name* is false, the result of - :func:`distro.id`. Otherwise, the result of :func:`distro.name`. - - * ``version``: The result of :func:`distro.version`. - - * ``codename``: The result of :func:`distro.codename`. - - The interface of this function is compatible with the original - :py:func:`platform.linux_distribution` function, supporting a subset of - its parameters. - - The data it returns may not exactly be the same, because it uses more data - sources than the original function, and that may lead to different data if - the OS distribution is not consistent across multiple data sources it - provides (there are indeed such distributions ...). - - Another reason for differences is the fact that the :func:`distro.id` - method normalizes the distro ID string to a reliable machine-readable value - for a number of popular OS distributions. - """ - warnings.warn( - "distro.linux_distribution() is deprecated. It should only be used as a " - "compatibility shim with Python's platform.linux_distribution(). Please use " - "distro.id(), distro.version() and distro.name() instead.", - DeprecationWarning, - stacklevel=2, - ) - return _distro.linux_distribution(full_distribution_name) - - -def id(): - # type: () -> str - """ - Return the distro ID of the current distribution, as a - machine-readable string. - - For a number of OS distributions, the returned distro ID value is - *reliable*, in the sense that it is documented and that it does not change - across releases of the distribution. 
- - This package maintains the following reliable distro ID values: - - ============== ========================================= - Distro ID Distribution - ============== ========================================= - "ubuntu" Ubuntu - "debian" Debian - "rhel" RedHat Enterprise Linux - "centos" CentOS - "fedora" Fedora - "sles" SUSE Linux Enterprise Server - "opensuse" openSUSE - "amazon" Amazon Linux - "arch" Arch Linux - "cloudlinux" CloudLinux OS - "exherbo" Exherbo Linux - "gentoo" GenToo Linux - "ibm_powerkvm" IBM PowerKVM - "kvmibm" KVM for IBM z Systems - "linuxmint" Linux Mint - "mageia" Mageia - "mandriva" Mandriva Linux - "parallels" Parallels - "pidora" Pidora - "raspbian" Raspbian - "oracle" Oracle Linux (and Oracle Enterprise Linux) - "scientific" Scientific Linux - "slackware" Slackware - "xenserver" XenServer - "openbsd" OpenBSD - "netbsd" NetBSD - "freebsd" FreeBSD - "midnightbsd" MidnightBSD - ============== ========================================= - - If you have a need to get distros for reliable IDs added into this set, - or if you find that the :func:`distro.id` function returns a different - distro ID for one of the listed distros, please create an issue in the - `distro issue tracker`_. - - **Lookup hierarchy and transformations:** - - First, the ID is obtained from the following sources, in the specified - order. The first available and non-empty value is used: - - * the value of the "ID" attribute of the os-release file, - - * the value of the "Distributor ID" attribute returned by the lsb_release - command, - - * the first part of the file name of the distro release file, - - The so determined ID value then passes the following transformations, - before it is returned by this method: - - * it is translated to lower case, - - * blanks (which should not be there anyway) are translated to underscores, - - * a normalization of the ID is performed, based upon - `normalization tables`_. The purpose of this normalization is to ensure - that the ID is as reliable as possible, even across incompatible changes - in the OS distributions. A common reason for an incompatible change is - the addition of an os-release file, or the addition of the lsb_release - command, with ID values that differ from what was previously determined - from the distro release file name. - """ - return _distro.id() - - -def name(pretty=False): - # type: (bool) -> str - """ - Return the name of the current OS distribution, as a human-readable - string. - - If *pretty* is false, the name is returned without version or codename. - (e.g. "CentOS Linux") - - If *pretty* is true, the version and codename are appended. - (e.g. "CentOS Linux 7.1.1503 (Core)") - - **Lookup hierarchy:** - - The name is obtained from the following sources, in the specified order. - The first available and non-empty value is used: - - * If *pretty* is false: - - - the value of the "NAME" attribute of the os-release file, - - - the value of the "Distributor ID" attribute returned by the lsb_release - command, - - - the value of the "" field of the distro release file. - - * If *pretty* is true: - - - the value of the "PRETTY_NAME" attribute of the os-release file, - - - the value of the "Description" attribute returned by the lsb_release - command, - - - the value of the "" field of the distro release file, appended - with the value of the pretty version ("" and "" - fields) of the distro release file, if available. 
- """ - return _distro.name(pretty) - - -def version(pretty=False, best=False): - # type: (bool, bool) -> str - """ - Return the version of the current OS distribution, as a human-readable - string. - - If *pretty* is false, the version is returned without codename (e.g. - "7.0"). - - If *pretty* is true, the codename in parenthesis is appended, if the - codename is non-empty (e.g. "7.0 (Maipo)"). - - Some distributions provide version numbers with different precisions in - the different sources of distribution information. Examining the different - sources in a fixed priority order does not always yield the most precise - version (e.g. for Debian 8.2, or CentOS 7.1). - - The *best* parameter can be used to control the approach for the returned - version: - - If *best* is false, the first non-empty version number in priority order of - the examined sources is returned. - - If *best* is true, the most precise version number out of all examined - sources is returned. - - **Lookup hierarchy:** - - In all cases, the version number is obtained from the following sources. - If *best* is false, this order represents the priority order: - - * the value of the "VERSION_ID" attribute of the os-release file, - * the value of the "Release" attribute returned by the lsb_release - command, - * the version number parsed from the "" field of the first line - of the distro release file, - * the version number parsed from the "PRETTY_NAME" attribute of the - os-release file, if it follows the format of the distro release files. - * the version number parsed from the "Description" attribute returned by - the lsb_release command, if it follows the format of the distro release - files. - """ - return _distro.version(pretty, best) - - -def version_parts(best=False): - # type: (bool) -> Tuple[str, str, str] - """ - Return the version of the current OS distribution as a tuple - ``(major, minor, build_number)`` with items as follows: - - * ``major``: The result of :func:`distro.major_version`. - - * ``minor``: The result of :func:`distro.minor_version`. - - * ``build_number``: The result of :func:`distro.build_number`. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.version_parts(best) - - -def major_version(best=False): - # type: (bool) -> str - """ - Return the major version of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The major version is the first - part of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.major_version(best) - - -def minor_version(best=False): - # type: (bool) -> str - """ - Return the minor version of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The minor version is the second - part of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.minor_version(best) - - -def build_number(best=False): - # type: (bool) -> str - """ - Return the build number of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The build number is the third part - of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. 
- """ - return _distro.build_number(best) - - -def like(): - # type: () -> str - """ - Return a space-separated list of distro IDs of distributions that are - closely related to the current OS distribution in regards to packaging - and programming interfaces, for example distributions the current - distribution is a derivative from. - - **Lookup hierarchy:** - - This information item is only provided by the os-release file. - For details, see the description of the "ID_LIKE" attribute in the - `os-release man page - `_. - """ - return _distro.like() - - -def codename(): - # type: () -> str - """ - Return the codename for the release of the current OS distribution, - as a string. - - If the distribution does not have a codename, an empty string is returned. - - Note that the returned codename is not always really a codename. For - example, openSUSE returns "x86_64". This function does not handle such - cases in any special way and just returns the string it finds, if any. - - **Lookup hierarchy:** - - * the codename within the "VERSION" attribute of the os-release file, if - provided, - - * the value of the "Codename" attribute returned by the lsb_release - command, - - * the value of the "" field of the distro release file. - """ - return _distro.codename() - - -def info(pretty=False, best=False): - # type: (bool, bool) -> InfoDict - """ - Return certain machine-readable information items about the current OS - distribution in a dictionary, as shown in the following example: - - .. sourcecode:: python - - { - 'id': 'rhel', - 'version': '7.0', - 'version_parts': { - 'major': '7', - 'minor': '0', - 'build_number': '' - }, - 'like': 'fedora', - 'codename': 'Maipo' - } - - The dictionary structure and keys are always the same, regardless of which - information items are available in the underlying data sources. The values - for the various keys are as follows: - - * ``id``: The result of :func:`distro.id`. - - * ``version``: The result of :func:`distro.version`. - - * ``version_parts -> major``: The result of :func:`distro.major_version`. - - * ``version_parts -> minor``: The result of :func:`distro.minor_version`. - - * ``version_parts -> build_number``: The result of - :func:`distro.build_number`. - - * ``like``: The result of :func:`distro.like`. - - * ``codename``: The result of :func:`distro.codename`. - - For a description of the *pretty* and *best* parameters, see the - :func:`distro.version` method. - """ - return _distro.info(pretty, best) - - -def os_release_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the os-release file data source of the current OS distribution. - - See `os-release file`_ for details about these information items. - """ - return _distro.os_release_info() - - -def lsb_release_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the lsb_release command data source of the current OS distribution. - - See `lsb_release command output`_ for details about these information - items. - """ - return _distro.lsb_release_info() - - -def distro_release_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current OS distribution. - - See `distro release file`_ for details about these information items. 
- """ - return _distro.distro_release_info() - - -def uname_info(): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current OS distribution. - """ - return _distro.uname_info() - - -def os_release_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the os-release file data source - of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `os-release file`_ for details about these information items. - """ - return _distro.os_release_attr(attribute) - - -def lsb_release_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the lsb_release command output - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `lsb_release command output`_ for details about these information - items. - """ - return _distro.lsb_release_attr(attribute) - - -def distro_release_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the distro release file - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `distro release file`_ for details about these information items. - """ - return _distro.distro_release_attr(attribute) - - -def uname_attr(attribute): - # type: (str) -> str - """ - Return a single named information item from the distro release file - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - """ - return _distro.uname_attr(attribute) - - -try: - from functools import cached_property -except ImportError: - # Python < 3.8 - class cached_property(object): # type: ignore - """A version of @property which caches the value. On access, it calls the - underlying function and sets the value in `__dict__` so future accesses - will not re-call the property. - """ - - def __init__(self, f): - # type: (Callable[[Any], Any]) -> None - self._fname = f.__name__ - self._f = f - - def __get__(self, obj, owner): - # type: (Any, Type[Any]) -> Any - assert obj is not None, "call {} on an instance".format(self._fname) - ret = obj.__dict__[self._fname] = self._f(obj) - return ret - - -class LinuxDistribution(object): - """ - Provides information about a OS distribution. - - This package creates a private module-global instance of this class with - default initialization arguments, that is used by the - `consolidated accessor functions`_ and `single source accessor functions`_. - By using default initialization arguments, that module-global instance - returns data about the current OS distribution (i.e. the distro this - package runs on). - - Normally, it is not necessary to create additional instances of this class. 
- However, in situations where control is needed over the exact data sources - that are used, instances of this class can be created with a specific - distro release file, or a specific os-release file, or without invoking the - lsb_release command. - """ - - def __init__( - self, - include_lsb=True, - os_release_file="", - distro_release_file="", - include_uname=True, - root_dir=None, - ): - # type: (bool, str, str, bool, Optional[str]) -> None - """ - The initialization method of this class gathers information from the - available data sources, and stores that in private instance attributes. - Subsequent access to the information items uses these private instance - attributes, so that the data sources are read only once. - - Parameters: - - * ``include_lsb`` (bool): Controls whether the - `lsb_release command output`_ is included as a data source. - - If the lsb_release command is not available in the program execution - path, the data source for the lsb_release command will be empty. - - * ``os_release_file`` (string): The path name of the - `os-release file`_ that is to be used as a data source. - - An empty string (the default) will cause the default path name to - be used (see `os-release file`_ for details). - - If the specified or defaulted os-release file does not exist, the - data source for the os-release file will be empty. - - * ``distro_release_file`` (string): The path name of the - `distro release file`_ that is to be used as a data source. - - An empty string (the default) will cause a default search algorithm - to be used (see `distro release file`_ for details). - - If the specified distro release file does not exist, or if no default - distro release file can be found, the data source for the distro - release file will be empty. - - * ``include_uname`` (bool): Controls whether uname command output is - included as a data source. If the uname command is not available in - the program execution path the data source for the uname command will - be empty. - - * ``root_dir`` (string): The absolute path to the root directory to use - to find distro-related information files. - - Public instance attributes: - - * ``os_release_file`` (string): The path name of the - `os-release file`_ that is actually used as a data source. The - empty string if no distro release file is used as a data source. - - * ``distro_release_file`` (string): The path name of the - `distro release file`_ that is actually used as a data source. The - empty string if no distro release file is used as a data source. - - * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. - This controls whether the lsb information will be loaded. - - * ``include_uname`` (bool): The result of the ``include_uname`` - parameter. This controls whether the uname information will - be loaded. - - Raises: - - * :py:exc:`IOError`: Some I/O issue with an os-release file or distro - release file. - - * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had - some issue (other than not being available in the program execution - path). - - * :py:exc:`UnicodeError`: A data source has unexpected characters or - uses an unexpected encoding. 
- """ - self.root_dir = root_dir - self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR - self.usr_lib_dir = ( - os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR - ) - - if os_release_file: - self.os_release_file = os_release_file - else: - etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) - usr_lib_os_release_file = os.path.join( - self.usr_lib_dir, _OS_RELEASE_BASENAME - ) - - # NOTE: The idea is to respect order **and** have it set - # at all times for API backwards compatibility. - if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( - usr_lib_os_release_file - ): - self.os_release_file = etc_dir_os_release_file - else: - self.os_release_file = usr_lib_os_release_file - - self.distro_release_file = distro_release_file or "" # updated later - self.include_lsb = include_lsb - self.include_uname = include_uname - - def __repr__(self): - # type: () -> str - """Return repr of all info""" - return ( - "LinuxDistribution(" - "os_release_file={self.os_release_file!r}, " - "distro_release_file={self.distro_release_file!r}, " - "include_lsb={self.include_lsb!r}, " - "include_uname={self.include_uname!r}, " - "_os_release_info={self._os_release_info!r}, " - "_lsb_release_info={self._lsb_release_info!r}, " - "_distro_release_info={self._distro_release_info!r}, " - "_uname_info={self._uname_info!r})".format(self=self) - ) - - def linux_distribution(self, full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] - """ - Return information about the OS distribution that is compatible - with Python's :func:`platform.linux_distribution`, supporting a subset - of its parameters. - - For details, see :func:`distro.linux_distribution`. - """ - return ( - self.name() if full_distribution_name else self.id(), - self.version(), - self.codename(), - ) - - def id(self): - # type: () -> str - """Return the distro ID of the OS distribution, as a string. - - For details, see :func:`distro.id`. - """ - - def normalize(distro_id, table): - # type: (str, Dict[str, str]) -> str - distro_id = distro_id.lower().replace(" ", "_") - return table.get(distro_id, distro_id) - - distro_id = self.os_release_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_OS_ID) - - distro_id = self.lsb_release_attr("distributor_id") - if distro_id: - return normalize(distro_id, NORMALIZED_LSB_ID) - - distro_id = self.distro_release_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - distro_id = self.uname_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - return "" - - def name(self, pretty=False): - # type: (bool) -> str - """ - Return the name of the OS distribution, as a string. - - For details, see :func:`distro.name`. - """ - name = ( - self.os_release_attr("name") - or self.lsb_release_attr("distributor_id") - or self.distro_release_attr("name") - or self.uname_attr("name") - ) - if pretty: - name = self.os_release_attr("pretty_name") or self.lsb_release_attr( - "description" - ) - if not name: - name = self.distro_release_attr("name") or self.uname_attr("name") - version = self.version(pretty=True) - if version: - name = name + " " + version - return name or "" - - def version(self, pretty=False, best=False): - # type: (bool, bool) -> str - """ - Return the version of the OS distribution, as a string. - - For details, see :func:`distro.version`. 
- """ - versions = [ - self.os_release_attr("version_id"), - self.lsb_release_attr("release"), - self.distro_release_attr("version_id"), - self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( - "version_id", "" - ), - self._parse_distro_release_content( - self.lsb_release_attr("description") - ).get("version_id", ""), - self.uname_attr("release"), - ] - version = "" - if best: - # This algorithm uses the last version in priority order that has - # the best precision. If the versions are not in conflict, that - # does not matter; otherwise, using the last one instead of the - # first one might be considered a surprise. - for v in versions: - if v.count(".") > version.count(".") or version == "": - version = v - else: - for v in versions: - if v != "": - version = v - break - if pretty and version and self.codename(): - version = "{0} ({1})".format(version, self.codename()) - return version - - def version_parts(self, best=False): - # type: (bool) -> Tuple[str, str, str] - """ - Return the version of the OS distribution, as a tuple of version - numbers. - - For details, see :func:`distro.version_parts`. - """ - version_str = self.version(best=best) - if version_str: - version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") - matches = version_regex.match(version_str) - if matches: - major, minor, build_number = matches.groups() - return major, minor or "", build_number or "" - return "", "", "" - - def major_version(self, best=False): - # type: (bool) -> str - """ - Return the major version number of the current distribution. - - For details, see :func:`distro.major_version`. - """ - return self.version_parts(best)[0] - - def minor_version(self, best=False): - # type: (bool) -> str - """ - Return the minor version number of the current distribution. - - For details, see :func:`distro.minor_version`. - """ - return self.version_parts(best)[1] - - def build_number(self, best=False): - # type: (bool) -> str - """ - Return the build number of the current distribution. - - For details, see :func:`distro.build_number`. - """ - return self.version_parts(best)[2] - - def like(self): - # type: () -> str - """ - Return the IDs of distributions that are like the OS distribution. - - For details, see :func:`distro.like`. - """ - return self.os_release_attr("id_like") or "" - - def codename(self): - # type: () -> str - """ - Return the codename of the OS distribution. - - For details, see :func:`distro.codename`. - """ - try: - # Handle os_release specially since distros might purposefully set - # this to empty string to have no codename - return self._os_release_info["codename"] - except KeyError: - return ( - self.lsb_release_attr("codename") - or self.distro_release_attr("codename") - or "" - ) - - def info(self, pretty=False, best=False): - # type: (bool, bool) -> InfoDict - """ - Return certain machine-readable information about the OS - distribution. - - For details, see :func:`distro.info`. - """ - return dict( - id=self.id(), - version=self.version(pretty, best), - version_parts=dict( - major=self.major_version(best), - minor=self.minor_version(best), - build_number=self.build_number(best), - ), - like=self.like(), - codename=self.codename(), - ) - - def os_release_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the os-release file data source of the OS distribution. - - For details, see :func:`distro.os_release_info`. 
- """ - return self._os_release_info - - def lsb_release_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the lsb_release command data source of the OS - distribution. - - For details, see :func:`distro.lsb_release_info`. - """ - return self._lsb_release_info - - def distro_release_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the distro release file data source of the OS - distribution. - - For details, see :func:`distro.distro_release_info`. - """ - return self._distro_release_info - - def uname_info(self): - # type: () -> Dict[str, str] - """ - Return a dictionary containing key-value pairs for the information - items from the uname command data source of the OS distribution. - - For details, see :func:`distro.uname_info`. - """ - return self._uname_info - - def os_release_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the os-release file data - source of the OS distribution. - - For details, see :func:`distro.os_release_attr`. - """ - return self._os_release_info.get(attribute, "") - - def lsb_release_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the lsb_release command - output data source of the OS distribution. - - For details, see :func:`distro.lsb_release_attr`. - """ - return self._lsb_release_info.get(attribute, "") - - def distro_release_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the distro release file - data source of the OS distribution. - - For details, see :func:`distro.distro_release_attr`. - """ - return self._distro_release_info.get(attribute, "") - - def uname_attr(self, attribute): - # type: (str) -> str - """ - Return a single named information item from the uname command - output data source of the OS distribution. - - For details, see :func:`distro.uname_attr`. - """ - return self._uname_info.get(attribute, "") - - @cached_property - def _os_release_info(self): - # type: () -> Dict[str, str] - """ - Get the information items from the specified os-release file. - - Returns: - A dictionary containing all information items. - """ - if os.path.isfile(self.os_release_file): - with open(self.os_release_file) as release_file: - return self._parse_os_release_content(release_file) - return {} - - @staticmethod - def _parse_os_release_content(lines): - # type: (TextIO) -> Dict[str, str] - """ - Parse the lines of an os-release file. - - Parameters: - - * lines: Iterable through the lines in the os-release file. - Each line must be a unicode string or a UTF-8 encoded byte - string. - - Returns: - A dictionary containing all information items. - """ - props = {} - lexer = shlex.shlex(lines, posix=True) - lexer.whitespace_split = True - - # The shlex module defines its `wordchars` variable using literals, - # making it dependent on the encoding of the Python source file. - # In Python 2.6 and 2.7, the shlex source file is encoded in - # 'iso-8859-1', and the `wordchars` variable is defined as a byte - # string. This causes a UnicodeDecodeError to be raised when the - # parsed content is a unicode object. The following fix resolves that - # (... 
but it should be fixed in shlex...): - if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): - lexer.wordchars = lexer.wordchars.decode("iso-8859-1") - - tokens = list(lexer) - for token in tokens: - # At this point, all shell-like parsing has been done (i.e. - # comments processed, quotes and backslash escape sequences - # processed, multi-line values assembled, trailing newlines - # stripped, etc.), so the tokens are now either: - # * variable assignments: var=value - # * commands or their arguments (not allowed in os-release) - if "=" in token: - k, v = token.split("=", 1) - props[k.lower()] = v - else: - # Ignore any tokens that are not variable assignments - pass - - if "version_codename" in props: - # os-release added a version_codename field. Use that in - # preference to anything else Note that some distros purposefully - # do not have code names. They should be setting - # version_codename="" - props["codename"] = props["version_codename"] - elif "ubuntu_codename" in props: - # Same as above but a non-standard field name used on older Ubuntus - props["codename"] = props["ubuntu_codename"] - elif "version" in props: - # If there is no version_codename, parse it from the version - match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"]) - if match: - codename = match.group() - codename = codename.strip("()") - codename = codename.strip(",") - codename = codename.strip() - # codename appears within paranthese. - props["codename"] = codename - - return props - - @cached_property - def _lsb_release_info(self): - # type: () -> Dict[str, str] - """ - Get the information items from the lsb_release command output. - - Returns: - A dictionary containing all information items. - """ - if not self.include_lsb: - return {} - with open(os.devnull, "wb") as devnull: - try: - cmd = ("lsb_release", "-a") - stdout = subprocess.check_output(cmd, stderr=devnull) - # Command not found or lsb_release returned error - except (OSError, subprocess.CalledProcessError): - return {} - content = self._to_str(stdout).splitlines() - return self._parse_lsb_release_content(content) - - @staticmethod - def _parse_lsb_release_content(lines): - # type: (Iterable[str]) -> Dict[str, str] - """ - Parse the output of the lsb_release command. - - Parameters: - - * lines: Iterable through the lines of the lsb_release output. - Each line must be a unicode string or a UTF-8 encoded byte - string. - - Returns: - A dictionary containing all information items. - """ - props = {} - for line in lines: - kv = line.strip("\n").split(":", 1) - if len(kv) != 2: - # Ignore lines without colon. - continue - k, v = kv - props.update({k.replace(" ", "_").lower(): v.strip()}) - return props - - @cached_property - def _uname_info(self): - # type: () -> Dict[str, str] - with open(os.devnull, "wb") as devnull: - try: - cmd = ("uname", "-rs") - stdout = subprocess.check_output(cmd, stderr=devnull) - except OSError: - return {} - content = self._to_str(stdout).splitlines() - return self._parse_uname_content(content) - - @staticmethod - def _parse_uname_content(lines): - # type: (Sequence[str]) -> Dict[str, str] - props = {} - match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) - if match: - name, version = match.groups() - - # This is to prevent the Linux kernel version from - # appearing as the 'best' version on otherwise - # identifiable distributions. 
- if name == "Linux": - return {} - props["id"] = name.lower() - props["name"] = name - props["release"] = version - return props - - @staticmethod - def _to_str(text): - # type: (Union[bytes, str]) -> str - encoding = sys.getfilesystemencoding() - encoding = "utf-8" if encoding == "ascii" else encoding - - if sys.version_info[0] >= 3: - if isinstance(text, bytes): - return text.decode(encoding) - else: - if isinstance(text, unicode): # noqa - return text.encode(encoding) - - return text - - @cached_property - def _distro_release_info(self): - # type: () -> Dict[str, str] - """ - Get the information items from the specified distro release file. - - Returns: - A dictionary containing all information items. - """ - if self.distro_release_file: - # If it was specified, we use it and parse what we can, even if - # its file name or content does not match the expected pattern. - distro_info = self._parse_distro_release_file(self.distro_release_file) - basename = os.path.basename(self.distro_release_file) - # The file name pattern for user-specified distro release files - # is somewhat more tolerant (compared to when searching for the - # file), because we want to use what was specified as best as - # possible. - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - elif match: - distro_info["id"] = match.group(1) - return distro_info - else: - try: - basenames = os.listdir(self.etc_dir) - # We sort for repeatability in cases where there are multiple - # distro specific files; e.g. CentOS, Oracle, Enterprise all - # containing `redhat-release` on top of their own. - basenames.sort() - except OSError: - # This may occur when /etc is not readable but we can't be - # sure about the *-release files. Check common entries of - # /etc for information. If they turn out to not be there the - # error is handled in `_parse_distro_release_file()`. - basenames = [ - "SuSE-release", - "arch-release", - "base-release", - "centos-release", - "fedora-release", - "gentoo-release", - "mageia-release", - "mandrake-release", - "mandriva-release", - "mandrivalinux-release", - "manjaro-release", - "oracle-release", - "redhat-release", - "sl-release", - "slackware-version", - ] - for basename in basenames: - if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: - continue - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: - filepath = os.path.join(self.etc_dir, basename) - distro_info = self._parse_distro_release_file(filepath) - if "name" in distro_info: - # The name is always present if the pattern matches - self.distro_release_file = filepath - distro_info["id"] = match.group(1) - if "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - return distro_info - return {} - - def _parse_distro_release_file(self, filepath): - # type: (str) -> Dict[str, str] - """ - Parse a distro release file. - - Parameters: - - * filepath: Path name of the distro release file. - - Returns: - A dictionary containing all information items. - """ - try: - with open(filepath) as fp: - # Only parse the first line. For instance, on SLES there - # are multiple lines. We don't want them... - return self._parse_distro_release_content(fp.readline()) - except (OSError, IOError): - # Ignore not being able to read a specific, seemingly version - # related file. 
- # See https://github.com/python-distro/distro/issues/162 - return {} - - @staticmethod - def _parse_distro_release_content(line): - # type: (str) -> Dict[str, str] - """ - Parse a line from a distro release file. - - Parameters: - * line: Line from the distro release file. Must be a unicode string - or a UTF-8 encoded byte string. - - Returns: - A dictionary containing all information items. - """ - matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) - distro_info = {} - if matches: - # regexp ensures non-None - distro_info["name"] = matches.group(3)[::-1] - if matches.group(2): - distro_info["version_id"] = matches.group(2)[::-1] - if matches.group(1): - distro_info["codename"] = matches.group(1)[::-1] - elif line: - distro_info["name"] = line.strip() - return distro_info - - -_distro = LinuxDistribution() - - -def main(): - # type: () -> None - logger = logging.getLogger(__name__) - logger.setLevel(logging.DEBUG) - logger.addHandler(logging.StreamHandler(sys.stdout)) - - parser = argparse.ArgumentParser(description="OS distro info tool") - parser.add_argument( - "--json", "-j", help="Output in machine readable format", action="store_true" - ) - - parser.add_argument( - "--root-dir", - "-r", - type=str, - dest="root_dir", - help="Path to the root filesystem directory (defaults to /)", - ) - - args = parser.parse_args() - - if args.root_dir: - dist = LinuxDistribution( - include_lsb=False, include_uname=False, root_dir=args.root_dir - ) - else: - dist = _distro - - if args.json: - logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) - else: - logger.info("Name: %s", dist.name(pretty=True)) - distribution_version = dist.version(pretty=True) - logger.info("Version: %s", distribution_version) - distribution_codename = dist.codename() - logger.info("Codename: %s", distribution_codename) - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/__init__.py deleted file mode 100644 index d1d82f1..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -HTML parsing library based on the `WHATWG HTML specification -`_. The parser is designed to be compatible with -existing HTML found in the wild and implements well-defined error recovery that -is largely compatible with modern desktop web browsers. - -Example usage:: - - from pip._vendor import html5lib - with open("my_document.html", "rb") as f: - tree = html5lib.parse(f) - -For convenience, this module re-exports the following names: - -* :func:`~.html5parser.parse` -* :func:`~.html5parser.parseFragment` -* :class:`~.html5parser.HTMLParser` -* :func:`~.treebuilders.getTreeBuilder` -* :func:`~.treewalkers.getTreeWalker` -* :func:`~.serializer.serialize` -""" - -from __future__ import absolute_import, division, unicode_literals - -from .html5parser import HTMLParser, parse, parseFragment -from .treebuilders import getTreeBuilder -from .treewalkers import getTreeWalker -from .serializer import serialize - -__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", - "getTreeWalker", "serialize"] - -# this has to be at the top level, see how setup.py parses this -#: Distribution version number. 
-__version__ = "1.1" diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 6cc0967..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc deleted file mode 100644 index a361116..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc deleted file mode 100644 index 026e387..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc deleted file mode 100644 index f7a8167..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc deleted file mode 100644 index 0709d4a..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc deleted file mode 100644 index 976b98e..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc deleted file mode 100644 index b2d0bc1..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc deleted file mode 100644 index 083876b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py deleted file mode 100644 index 3ff803c..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py +++ /dev/null @@ -1,289 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -import re -import warnings - -from .constants import DataLossWarning - -baseChar = """ -[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | -[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | -[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | -[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | -[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | 
-[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | -[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | -[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | -[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | -[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | -[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | -[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | -[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | -[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | -[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | -[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | -[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | -[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | -[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | -[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | -[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | -[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | -[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | -[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | -[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | -[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | -[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | -[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | -[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | -[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | -#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | -#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | -#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | -[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | -[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | -#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | -[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | -[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | -[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | -[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | -[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | -#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | -[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | -[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | -[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | -[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" - -ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" - -combiningCharacter = """ -[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | -[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | -[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | -[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | -#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | -[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | -[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | -#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | -[#x0A4B-#x0A4D] | [#x0A70-#x0A71] 
| [#x0A81-#x0A83] | #x0ABC | -[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | -#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | -[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | -[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | -[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | -[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | -[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | -#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | -[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | -#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | -[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | -[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | -#x3099 | #x309A""" - -digit = """ -[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | -[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | -[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | -[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" - -extender = """ -#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | -#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" - -letter = " | ".join([baseChar, ideographic]) - -# Without the -name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, - extender]) -nameFirst = " | ".join([letter, "_"]) - -reChar = re.compile(r"#x([\d|A-F]{4,4})") -reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]") - - -def charStringToList(chars): - charRanges = [item.strip() for item in chars.split(" | ")] - rv = [] - for item in charRanges: - foundMatch = False - for regexp in (reChar, reCharRange): - match = regexp.match(item) - if match is not None: - rv.append([hexToInt(item) for item in match.groups()]) - if len(rv[-1]) == 1: - rv[-1] = rv[-1] * 2 - foundMatch = True - break - if not foundMatch: - assert len(item) == 1 - - rv.append([ord(item)] * 2) - rv = normaliseCharList(rv) - return rv - - -def normaliseCharList(charList): - charList = sorted(charList) - for item in charList: - assert item[1] >= item[0] - rv = [] - i = 0 - while i < len(charList): - j = 1 - rv.append(charList[i]) - while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: - rv[-1][1] = charList[i + j][1] - j += 1 - i += j - return rv - - -# We don't really support characters above the BMP :( -max_unicode = int("FFFF", 16) - - -def missingRanges(charList): - rv = [] - if charList[0] != 0: - rv.append([0, charList[0][0] - 1]) - for i, item in enumerate(charList[:-1]): - rv.append([item[1] + 1, charList[i + 1][0] - 1]) - if charList[-1][1] != max_unicode: - rv.append([charList[-1][1] + 1, max_unicode]) - return rv - - -def listToRegexpStr(charList): - rv = [] - for item in charList: - if item[0] == item[1]: - rv.append(escapeRegexp(chr(item[0]))) - else: - rv.append(escapeRegexp(chr(item[0])) + "-" + - escapeRegexp(chr(item[1]))) - return "[%s]" % "".join(rv) - - -def hexToInt(hex_str): - return int(hex_str, 16) - - -def escapeRegexp(string): - specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}", - "[", "]", "|", "(", ")", "-") - for char in specialCharacters: - string = string.replace(char, "\\" + char) - - return string - -# output from the above -nonXmlNameBMPRegexp = 
re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa - -nonXmlNameFirstBMPRegexp = 
re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa - -# Simpler things -nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]") - - -class InfosetFilter(object): - replacementRegexp = re.compile(r"U[\dA-F]{5,5}") - - def __init__(self, - dropXmlnsLocalName=False, - dropXmlnsAttrNs=False, - preventDoubleDashComments=False, - preventDashAtCommentEnd=False, - replaceFormFeedCharacters=True, - preventSingleQuotePubid=False): - - self.dropXmlnsLocalName = dropXmlnsLocalName - self.dropXmlnsAttrNs = dropXmlnsAttrNs - - self.preventDoubleDashComments = preventDoubleDashComments - self.preventDashAtCommentEnd = preventDashAtCommentEnd - - self.replaceFormFeedCharacters = replaceFormFeedCharacters - - self.preventSingleQuotePubid = preventSingleQuotePubid - - self.replaceCache = {} - - def coerceAttribute(self, name, namespace=None): - if self.dropXmlnsLocalName and name.startswith("xmlns:"): - warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) - return None - elif (self.dropXmlnsAttrNs and - namespace == "http://www.w3.org/2000/xmlns/"): - warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) - return None - else: - return self.toXmlName(name) - - def coerceElement(self, name): - return self.toXmlName(name) - - def coerceComment(self, data): - if self.preventDoubleDashComments: - while "--" in data: - warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) - data = data.replace("--", "- -") - if data.endswith("-"): - warnings.warn("Comments cannot end in 
a dash", DataLossWarning) - data += " " - return data - - def coerceCharacters(self, data): - if self.replaceFormFeedCharacters: - for _ in range(data.count("\x0C")): - warnings.warn("Text cannot contain U+000C", DataLossWarning) - data = data.replace("\x0C", " ") - # Other non-xml characters - return data - - def coercePubid(self, data): - dataOutput = data - for char in nonPubidCharRegexp.findall(data): - warnings.warn("Coercing non-XML pubid", DataLossWarning) - replacement = self.getReplacementCharacter(char) - dataOutput = dataOutput.replace(char, replacement) - if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: - warnings.warn("Pubid cannot contain single quote", DataLossWarning) - dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) - return dataOutput - - def toXmlName(self, name): - nameFirst = name[0] - nameRest = name[1:] - m = nonXmlNameFirstBMPRegexp.match(nameFirst) - if m: - warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) - nameFirstOutput = self.getReplacementCharacter(nameFirst) - else: - nameFirstOutput = nameFirst - - nameRestOutput = nameRest - replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) - for char in replaceChars: - warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) - replacement = self.getReplacementCharacter(char) - nameRestOutput = nameRestOutput.replace(char, replacement) - return nameFirstOutput + nameRestOutput - - def getReplacementCharacter(self, char): - if char in self.replaceCache: - replacement = self.replaceCache[char] - else: - replacement = self.escapeChar(char) - return replacement - - def fromXmlName(self, name): - for item in set(self.replacementRegexp.findall(name)): - name = name.replace(item, self.unescapeChar(item)) - return name - - def escapeChar(self, char): - replacement = "U%05X" % ord(char) - self.replaceCache[char] = replacement - return replacement - - def unescapeChar(self, charcode): - return chr(int(charcode[1:], 16)) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py deleted file mode 100644 index e0bb376..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py +++ /dev/null @@ -1,918 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from pip._vendor.six import text_type -from pip._vendor.six.moves import http_client, urllib - -import codecs -import re -from io import BytesIO, StringIO - -from pip._vendor import webencodings - -from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase -from .constants import _ReparseException -from . 
import _utils - -# Non-unicode versions of constants for use in the pre-parser -spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) -asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) -asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) -spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) - - -invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa - -if _utils.supports_lone_surrogates: - # Use one extra step of indirection and create surrogates with - # eval. Not using this indirection would introduce an illegal - # unicode literal on platforms not supporting such lone - # surrogates. - assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 - invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + - eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used - "]") -else: - invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) - -non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, - 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, - 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, - 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, - 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, - 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, - 0x10FFFE, 0x10FFFF} - -ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") - -# Cache for charsUntil() -charsUntilRegEx = {} - - -class BufferedStream(object): - """Buffering for streams that do not have buffering of their own - - The buffer is implemented as a list of chunks on the assumption that - joining many strings will be slow since it is O(n**2) - """ - - def __init__(self, stream): - self.stream = stream - self.buffer = [] - self.position = [-1, 0] # chunk number, offset - - def tell(self): - pos = 0 - for chunk in self.buffer[:self.position[0]]: - pos += len(chunk) - pos += self.position[1] - return pos - - def seek(self, pos): - assert pos <= self._bufferedBytes() - offset = pos - i = 0 - while len(self.buffer[i]) < offset: - offset -= len(self.buffer[i]) - i += 1 - self.position = [i, offset] - - def read(self, bytes): - if not self.buffer: - return self._readStream(bytes) - elif (self.position[0] == len(self.buffer) and - self.position[1] == len(self.buffer[-1])): - return self._readStream(bytes) - else: - return self._readFromBuffer(bytes) - - def _bufferedBytes(self): - return sum([len(item) for item in self.buffer]) - - def _readStream(self, bytes): - data = self.stream.read(bytes) - self.buffer.append(data) - self.position[0] += 1 - self.position[1] = len(data) - return data - - def _readFromBuffer(self, bytes): - remainingBytes = bytes - rv = [] - bufferIndex = self.position[0] - bufferOffset = self.position[1] - while bufferIndex < len(self.buffer) and remainingBytes != 0: - assert remainingBytes > 0 - bufferedData = self.buffer[bufferIndex] - - if remainingBytes <= len(bufferedData) - bufferOffset: - bytesToRead = remainingBytes - self.position = [bufferIndex, bufferOffset + bytesToRead] - else: - bytesToRead = len(bufferedData) - bufferOffset - 
self.position = [bufferIndex, len(bufferedData)] - bufferIndex += 1 - rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) - remainingBytes -= bytesToRead - - bufferOffset = 0 - - if remainingBytes: - rv.append(self._readStream(remainingBytes)) - - return b"".join(rv) - - -def HTMLInputStream(source, **kwargs): - # Work around Python bug #20007: read(0) closes the connection. - # http://bugs.python.org/issue20007 - if (isinstance(source, http_client.HTTPResponse) or - # Also check for addinfourl wrapping HTTPResponse - (isinstance(source, urllib.response.addbase) and - isinstance(source.fp, http_client.HTTPResponse))): - isUnicode = False - elif hasattr(source, "read"): - isUnicode = isinstance(source.read(0), text_type) - else: - isUnicode = isinstance(source, text_type) - - if isUnicode: - encodings = [x for x in kwargs if x.endswith("_encoding")] - if encodings: - raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) - - return HTMLUnicodeInputStream(source, **kwargs) - else: - return HTMLBinaryInputStream(source, **kwargs) - - -class HTMLUnicodeInputStream(object): - """Provides a unicode stream of characters to the HTMLTokenizer. - - This class takes care of character encoding and removing or replacing - incorrect byte-sequences and also provides column and line tracking. - - """ - - _defaultChunkSize = 10240 - - def __init__(self, source): - """Initialises the HTMLInputStream. - - HTMLInputStream(source, [encoding]) -> Normalized stream from source - for use by html5lib. - - source can be either a file-object, local filename or a string. - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element) - - """ - - if not _utils.supports_lone_surrogates: - # Such platforms will have already checked for such - # surrogate errors, so no need to do this checking. - self.reportCharacterErrors = None - elif len("\U0010FFFF") == 1: - self.reportCharacterErrors = self.characterErrorsUCS4 - else: - self.reportCharacterErrors = self.characterErrorsUCS2 - - # List of where new lines occur - self.newLines = [0] - - self.charEncoding = (lookupEncoding("utf-8"), "certain") - self.dataStream = self.openStream(source) - - self.reset() - - def reset(self): - self.chunk = "" - self.chunkSize = 0 - self.chunkOffset = 0 - self.errors = [] - - # number of (complete) lines in previous chunks - self.prevNumLines = 0 - # number of columns in the last line of the previous chunk - self.prevNumCols = 0 - - # Deal with CR LF and surrogates split over chunk boundaries - self._bufferedCharacter = None - - def openStream(self, source): - """Produces a file object from source. - - source can be either a file object, local filename or a string. 
- - """ - # Already a file object - if hasattr(source, 'read'): - stream = source - else: - stream = StringIO(source) - - return stream - - def _position(self, offset): - chunk = self.chunk - nLines = chunk.count('\n', 0, offset) - positionLine = self.prevNumLines + nLines - lastLinePos = chunk.rfind('\n', 0, offset) - if lastLinePos == -1: - positionColumn = self.prevNumCols + offset - else: - positionColumn = offset - (lastLinePos + 1) - return (positionLine, positionColumn) - - def position(self): - """Returns (line, col) of the current position in the stream.""" - line, col = self._position(self.chunkOffset) - return (line + 1, col) - - def char(self): - """ Read one character from the stream or queue if available. Return - EOF when EOF is reached. - """ - # Read a new chunk from the input stream if necessary - if self.chunkOffset >= self.chunkSize: - if not self.readChunk(): - return EOF - - chunkOffset = self.chunkOffset - char = self.chunk[chunkOffset] - self.chunkOffset = chunkOffset + 1 - - return char - - def readChunk(self, chunkSize=None): - if chunkSize is None: - chunkSize = self._defaultChunkSize - - self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) - - self.chunk = "" - self.chunkSize = 0 - self.chunkOffset = 0 - - data = self.dataStream.read(chunkSize) - - # Deal with CR LF and surrogates broken across chunks - if self._bufferedCharacter: - data = self._bufferedCharacter + data - self._bufferedCharacter = None - elif not data: - # We have no more data, bye-bye stream - return False - - if len(data) > 1: - lastv = ord(data[-1]) - if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: - self._bufferedCharacter = data[-1] - data = data[:-1] - - if self.reportCharacterErrors: - self.reportCharacterErrors(data) - - # Replace invalid characters - data = data.replace("\r\n", "\n") - data = data.replace("\r", "\n") - - self.chunk = data - self.chunkSize = len(data) - - return True - - def characterErrorsUCS4(self, data): - for _ in range(len(invalid_unicode_re.findall(data))): - self.errors.append("invalid-codepoint") - - def characterErrorsUCS2(self, data): - # Someone picked the wrong compile option - # You lose - skip = False - for match in invalid_unicode_re.finditer(data): - if skip: - continue - codepoint = ord(match.group()) - pos = match.start() - # Pretty sure there should be endianness issues here - if _utils.isSurrogatePair(data[pos:pos + 2]): - # We have a surrogate pair! - char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) - if char_val in non_bmp_invalid_codepoints: - self.errors.append("invalid-codepoint") - skip = True - elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and - pos == len(data) - 1): - self.errors.append("invalid-codepoint") - else: - skip = False - self.errors.append("invalid-codepoint") - - def charsUntil(self, characters, opposite=False): - """ Returns a string of characters from the stream up to but not - including any character in 'characters' or EOF. 'characters' must be - a container that supports the 'in' method and iteration over its - characters. 
- """ - - # Use a cache of regexps to find the required characters - try: - chars = charsUntilRegEx[(characters, opposite)] - except KeyError: - if __debug__: - for c in characters: - assert(ord(c) < 128) - regex = "".join(["\\x%02x" % ord(c) for c in characters]) - if not opposite: - regex = "^%s" % regex - chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) - - rv = [] - - while True: - # Find the longest matching prefix - m = chars.match(self.chunk, self.chunkOffset) - if m is None: - # If nothing matched, and it wasn't because we ran out of chunk, - # then stop - if self.chunkOffset != self.chunkSize: - break - else: - end = m.end() - # If not the whole chunk matched, return everything - # up to the part that didn't match - if end != self.chunkSize: - rv.append(self.chunk[self.chunkOffset:end]) - self.chunkOffset = end - break - # If the whole remainder of the chunk matched, - # use it all and read the next chunk - rv.append(self.chunk[self.chunkOffset:]) - if not self.readChunk(): - # Reached EOF - break - - r = "".join(rv) - return r - - def unget(self, char): - # Only one character is allowed to be ungotten at once - it must - # be consumed again before any further call to unget - if char is not EOF: - if self.chunkOffset == 0: - # unget is called quite rarely, so it's a good idea to do - # more work here if it saves a bit of work in the frequently - # called char and charsUntil. - # So, just prepend the ungotten character onto the current - # chunk: - self.chunk = char + self.chunk - self.chunkSize += 1 - else: - self.chunkOffset -= 1 - assert self.chunk[self.chunkOffset] == char - - -class HTMLBinaryInputStream(HTMLUnicodeInputStream): - """Provides a unicode stream of characters to the HTMLTokenizer. - - This class takes care of character encoding and removing or replacing - incorrect byte-sequences and also provides column and line tracking. - - """ - - def __init__(self, source, override_encoding=None, transport_encoding=None, - same_origin_parent_encoding=None, likely_encoding=None, - default_encoding="windows-1252", useChardet=True): - """Initialises the HTMLInputStream. - - HTMLInputStream(source, [encoding]) -> Normalized stream from source - for use by html5lib. - - source can be either a file-object, local filename or a string. - - The optional encoding parameter must be a string that indicates - the encoding. 
If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element) - - """ - # Raw Stream - for unicode objects this will encode to utf-8 and set - # self.charEncoding as appropriate - self.rawStream = self.openStream(source) - - HTMLUnicodeInputStream.__init__(self, self.rawStream) - - # Encoding Information - # Number of bytes to use when looking for a meta element with - # encoding information - self.numBytesMeta = 1024 - # Number of bytes to use when using detecting encoding using chardet - self.numBytesChardet = 100 - # Things from args - self.override_encoding = override_encoding - self.transport_encoding = transport_encoding - self.same_origin_parent_encoding = same_origin_parent_encoding - self.likely_encoding = likely_encoding - self.default_encoding = default_encoding - - # Determine encoding - self.charEncoding = self.determineEncoding(useChardet) - assert self.charEncoding[0] is not None - - # Call superclass - self.reset() - - def reset(self): - self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') - HTMLUnicodeInputStream.reset(self) - - def openStream(self, source): - """Produces a file object from source. - - source can be either a file object, local filename or a string. - - """ - # Already a file object - if hasattr(source, 'read'): - stream = source - else: - stream = BytesIO(source) - - try: - stream.seek(stream.tell()) - except Exception: - stream = BufferedStream(stream) - - return stream - - def determineEncoding(self, chardet=True): - # BOMs take precedence over everything - # This will also read past the BOM if present - charEncoding = self.detectBOM(), "certain" - if charEncoding[0] is not None: - return charEncoding - - # If we've been overridden, we've been overridden - charEncoding = lookupEncoding(self.override_encoding), "certain" - if charEncoding[0] is not None: - return charEncoding - - # Now check the transport layer - charEncoding = lookupEncoding(self.transport_encoding), "certain" - if charEncoding[0] is not None: - return charEncoding - - # Look for meta elements with encoding information - charEncoding = self.detectEncodingMeta(), "tentative" - if charEncoding[0] is not None: - return charEncoding - - # Parent document encoding - charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" - if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): - return charEncoding - - # "likely" encoding - charEncoding = lookupEncoding(self.likely_encoding), "tentative" - if charEncoding[0] is not None: - return charEncoding - - # Guess with chardet, if available - if chardet: - try: - from pip._vendor.chardet.universaldetector import UniversalDetector - except ImportError: - pass - else: - buffers = [] - detector = UniversalDetector() - while not detector.done: - buffer = self.rawStream.read(self.numBytesChardet) - assert isinstance(buffer, bytes) - if not buffer: - break - buffers.append(buffer) - detector.feed(buffer) - detector.close() - encoding = lookupEncoding(detector.result['encoding']) - self.rawStream.seek(0) - if encoding is not None: - return encoding, "tentative" - - # Try the default encoding - charEncoding = lookupEncoding(self.default_encoding), "tentative" - if charEncoding[0] is not None: - return charEncoding - - # Fallback to html5lib's default if even that hasn't worked - return lookupEncoding("windows-1252"), "tentative" - - def changeEncoding(self, newEncoding): - assert self.charEncoding[1] != "certain" - 
newEncoding = lookupEncoding(newEncoding) - if newEncoding is None: - return - if newEncoding.name in ("utf-16be", "utf-16le"): - newEncoding = lookupEncoding("utf-8") - assert newEncoding is not None - elif newEncoding == self.charEncoding[0]: - self.charEncoding = (self.charEncoding[0], "certain") - else: - self.rawStream.seek(0) - self.charEncoding = (newEncoding, "certain") - self.reset() - raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) - - def detectBOM(self): - """Attempts to detect at BOM at the start of the stream. If - an encoding can be determined from the BOM return the name of the - encoding otherwise return None""" - bomDict = { - codecs.BOM_UTF8: 'utf-8', - codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', - codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' - } - - # Go to beginning of file and read in 4 bytes - string = self.rawStream.read(4) - assert isinstance(string, bytes) - - # Try detecting the BOM using bytes from the string - encoding = bomDict.get(string[:3]) # UTF-8 - seek = 3 - if not encoding: - # Need to detect UTF-32 before UTF-16 - encoding = bomDict.get(string) # UTF-32 - seek = 4 - if not encoding: - encoding = bomDict.get(string[:2]) # UTF-16 - seek = 2 - - # Set the read position past the BOM if one was found, otherwise - # set it to the start of the stream - if encoding: - self.rawStream.seek(seek) - return lookupEncoding(encoding) - else: - self.rawStream.seek(0) - return None - - def detectEncodingMeta(self): - """Report the encoding declared by the meta element - """ - buffer = self.rawStream.read(self.numBytesMeta) - assert isinstance(buffer, bytes) - parser = EncodingParser(buffer) - self.rawStream.seek(0) - encoding = parser.getEncoding() - - if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): - encoding = lookupEncoding("utf-8") - - return encoding - - -class EncodingBytes(bytes): - """String-like object with an associated position and various extra methods - If the position is ever greater than the string length then an exception is - raised""" - def __new__(self, value): - assert isinstance(value, bytes) - return bytes.__new__(self, value.lower()) - - def __init__(self, value): - # pylint:disable=unused-argument - self._position = -1 - - def __iter__(self): - return self - - def __next__(self): - p = self._position = self._position + 1 - if p >= len(self): - raise StopIteration - elif p < 0: - raise TypeError - return self[p:p + 1] - - def next(self): - # Py2 compat - return self.__next__() - - def previous(self): - p = self._position - if p >= len(self): - raise StopIteration - elif p < 0: - raise TypeError - self._position = p = p - 1 - return self[p:p + 1] - - def setPosition(self, position): - if self._position >= len(self): - raise StopIteration - self._position = position - - def getPosition(self): - if self._position >= len(self): - raise StopIteration - if self._position >= 0: - return self._position - else: - return None - - position = property(getPosition, setPosition) - - def getCurrentByte(self): - return self[self.position:self.position + 1] - - currentByte = property(getCurrentByte) - - def skip(self, chars=spaceCharactersBytes): - """Skip past a list of characters""" - p = self.position # use property for the error-checking - while p < len(self): - c = self[p:p + 1] - if c not in chars: - self._position = p - return c - p += 1 - self._position = p - return None - - def skipUntil(self, chars): - p = self.position - while p < len(self): 
- c = self[p:p + 1] - if c in chars: - self._position = p - return c - p += 1 - self._position = p - return None - - def matchBytes(self, bytes): - """Look for a sequence of bytes at the start of a string. If the bytes - are found return True and advance the position to the byte after the - match. Otherwise return False and leave the position alone""" - rv = self.startswith(bytes, self.position) - if rv: - self.position += len(bytes) - return rv - - def jumpTo(self, bytes): - """Look for the next sequence of bytes matching a given sequence. If - a match is found advance the position to the last byte of the match""" - try: - self._position = self.index(bytes, self.position) + len(bytes) - 1 - except ValueError: - raise StopIteration - return True - - -class EncodingParser(object): - """Mini parser for detecting character encoding from meta elements""" - - def __init__(self, data): - """string - the data to work on for encoding detection""" - self.data = EncodingBytes(data) - self.encoding = None - - def getEncoding(self): - if b"") - - def handleMeta(self): - if self.data.currentByte not in spaceCharactersBytes: - # if we have ") - - def getAttribute(self): - """Return a name,value pair for the next attribute in the stream, - if one is found, or None""" - data = self.data - # Step 1 (skip chars) - c = data.skip(spaceCharactersBytes | frozenset([b"/"])) - assert c is None or len(c) == 1 - # Step 2 - if c in (b">", None): - return None - # Step 3 - attrName = [] - attrValue = [] - # Step 4 attribute name - while True: - if c == b"=" and attrName: - break - elif c in spaceCharactersBytes: - # Step 6! - c = data.skip() - break - elif c in (b"/", b">"): - return b"".join(attrName), b"" - elif c in asciiUppercaseBytes: - attrName.append(c.lower()) - elif c is None: - return None - else: - attrName.append(c) - # Step 5 - c = next(data) - # Step 7 - if c != b"=": - data.previous() - return b"".join(attrName), b"" - # Step 8 - next(data) - # Step 9 - c = data.skip() - # Step 10 - if c in (b"'", b'"'): - # 10.1 - quoteChar = c - while True: - # 10.2 - c = next(data) - # 10.3 - if c == quoteChar: - next(data) - return b"".join(attrName), b"".join(attrValue) - # 10.4 - elif c in asciiUppercaseBytes: - attrValue.append(c.lower()) - # 10.5 - else: - attrValue.append(c) - elif c == b">": - return b"".join(attrName), b"" - elif c in asciiUppercaseBytes: - attrValue.append(c.lower()) - elif c is None: - return None - else: - attrValue.append(c) - # Step 11 - while True: - c = next(data) - if c in spacesAngleBrackets: - return b"".join(attrName), b"".join(attrValue) - elif c in asciiUppercaseBytes: - attrValue.append(c.lower()) - elif c is None: - return None - else: - attrValue.append(c) - - -class ContentAttrParser(object): - def __init__(self, data): - assert isinstance(data, bytes) - self.data = data - - def parse(self): - try: - # Check if the attr name is charset - # otherwise return - self.data.jumpTo(b"charset") - self.data.position += 1 - self.data.skip() - if not self.data.currentByte == b"=": - # If there is no = sign keep looking for attrs - return None - self.data.position += 1 - self.data.skip() - # Look for an encoding between matching quote marks - if self.data.currentByte in (b'"', b"'"): - quoteMark = self.data.currentByte - self.data.position += 1 - oldPosition = self.data.position - if self.data.jumpTo(quoteMark): - return self.data[oldPosition:self.data.position] - else: - return None - else: - # Unquoted value - oldPosition = self.data.position - try: - 
self.data.skipUntil(spaceCharactersBytes) - return self.data[oldPosition:self.data.position] - except StopIteration: - # Return the whole remaining value - return self.data[oldPosition:] - except StopIteration: - return None - - -def lookupEncoding(encoding): - """Return the python codec name corresponding to an encoding or None if the - string doesn't correspond to a valid encoding.""" - if isinstance(encoding, bytes): - try: - encoding = encoding.decode("ascii") - except UnicodeDecodeError: - return None - - if encoding is not None: - try: - return webencodings.lookup(encoding) - except AttributeError: - return None - else: - return None diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py deleted file mode 100644 index 5f00253..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py +++ /dev/null @@ -1,1735 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from pip._vendor.six import unichr as chr - -from collections import deque, OrderedDict -from sys import version_info - -from .constants import spaceCharacters -from .constants import entities -from .constants import asciiLetters, asciiUpper2Lower -from .constants import digits, hexDigits, EOF -from .constants import tokenTypes, tagTokenTypes -from .constants import replacementCharacters - -from ._inputstream import HTMLInputStream - -from ._trie import Trie - -entitiesTrie = Trie(entities) - -if version_info >= (3, 7): - attributeMap = dict -else: - attributeMap = OrderedDict - - -class HTMLTokenizer(object): - """ This class takes care of tokenizing HTML. - - * self.currentToken - Holds the token that is currently being processed. - - * self.state - Holds a reference to the method to be invoked... XXX - - * self.stream - Points to HTMLInputStream object. - """ - - def __init__(self, stream, parser=None, **kwargs): - - self.stream = HTMLInputStream(stream, **kwargs) - self.parser = parser - - # Setup the initial tokenizer state - self.escapeFlag = False - self.lastFourChars = [] - self.state = self.dataState - self.escape = False - - # The current token being created - self.currentToken = None - super(HTMLTokenizer, self).__init__() - - def __iter__(self): - """ This is where the magic happens. - - We do our usually processing through the states and when we have a token - to return we yield the token which pauses processing until the next token - is requested. - """ - self.tokenQueue = deque([]) - # Start processing. When EOF is reached self.state will return False - # instead of True and the loop will terminate. - while self.state(): - while self.stream.errors: - yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} - while self.tokenQueue: - yield self.tokenQueue.popleft() - - def consumeNumberEntity(self, isHex): - """This function returns either U+FFFD or the character based on the - decimal or hexadecimal representation. It also discards ";" if present. - If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. - """ - - allowed = digits - radix = 10 - if isHex: - allowed = hexDigits - radix = 16 - - charStack = [] - - # Consume all the characters that are in range while making sure we - # don't hit an EOF. - c = self.stream.char() - while c in allowed and c is not EOF: - charStack.append(c) - c = self.stream.char() - - # Convert the set of characters consumed to an int. 
- charAsInt = int("".join(charStack), radix) - - # Certain characters get replaced with others - if charAsInt in replacementCharacters: - char = replacementCharacters[charAsInt] - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "illegal-codepoint-for-numeric-entity", - "datavars": {"charAsInt": charAsInt}}) - elif ((0xD800 <= charAsInt <= 0xDFFF) or - (charAsInt > 0x10FFFF)): - char = "\uFFFD" - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "illegal-codepoint-for-numeric-entity", - "datavars": {"charAsInt": charAsInt}}) - else: - # Should speed up this check somehow (e.g. move the set to a constant) - if ((0x0001 <= charAsInt <= 0x0008) or - (0x000E <= charAsInt <= 0x001F) or - (0x007F <= charAsInt <= 0x009F) or - (0xFDD0 <= charAsInt <= 0xFDEF) or - charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, - 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, - 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, - 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, - 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, - 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, - 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, - 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, - 0xFFFFF, 0x10FFFE, 0x10FFFF])): - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": - "illegal-codepoint-for-numeric-entity", - "datavars": {"charAsInt": charAsInt}}) - try: - # Try/except needed as UCS-2 Python builds' unichar only works - # within the BMP. - char = chr(charAsInt) - except ValueError: - v = charAsInt - 0x10000 - char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) - - # Discard the ; if present. Otherwise, put it back on the queue and - # invoke parseError on parser. - if c != ";": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "numeric-entity-without-semicolon"}) - self.stream.unget(c) - - return char - - def consumeEntity(self, allowedChar=None, fromAttribute=False): - # Initialise to the default output for when no entity is matched - output = "&" - - charStack = [self.stream.char()] - if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or - (allowedChar is not None and allowedChar == charStack[0])): - self.stream.unget(charStack[0]) - - elif charStack[0] == "#": - # Read the next character to see if it's hex or decimal - hex = False - charStack.append(self.stream.char()) - if charStack[-1] in ("x", "X"): - hex = True - charStack.append(self.stream.char()) - - # charStack[-1] should be the first digit - if (hex and charStack[-1] in hexDigits) \ - or (not hex and charStack[-1] in digits): - # At least one digit found, so consume the whole number - self.stream.unget(charStack[-1]) - output = self.consumeNumberEntity(hex) - else: - # No digits found - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "expected-numeric-entity"}) - self.stream.unget(charStack.pop()) - output = "&" + "".join(charStack) - - else: - # At this point in the process might have named entity. Entities - # are stored in the global variable "entities". - # - # Consume characters and compare to these to a substring of the - # entity names in the list until the substring no longer matches. - while (charStack[-1] is not EOF): - if not entitiesTrie.has_keys_with_prefix("".join(charStack)): - break - charStack.append(self.stream.char()) - - # At this point we have a string that starts with some characters - # that may match an entity - # Try to find the longest entity the string will match to take care - # of ¬i for instance. 
- try: - entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) - entityLength = len(entityName) - except KeyError: - entityName = None - - if entityName is not None: - if entityName[-1] != ";": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "named-entity-without-semicolon"}) - if (entityName[-1] != ";" and fromAttribute and - (charStack[entityLength] in asciiLetters or - charStack[entityLength] in digits or - charStack[entityLength] == "=")): - self.stream.unget(charStack.pop()) - output = "&" + "".join(charStack) - else: - output = entities[entityName] - self.stream.unget(charStack.pop()) - output += "".join(charStack[entityLength:]) - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-named-entity"}) - self.stream.unget(charStack.pop()) - output = "&" + "".join(charStack) - - if fromAttribute: - self.currentToken["data"][-1][1] += output - else: - if output in spaceCharacters: - tokenType = "SpaceCharacters" - else: - tokenType = "Characters" - self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) - - def processEntityInAttribute(self, allowedChar): - """This method replaces the need for "entityInAttributeValueState". - """ - self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) - - def emitCurrentToken(self): - """This method is a generic handler for emitting the tags. It also sets - the state to "data" because that's what's needed after a token has been - emitted. - """ - token = self.currentToken - # Add token to the queue to be yielded - if (token["type"] in tagTokenTypes): - token["name"] = token["name"].translate(asciiUpper2Lower) - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - data = attributeMap(raw) - if len(raw) > len(data): - # we had some duplicated attribute, fix so first wins - data.update(raw[::-1]) - token["data"] = data - - if token["type"] == tokenTypes["EndTag"]: - if token["data"]: - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "attributes-in-end-tag"}) - if token["selfClosing"]: - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "self-closing-flag-on-end-tag"}) - self.tokenQueue.append(token) - self.state = self.dataState - - # Below are the various tokenizer states worked out. - def dataState(self): - data = self.stream.char() - if data == "&": - self.state = self.entityDataState - elif data == "<": - self.state = self.tagOpenState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\u0000"}) - elif data is EOF: - # Tokenization ends. - return False - elif data in spaceCharacters: - # Directly after emitting a token you switch back to the "data - # state". At that point spaceCharacters are important so they are - # emitted separately. 
- self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": - data + self.stream.charsUntil(spaceCharacters, True)}) - # No need to update lastFourChars here, since the first space will - # have already been appended to lastFourChars and will have broken - # any sequences - else: - chars = self.stream.charsUntil(("&", "<", "\u0000")) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": - data + chars}) - return True - - def entityDataState(self): - self.consumeEntity() - self.state = self.dataState - return True - - def rcdataState(self): - data = self.stream.char() - if data == "&": - self.state = self.characterReferenceInRcdata - elif data == "<": - self.state = self.rcdataLessThanSignState - elif data == EOF: - # Tokenization ends. - return False - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - elif data in spaceCharacters: - # Directly after emitting a token you switch back to the "data - # state". At that point spaceCharacters are important so they are - # emitted separately. - self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": - data + self.stream.charsUntil(spaceCharacters, True)}) - # No need to update lastFourChars here, since the first space will - # have already been appended to lastFourChars and will have broken - # any sequences - else: - chars = self.stream.charsUntil(("&", "<", "\u0000")) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": - data + chars}) - return True - - def characterReferenceInRcdata(self): - self.consumeEntity() - self.state = self.rcdataState - return True - - def rawtextState(self): - data = self.stream.char() - if data == "<": - self.state = self.rawtextLessThanSignState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - elif data == EOF: - # Tokenization ends. - return False - else: - chars = self.stream.charsUntil(("<", "\u0000")) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": - data + chars}) - return True - - def scriptDataState(self): - data = self.stream.char() - if data == "<": - self.state = self.scriptDataLessThanSignState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - elif data == EOF: - # Tokenization ends. - return False - else: - chars = self.stream.charsUntil(("<", "\u0000")) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": - data + chars}) - return True - - def plaintextState(self): - data = self.stream.char() - if data == EOF: - # Tokenization ends. 
- return False - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": - data + self.stream.charsUntil("\u0000")}) - return True - - def tagOpenState(self): - data = self.stream.char() - if data == "!": - self.state = self.markupDeclarationOpenState - elif data == "/": - self.state = self.closeTagOpenState - elif data in asciiLetters: - self.currentToken = {"type": tokenTypes["StartTag"], - "name": data, "data": [], - "selfClosing": False, - "selfClosingAcknowledged": False} - self.state = self.tagNameState - elif data == ">": - # XXX In theory it could be something besides a tag name. But - # do we really care? - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-tag-name-but-got-right-bracket"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) - self.state = self.dataState - elif data == "?": - # XXX In theory it could be something besides a tag name. But - # do we really care? - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-tag-name-but-got-question-mark"}) - self.stream.unget(data) - self.state = self.bogusCommentState - else: - # XXX - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-tag-name"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) - self.stream.unget(data) - self.state = self.dataState - return True - - def closeTagOpenState(self): - data = self.stream.char() - if data in asciiLetters: - self.currentToken = {"type": tokenTypes["EndTag"], "name": data, - "data": [], "selfClosing": False} - self.state = self.tagNameState - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-closing-tag-but-got-right-bracket"}) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-closing-tag-but-got-eof"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "": - self.emitCurrentToken() - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-tag-name"}) - self.state = self.dataState - elif data == "/": - self.state = self.selfClosingStartTagState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["name"] += "\uFFFD" - else: - self.currentToken["name"] += data - # (Don't use charsUntil here, because tag names are - # very short and it's faster to not do anything fancy) - return True - - def rcdataLessThanSignState(self): - data = self.stream.char() - if data == "/": - self.temporaryBuffer = "" - self.state = self.rcdataEndTagOpenState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) - self.stream.unget(data) - self.state = self.rcdataState - return True - - def rcdataEndTagOpenState(self): - data = self.stream.char() - if data in asciiLetters: - self.temporaryBuffer += data - self.state = self.rcdataEndTagNameState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: - self.currentToken = {"type": tokenTypes["EndTag"], - "name": self.temporaryBuffer, - "data": [], "selfClosing": False} - self.emitCurrentToken() - self.state = self.dataState - elif data in asciiLetters: - self.temporaryBuffer += data - 
else: - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "" and appropriate: - self.currentToken = {"type": tokenTypes["EndTag"], - "name": self.temporaryBuffer, - "data": [], "selfClosing": False} - self.emitCurrentToken() - self.state = self.dataState - elif data in asciiLetters: - self.temporaryBuffer += data - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "" and appropriate: - self.currentToken = {"type": tokenTypes["EndTag"], - "name": self.temporaryBuffer, - "data": [], "selfClosing": False} - self.emitCurrentToken() - self.state = self.dataState - elif data in asciiLetters: - self.temporaryBuffer += data - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) - self.state = self.scriptDataState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - self.state = self.scriptDataEscapedState - elif data == EOF: - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - self.state = self.scriptDataEscapedState - return True - - def scriptDataEscapedLessThanSignState(self): - data = self.stream.char() - if data == "/": - self.temporaryBuffer = "" - self.state = self.scriptDataEscapedEndTagOpenState - elif data in asciiLetters: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) - self.temporaryBuffer = data - self.state = self.scriptDataDoubleEscapeStartState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) - self.stream.unget(data) - self.state = self.scriptDataEscapedState - return True - - def scriptDataEscapedEndTagOpenState(self): - data = self.stream.char() - if data in asciiLetters: - self.temporaryBuffer = data - self.state = self.scriptDataEscapedEndTagNameState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: - self.currentToken = {"type": tokenTypes["EndTag"], - "name": self.temporaryBuffer, - "data": [], "selfClosing": False} - self.emitCurrentToken() - self.state = self.dataState - elif data in asciiLetters: - self.temporaryBuffer += data - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": ""))): - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - if self.temporaryBuffer.lower() == "script": - self.state = self.scriptDataDoubleEscapedState - else: - self.state = self.scriptDataEscapedState - elif data in asciiLetters: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - self.temporaryBuffer += data - else: - self.stream.unget(data) - self.state = self.scriptDataEscapedState - return True - - def scriptDataDoubleEscapedState(self): - data = self.stream.char() - if data == "-": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) - self.state = self.scriptDataDoubleEscapedDashState - elif data == "<": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) - self.state = self.scriptDataDoubleEscapedLessThanSignState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - elif data == EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - 
"eof-in-script-in-script"}) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - return True - - def scriptDataDoubleEscapedDashState(self): - data = self.stream.char() - if data == "-": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) - self.state = self.scriptDataDoubleEscapedDashDashState - elif data == "<": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) - self.state = self.scriptDataDoubleEscapedLessThanSignState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - self.state = self.scriptDataDoubleEscapedState - elif data == EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-script-in-script"}) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - self.state = self.scriptDataDoubleEscapedState - return True - - def scriptDataDoubleEscapedDashDashState(self): - data = self.stream.char() - if data == "-": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) - elif data == "<": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) - self.state = self.scriptDataDoubleEscapedLessThanSignState - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) - self.state = self.scriptDataState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": "\uFFFD"}) - self.state = self.scriptDataDoubleEscapedState - elif data == EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-script-in-script"}) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - self.state = self.scriptDataDoubleEscapedState - return True - - def scriptDataDoubleEscapedLessThanSignState(self): - data = self.stream.char() - if data == "/": - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) - self.temporaryBuffer = "" - self.state = self.scriptDataDoubleEscapeEndState - else: - self.stream.unget(data) - self.state = self.scriptDataDoubleEscapedState - return True - - def scriptDataDoubleEscapeEndState(self): - data = self.stream.char() - if data in (spaceCharacters | frozenset(("/", ">"))): - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - if self.temporaryBuffer.lower() == "script": - self.state = self.scriptDataEscapedState - else: - self.state = self.scriptDataDoubleEscapedState - elif data in asciiLetters: - self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) - self.temporaryBuffer += data - else: - self.stream.unget(data) - self.state = self.scriptDataDoubleEscapedState - return True - - def beforeAttributeNameState(self): - data = self.stream.char() - if data in spaceCharacters: - self.stream.charsUntil(spaceCharacters, True) - elif data in asciiLetters: - self.currentToken["data"].append([data, ""]) - self.state = self.attributeNameState - elif data == ">": - self.emitCurrentToken() - elif data == "/": - self.state = self.selfClosingStartTagState - elif data in ("'", '"', "=", "<"): - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "invalid-character-in-attribute-name"}) - 
self.currentToken["data"].append([data, ""]) - self.state = self.attributeNameState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"].append(["\uFFFD", ""]) - self.state = self.attributeNameState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-attribute-name-but-got-eof"}) - self.state = self.dataState - else: - self.currentToken["data"].append([data, ""]) - self.state = self.attributeNameState - return True - - def attributeNameState(self): - data = self.stream.char() - leavingThisState = True - emitToken = False - if data == "=": - self.state = self.beforeAttributeValueState - elif data in asciiLetters: - self.currentToken["data"][-1][0] += data +\ - self.stream.charsUntil(asciiLetters, True) - leavingThisState = False - elif data == ">": - # XXX If we emit here the attributes are converted to a dict - # without being checked and when the code below runs we error - # because data is a dict not a list - emitToken = True - elif data in spaceCharacters: - self.state = self.afterAttributeNameState - elif data == "/": - self.state = self.selfClosingStartTagState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"][-1][0] += "\uFFFD" - leavingThisState = False - elif data in ("'", '"', "<"): - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": - "invalid-character-in-attribute-name"}) - self.currentToken["data"][-1][0] += data - leavingThisState = False - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "eof-in-attribute-name"}) - self.state = self.dataState - else: - self.currentToken["data"][-1][0] += data - leavingThisState = False - - if leavingThisState: - # Attributes are not dropped at this stage. That happens when the - # start tag token is emitted so values can still be safely appended - # to attributes, but we do want to report the parse error in time. 
- self.currentToken["data"][-1][0] = ( - self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) - for name, _ in self.currentToken["data"][:-1]: - if self.currentToken["data"][-1][0] == name: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "duplicate-attribute"}) - break - # XXX Fix for above XXX - if emitToken: - self.emitCurrentToken() - return True - - def afterAttributeNameState(self): - data = self.stream.char() - if data in spaceCharacters: - self.stream.charsUntil(spaceCharacters, True) - elif data == "=": - self.state = self.beforeAttributeValueState - elif data == ">": - self.emitCurrentToken() - elif data in asciiLetters: - self.currentToken["data"].append([data, ""]) - self.state = self.attributeNameState - elif data == "/": - self.state = self.selfClosingStartTagState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"].append(["\uFFFD", ""]) - self.state = self.attributeNameState - elif data in ("'", '"', "<"): - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "invalid-character-after-attribute-name"}) - self.currentToken["data"].append([data, ""]) - self.state = self.attributeNameState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-end-of-tag-but-got-eof"}) - self.state = self.dataState - else: - self.currentToken["data"].append([data, ""]) - self.state = self.attributeNameState - return True - - def beforeAttributeValueState(self): - data = self.stream.char() - if data in spaceCharacters: - self.stream.charsUntil(spaceCharacters, True) - elif data == "\"": - self.state = self.attributeValueDoubleQuotedState - elif data == "&": - self.state = self.attributeValueUnQuotedState - self.stream.unget(data) - elif data == "'": - self.state = self.attributeValueSingleQuotedState - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-attribute-value-but-got-right-bracket"}) - self.emitCurrentToken() - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"][-1][1] += "\uFFFD" - self.state = self.attributeValueUnQuotedState - elif data in ("=", "<", "`"): - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "equals-in-unquoted-attribute-value"}) - self.currentToken["data"][-1][1] += data - self.state = self.attributeValueUnQuotedState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-attribute-value-but-got-eof"}) - self.state = self.dataState - else: - self.currentToken["data"][-1][1] += data - self.state = self.attributeValueUnQuotedState - return True - - def attributeValueDoubleQuotedState(self): - data = self.stream.char() - if data == "\"": - self.state = self.afterAttributeValueState - elif data == "&": - self.processEntityInAttribute('"') - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"][-1][1] += "\uFFFD" - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-attribute-value-double-quote"}) - self.state = self.dataState - else: - self.currentToken["data"][-1][1] += data +\ - self.stream.charsUntil(("\"", "&", "\u0000")) - return True - - def attributeValueSingleQuotedState(self): - data = self.stream.char() - if data == "'": - self.state = 
self.afterAttributeValueState - elif data == "&": - self.processEntityInAttribute("'") - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"][-1][1] += "\uFFFD" - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-attribute-value-single-quote"}) - self.state = self.dataState - else: - self.currentToken["data"][-1][1] += data +\ - self.stream.charsUntil(("'", "&", "\u0000")) - return True - - def attributeValueUnQuotedState(self): - data = self.stream.char() - if data in spaceCharacters: - self.state = self.beforeAttributeNameState - elif data == "&": - self.processEntityInAttribute(">") - elif data == ">": - self.emitCurrentToken() - elif data in ('"', "'", "=", "<", "`"): - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-character-in-unquoted-attribute-value"}) - self.currentToken["data"][-1][1] += data - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"][-1][1] += "\uFFFD" - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-attribute-value-no-quotes"}) - self.state = self.dataState - else: - self.currentToken["data"][-1][1] += data + self.stream.charsUntil( - frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) - return True - - def afterAttributeValueState(self): - data = self.stream.char() - if data in spaceCharacters: - self.state = self.beforeAttributeNameState - elif data == ">": - self.emitCurrentToken() - elif data == "/": - self.state = self.selfClosingStartTagState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-EOF-after-attribute-value"}) - self.stream.unget(data) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-character-after-attribute-value"}) - self.stream.unget(data) - self.state = self.beforeAttributeNameState - return True - - def selfClosingStartTagState(self): - data = self.stream.char() - if data == ">": - self.currentToken["selfClosing"] = True - self.emitCurrentToken() - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": - "unexpected-EOF-after-solidus-in-tag"}) - self.stream.unget(data) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-character-after-solidus-in-tag"}) - self.stream.unget(data) - self.state = self.beforeAttributeNameState - return True - - def bogusCommentState(self): - # Make a new comment token and give it as value all the characters - # until the first > or EOF (charsUntil checks for EOF automatically) - # and emit it. - data = self.stream.charsUntil(">") - data = data.replace("\u0000", "\uFFFD") - self.tokenQueue.append( - {"type": tokenTypes["Comment"], "data": data}) - - # Eat the character directly after the bogus comment which is either a - # ">" or an EOF. 
- self.stream.char() - self.state = self.dataState - return True - - def markupDeclarationOpenState(self): - charStack = [self.stream.char()] - if charStack[-1] == "-": - charStack.append(self.stream.char()) - if charStack[-1] == "-": - self.currentToken = {"type": tokenTypes["Comment"], "data": ""} - self.state = self.commentStartState - return True - elif charStack[-1] in ('d', 'D'): - matched = True - for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), - ('y', 'Y'), ('p', 'P'), ('e', 'E')): - charStack.append(self.stream.char()) - if charStack[-1] not in expected: - matched = False - break - if matched: - self.currentToken = {"type": tokenTypes["Doctype"], - "name": "", - "publicId": None, "systemId": None, - "correct": True} - self.state = self.doctypeState - return True - elif (charStack[-1] == "[" and - self.parser is not None and - self.parser.tree.openElements and - self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): - matched = True - for expected in ["C", "D", "A", "T", "A", "["]: - charStack.append(self.stream.char()) - if charStack[-1] != expected: - matched = False - break - if matched: - self.state = self.cdataSectionState - return True - - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-dashes-or-doctype"}) - - while charStack: - self.stream.unget(charStack.pop()) - self.state = self.bogusCommentState - return True - - def commentStartState(self): - data = self.stream.char() - if data == "-": - self.state = self.commentStartDashState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"] += "\uFFFD" - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "incorrect-comment"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-comment"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["data"] += data - self.state = self.commentState - return True - - def commentStartDashState(self): - data = self.stream.char() - if data == "-": - self.state = self.commentEndState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"] += "-\uFFFD" - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "incorrect-comment"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-comment"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["data"] += "-" + data - self.state = self.commentState - return True - - def commentState(self): - data = self.stream.char() - if data == "-": - self.state = self.commentEndDashState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"] += "\uFFFD" - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "eof-in-comment"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["data"] += data + \ - self.stream.charsUntil(("-", "\u0000")) - return True - - def commentEndDashState(self): - data = self.stream.char() - if data == "-": - self.state = 
self.commentEndState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"] += "-\uFFFD" - self.state = self.commentState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-comment-end-dash"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["data"] += "-" + data - self.state = self.commentState - return True - - def commentEndState(self): - data = self.stream.char() - if data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"] += "--\uFFFD" - self.state = self.commentState - elif data == "!": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-bang-after-double-dash-in-comment"}) - self.state = self.commentEndBangState - elif data == "-": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-dash-after-double-dash-in-comment"}) - self.currentToken["data"] += data - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-comment-double-dash"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - # XXX - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-comment"}) - self.currentToken["data"] += "--" + data - self.state = self.commentState - return True - - def commentEndBangState(self): - data = self.stream.char() - if data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data == "-": - self.currentToken["data"] += "--!" - self.state = self.commentEndDashState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["data"] += "--!\uFFFD" - self.state = self.commentState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-comment-end-bang-state"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["data"] += "--!" 
+ data - self.state = self.commentState - return True - - def doctypeState(self): - data = self.stream.char() - if data in spaceCharacters: - self.state = self.beforeDoctypeNameState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-doctype-name-but-got-eof"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "need-space-after-doctype"}) - self.stream.unget(data) - self.state = self.beforeDoctypeNameState - return True - - def beforeDoctypeNameState(self): - data = self.stream.char() - if data in spaceCharacters: - pass - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-doctype-name-but-got-right-bracket"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["name"] = "\uFFFD" - self.state = self.doctypeNameState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-doctype-name-but-got-eof"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["name"] = data - self.state = self.doctypeNameState - return True - - def doctypeNameState(self): - data = self.stream.char() - if data in spaceCharacters: - self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) - self.state = self.afterDoctypeNameState - elif data == ">": - self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["name"] += "\uFFFD" - self.state = self.doctypeNameState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype-name"}) - self.currentToken["correct"] = False - self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["name"] += data - return True - - def afterDoctypeNameState(self): - data = self.stream.char() - if data in spaceCharacters: - pass - elif data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.currentToken["correct"] = False - self.stream.unget(data) - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - if data in ("p", "P"): - matched = True - for expected in (("u", "U"), ("b", "B"), ("l", "L"), - ("i", "I"), ("c", "C")): - data = self.stream.char() - if data not in expected: - matched = False - break - if matched: - self.state = self.afterDoctypePublicKeywordState - return True - elif data in ("s", "S"): - matched = True - for expected in (("y", "Y"), ("s", "S"), ("t", "T"), - ("e", "E"), ("m", "M")): - data = self.stream.char() - if data not in expected: - matched = False - break - if matched: - self.state = self.afterDoctypeSystemKeywordState - return True - - # All the characters read before the current 'data' will be - # [a-zA-Z], 
so they're garbage in the bogus doctype and can be - # discarded; only the latest character might be '>' or EOF - # and needs to be ungetted - self.stream.unget(data) - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "expected-space-or-right-bracket-in-doctype", "datavars": - {"data": data}}) - self.currentToken["correct"] = False - self.state = self.bogusDoctypeState - - return True - - def afterDoctypePublicKeywordState(self): - data = self.stream.char() - if data in spaceCharacters: - self.state = self.beforeDoctypePublicIdentifierState - elif data in ("'", '"'): - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.stream.unget(data) - self.state = self.beforeDoctypePublicIdentifierState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.stream.unget(data) - self.state = self.beforeDoctypePublicIdentifierState - return True - - def beforeDoctypePublicIdentifierState(self): - data = self.stream.char() - if data in spaceCharacters: - pass - elif data == "\"": - self.currentToken["publicId"] = "" - self.state = self.doctypePublicIdentifierDoubleQuotedState - elif data == "'": - self.currentToken["publicId"] = "" - self.state = self.doctypePublicIdentifierSingleQuotedState - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-end-of-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["correct"] = False - self.state = self.bogusDoctypeState - return True - - def doctypePublicIdentifierDoubleQuotedState(self): - data = self.stream.char() - if data == "\"": - self.state = self.afterDoctypePublicIdentifierState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["publicId"] += "\uFFFD" - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-end-of-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["publicId"] += data - return True - - def doctypePublicIdentifierSingleQuotedState(self): - data = self.stream.char() - if data == "'": - self.state = self.afterDoctypePublicIdentifierState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["publicId"] += "\uFFFD" - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-end-of-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["publicId"] += data - return True - - def afterDoctypePublicIdentifierState(self): - data = self.stream.char() - if data in spaceCharacters: - self.state = self.betweenDoctypePublicAndSystemIdentifiersState - elif data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data == '"': - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["systemId"] = "" - self.state = self.doctypeSystemIdentifierDoubleQuotedState - elif data == "'": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["systemId"] = "" - self.state = self.doctypeSystemIdentifierSingleQuotedState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["correct"] = False - self.state = self.bogusDoctypeState - return True - - def betweenDoctypePublicAndSystemIdentifiersState(self): - data = self.stream.char() - if data in spaceCharacters: - pass - elif data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data == '"': - self.currentToken["systemId"] = "" - self.state = self.doctypeSystemIdentifierDoubleQuotedState - elif data == "'": - self.currentToken["systemId"] = "" - self.state = self.doctypeSystemIdentifierSingleQuotedState - elif data == EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["correct"] = False - self.state = self.bogusDoctypeState - return True - - def afterDoctypeSystemKeywordState(self): - data = self.stream.char() - if data in spaceCharacters: - self.state = self.beforeDoctypeSystemIdentifierState - elif data in ("'", '"'): - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.stream.unget(data) - self.state = self.beforeDoctypeSystemIdentifierState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.stream.unget(data) - self.state = self.beforeDoctypeSystemIdentifierState - return True - - def beforeDoctypeSystemIdentifierState(self): - data = self.stream.char() - if data in spaceCharacters: - pass - elif data == "\"": - self.currentToken["systemId"] = "" - self.state = self.doctypeSystemIdentifierDoubleQuotedState - elif data == "'": - self.currentToken["systemId"] = "" - self.state = self.doctypeSystemIdentifierSingleQuotedState - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.currentToken["correct"] = False - self.state = self.bogusDoctypeState - return True - - def doctypeSystemIdentifierDoubleQuotedState(self): - data = self.stream.char() - if data == "\"": - self.state = self.afterDoctypeSystemIdentifierState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["systemId"] += "\uFFFD" - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-end-of-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["systemId"] += data - return True - - def doctypeSystemIdentifierSingleQuotedState(self): - data = self.stream.char() - if data == "'": - self.state = self.afterDoctypeSystemIdentifierState - elif data == "\u0000": - self.tokenQueue.append({"type": tokenTypes["ParseError"], - "data": "invalid-codepoint"}) - self.currentToken["systemId"] += "\uFFFD" - elif data == ">": - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-end-of-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.currentToken["systemId"] += data - return True - - def afterDoctypeSystemIdentifierState(self): - data = self.stream.char() - if data in spaceCharacters: - pass - elif data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "eof-in-doctype"}) - self.currentToken["correct"] = False - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": - "unexpected-char-in-doctype"}) - self.state = self.bogusDoctypeState - return True - - def bogusDoctypeState(self): - data = self.stream.char() - if data == ">": - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - elif data is EOF: - # XXX EMIT - self.stream.unget(data) - self.tokenQueue.append(self.currentToken) - self.state = self.dataState - else: - pass - return True - - def cdataSectionState(self): - data = [] - while True: - data.append(self.stream.charsUntil("]")) - data.append(self.stream.charsUntil(">")) - char = self.stream.char() - if char == EOF: - break - else: - assert char == ">" - if data[-1][-2:] == "]]": - data[-1] = data[-1][:-2] - break - else: - data.append(char) - - data = "".join(data) # pylint:disable=redefined-variable-type - # Deal with null here rather than in the parser - nullCount = data.count("\u0000") - if nullCount > 0: - for _ in range(nullCount): - self.tokenQueue.append({"type": tokenTypes["ParseError"], 
- "data": "invalid-codepoint"}) - data = data.replace("\u0000", "\uFFFD") - if data: - self.tokenQueue.append({"type": tokenTypes["Characters"], - "data": data}) - self.state = self.dataState - return True diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py deleted file mode 100644 index 07bad5d..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from .py import Trie - -__all__ = ["Trie"] diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index b480346..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc deleted file mode 100644 index 36912c0..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc deleted file mode 100644 index 27b4a7d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py deleted file mode 100644 index 6b71975..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py +++ /dev/null @@ -1,40 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -try: - from collections.abc import Mapping -except ImportError: # Python 2.7 - from collections import Mapping - - -class Trie(Mapping): - """Abstract base class for tries""" - - def keys(self, prefix=None): - # pylint:disable=arguments-differ - keys = super(Trie, self).keys() - - if prefix is None: - return set(keys) - - return {x for x in keys if x.startswith(prefix)} - - def has_keys_with_prefix(self, prefix): - for key in self.keys(): - if key.startswith(prefix): - return True - - return False - - def longest_prefix(self, prefix): - if prefix in self: - return prefix - - for i in range(1, len(prefix) + 1): - if prefix[:-i] in self: - return prefix[:-i] - - raise KeyError(prefix) - - def longest_prefix_item(self, prefix): - lprefix = self.longest_prefix(prefix) - return (lprefix, self[lprefix]) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py deleted file mode 100644 index c178b21..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import text_type - -from bisect import bisect_left - -from ._base import Trie as ABCTrie - - -class Trie(ABCTrie): - def __init__(self, data): - if not all(isinstance(x, text_type) for x in data.keys()): - raise TypeError("All keys must be strings") - - self._data = data - self._keys = sorted(data.keys()) - self._cachestr = "" - self._cachepoints = (0, len(data)) - - def 
__contains__(self, key): - return key in self._data - - def __len__(self): - return len(self._data) - - def __iter__(self): - return iter(self._data) - - def __getitem__(self, key): - return self._data[key] - - def keys(self, prefix=None): - if prefix is None or prefix == "" or not self._keys: - return set(self._keys) - - if prefix.startswith(self._cachestr): - lo, hi = self._cachepoints - start = i = bisect_left(self._keys, prefix, lo, hi) - else: - start = i = bisect_left(self._keys, prefix) - - keys = set() - if start == len(self._keys): - return keys - - while self._keys[i].startswith(prefix): - keys.add(self._keys[i]) - i += 1 - - self._cachestr = prefix - self._cachepoints = (start, i) - - return keys - - def has_keys_with_prefix(self, prefix): - if prefix in self._data: - return True - - if prefix.startswith(self._cachestr): - lo, hi = self._cachepoints - i = bisect_left(self._keys, prefix, lo, hi) - else: - i = bisect_left(self._keys, prefix) - - if i == len(self._keys): - return False - - return self._keys[i].startswith(prefix) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/_utils.py b/venv/Lib/site-packages/pip/_vendor/html5lib/_utils.py deleted file mode 100644 index d7c4926..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/_utils.py +++ /dev/null @@ -1,159 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from types import ModuleType - -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - -from pip._vendor.six import text_type, PY3 - -if PY3: - import xml.etree.ElementTree as default_etree -else: - try: - import xml.etree.cElementTree as default_etree - except ImportError: - import xml.etree.ElementTree as default_etree - - -__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", - "surrogatePairToCodepoint", "moduleFactoryFactory", - "supports_lone_surrogates"] - - -# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be -# caught by the below test. In general this would be any platform -# using UTF-16 as its encoding of unicode strings, such as -# Jython. This is because UTF-16 itself is based on the use of such -# surrogates, and there is no mechanism to further escape such -# escapes. -try: - _x = eval('"\\uD800"') # pylint:disable=eval-used - if not isinstance(_x, text_type): - # We need this with u"" because of http://bugs.jython.org/issue2039 - _x = eval('u"\\uD800"') # pylint:disable=eval-used - assert isinstance(_x, text_type) -except Exception: - supports_lone_surrogates = False -else: - supports_lone_surrogates = True - - -class MethodDispatcher(dict): - """Dict with 2 special properties: - - On initiation, keys that are lists, sets or tuples are converted to - multiple keys so accessing any one of the items in the original - list-like object returns the matching value - - md = MethodDispatcher({("foo", "bar"):"baz"}) - md["foo"] == "baz" - - A default value which can be set through the default attribute. 
- """ - - def __init__(self, items=()): - _dictEntries = [] - for name, value in items: - if isinstance(name, (list, tuple, frozenset, set)): - for item in name: - _dictEntries.append((item, value)) - else: - _dictEntries.append((name, value)) - dict.__init__(self, _dictEntries) - assert len(self) == len(_dictEntries) - self.default = None - - def __getitem__(self, key): - return dict.get(self, key, self.default) - - def __get__(self, instance, owner=None): - return BoundMethodDispatcher(instance, self) - - -class BoundMethodDispatcher(Mapping): - """Wraps a MethodDispatcher, binding its return values to `instance`""" - def __init__(self, instance, dispatcher): - self.instance = instance - self.dispatcher = dispatcher - - def __getitem__(self, key): - # see https://docs.python.org/3/reference/datamodel.html#object.__get__ - # on a function, __get__ is used to bind a function to an instance as a bound method - return self.dispatcher[key].__get__(self.instance) - - def get(self, key, default): - if key in self.dispatcher: - return self[key] - else: - return default - - def __iter__(self): - return iter(self.dispatcher) - - def __len__(self): - return len(self.dispatcher) - - def __contains__(self, key): - return key in self.dispatcher - - -# Some utility functions to deal with weirdness around UCS2 vs UCS4 -# python builds - -def isSurrogatePair(data): - return (len(data) == 2 and - ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and - ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) - - -def surrogatePairToCodepoint(data): - char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + - (ord(data[1]) - 0xDC00)) - return char_val - -# Module Factory Factory (no, this isn't Java, I know) -# Here to stop this being duplicated all over the place. - - -def moduleFactoryFactory(factory): - moduleCache = {} - - def moduleFactory(baseModule, *args, **kwargs): - if isinstance(ModuleType.__name__, type("")): - name = "_%s_factory" % baseModule.__name__ - else: - name = b"_%s_factory" % baseModule.__name__ - - kwargs_tuple = tuple(kwargs.items()) - - try: - return moduleCache[name][args][kwargs_tuple] - except KeyError: - mod = ModuleType(name) - objs = factory(baseModule, *args, **kwargs) - mod.__dict__.update(objs) - if "name" not in moduleCache: - moduleCache[name] = {} - if "args" not in moduleCache[name]: - moduleCache[name][args] = {} - if "kwargs" not in moduleCache[name][args]: - moduleCache[name][args][kwargs_tuple] = {} - moduleCache[name][args][kwargs_tuple] = mod - return mod - - return moduleFactory - - -def memoize(func): - cache = {} - - def wrapped(*args, **kwargs): - key = (tuple(args), tuple(kwargs.items())) - if key not in cache: - cache[key] = func(*args, **kwargs) - return cache[key] - - return wrapped diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/constants.py b/venv/Lib/site-packages/pip/_vendor/html5lib/constants.py deleted file mode 100644 index fe3e237..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/constants.py +++ /dev/null @@ -1,2946 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -import string - -EOF = None - -E = { - "null-character": - "Null character in input stream, replaced with U+FFFD.", - "invalid-codepoint": - "Invalid codepoint in stream.", - "incorrectly-placed-solidus": - "Solidus (/) incorrectly placed in tag.", - "incorrect-cr-newline-entity": - "Incorrect CR newline entity, replaced with LF.", - "illegal-windows-1252-entity": - "Entity used with illegal number (windows-1252 reference).", - 
"cant-convert-numeric-entity": - "Numeric entity couldn't be converted to character " - "(codepoint U+%(charAsInt)08x).", - "illegal-codepoint-for-numeric-entity": - "Numeric entity represents an illegal codepoint: " - "U+%(charAsInt)08x.", - "numeric-entity-without-semicolon": - "Numeric entity didn't end with ';'.", - "expected-numeric-entity-but-got-eof": - "Numeric entity expected. Got end of file instead.", - "expected-numeric-entity": - "Numeric entity expected but none found.", - "named-entity-without-semicolon": - "Named entity didn't end with ';'.", - "expected-named-entity": - "Named entity expected. Got none.", - "attributes-in-end-tag": - "End tag contains unexpected attributes.", - 'self-closing-flag-on-end-tag': - "End tag contains unexpected self-closing flag.", - "expected-tag-name-but-got-right-bracket": - "Expected tag name. Got '>' instead.", - "expected-tag-name-but-got-question-mark": - "Expected tag name. Got '?' instead. (HTML doesn't " - "support processing instructions.)", - "expected-tag-name": - "Expected tag name. Got something else instead", - "expected-closing-tag-but-got-right-bracket": - "Expected closing tag. Got '>' instead. Ignoring ''.", - "expected-closing-tag-but-got-eof": - "Expected closing tag. Unexpected end of file.", - "expected-closing-tag-but-got-char": - "Expected closing tag. Unexpected character '%(data)s' found.", - "eof-in-tag-name": - "Unexpected end of file in the tag name.", - "expected-attribute-name-but-got-eof": - "Unexpected end of file. Expected attribute name instead.", - "eof-in-attribute-name": - "Unexpected end of file in attribute name.", - "invalid-character-in-attribute-name": - "Invalid character in attribute name", - "duplicate-attribute": - "Dropped duplicate attribute on tag.", - "expected-end-of-tag-name-but-got-eof": - "Unexpected end of file. Expected = or end of tag.", - "expected-attribute-value-but-got-eof": - "Unexpected end of file. Expected attribute value.", - "expected-attribute-value-but-got-right-bracket": - "Expected attribute value. Got '>' instead.", - 'equals-in-unquoted-attribute-value': - "Unexpected = in unquoted attribute", - 'unexpected-character-in-unquoted-attribute-value': - "Unexpected character in unquoted attribute", - "invalid-character-after-attribute-name": - "Unexpected character after attribute name.", - "unexpected-character-after-attribute-value": - "Unexpected character after attribute value.", - "eof-in-attribute-value-double-quote": - "Unexpected end of file in attribute value (\").", - "eof-in-attribute-value-single-quote": - "Unexpected end of file in attribute value (').", - "eof-in-attribute-value-no-quotes": - "Unexpected end of file in attribute value.", - "unexpected-EOF-after-solidus-in-tag": - "Unexpected end of file in tag. Expected >", - "unexpected-character-after-solidus-in-tag": - "Unexpected character after / in tag. Expected >", - "expected-dashes-or-doctype": - "Expected '--' or 'DOCTYPE'. Not found.", - "unexpected-bang-after-double-dash-in-comment": - "Unexpected ! 
after -- in comment", - "unexpected-space-after-double-dash-in-comment": - "Unexpected space after -- in comment", - "incorrect-comment": - "Incorrect comment.", - "eof-in-comment": - "Unexpected end of file in comment.", - "eof-in-comment-end-dash": - "Unexpected end of file in comment (-)", - "unexpected-dash-after-double-dash-in-comment": - "Unexpected '-' after '--' found in comment.", - "eof-in-comment-double-dash": - "Unexpected end of file in comment (--).", - "eof-in-comment-end-space-state": - "Unexpected end of file in comment.", - "eof-in-comment-end-bang-state": - "Unexpected end of file in comment.", - "unexpected-char-in-comment": - "Unexpected character in comment found.", - "need-space-after-doctype": - "No space after literal string 'DOCTYPE'.", - "expected-doctype-name-but-got-right-bracket": - "Unexpected > character. Expected DOCTYPE name.", - "expected-doctype-name-but-got-eof": - "Unexpected end of file. Expected DOCTYPE name.", - "eof-in-doctype-name": - "Unexpected end of file in DOCTYPE name.", - "eof-in-doctype": - "Unexpected end of file in DOCTYPE.", - "expected-space-or-right-bracket-in-doctype": - "Expected space or '>'. Got '%(data)s'", - "unexpected-end-of-doctype": - "Unexpected end of DOCTYPE.", - "unexpected-char-in-doctype": - "Unexpected character in DOCTYPE.", - "eof-in-innerhtml": - "XXX innerHTML EOF", - "unexpected-doctype": - "Unexpected DOCTYPE. Ignored.", - "non-html-root": - "html needs to be the first start tag.", - "expected-doctype-but-got-eof": - "Unexpected End of file. Expected DOCTYPE.", - "unknown-doctype": - "Erroneous DOCTYPE.", - "expected-doctype-but-got-chars": - "Unexpected non-space characters. Expected DOCTYPE.", - "expected-doctype-but-got-start-tag": - "Unexpected start tag (%(name)s). Expected DOCTYPE.", - "expected-doctype-but-got-end-tag": - "Unexpected end tag (%(name)s). Expected DOCTYPE.", - "end-tag-after-implied-root": - "Unexpected end tag (%(name)s) after the (implied) root element.", - "expected-named-closing-tag-but-got-eof": - "Unexpected end of file. Expected end tag (%(name)s).", - "two-heads-are-not-better-than-one": - "Unexpected start tag head in existing head. Ignored.", - "unexpected-end-tag": - "Unexpected end tag (%(name)s). Ignored.", - "unexpected-start-tag-out-of-my-head": - "Unexpected start tag (%(name)s) that can be in head. Moved.", - "unexpected-start-tag": - "Unexpected start tag (%(name)s).", - "missing-end-tag": - "Missing end tag (%(name)s).", - "missing-end-tags": - "Missing end tags (%(name)s).", - "unexpected-start-tag-implies-end-tag": - "Unexpected start tag (%(startName)s) " - "implies end tag (%(endName)s).", - "unexpected-start-tag-treated-as": - "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", - "deprecated-tag": - "Unexpected start tag %(name)s. Don't use it!", - "unexpected-start-tag-ignored": - "Unexpected start tag %(name)s. Ignored.", - "expected-one-end-tag-but-got-another": - "Unexpected end tag (%(gotName)s). " - "Missing end tag (%(expectedName)s).", - "end-tag-too-early": - "End tag (%(name)s) seen too early. Expected other end tag.", - "end-tag-too-early-named": - "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", - "end-tag-too-early-ignored": - "End tag (%(name)s) seen too early. 
Ignored.", - "adoption-agency-1.1": - "End tag (%(name)s) violates step 1, " - "paragraph 1 of the adoption agency algorithm.", - "adoption-agency-1.2": - "End tag (%(name)s) violates step 1, " - "paragraph 2 of the adoption agency algorithm.", - "adoption-agency-1.3": - "End tag (%(name)s) violates step 1, " - "paragraph 3 of the adoption agency algorithm.", - "adoption-agency-4.4": - "End tag (%(name)s) violates step 4, " - "paragraph 4 of the adoption agency algorithm.", - "unexpected-end-tag-treated-as": - "Unexpected end tag (%(originalName)s). Treated as %(newName)s.", - "no-end-tag": - "This element (%(name)s) has no end tag.", - "unexpected-implied-end-tag-in-table": - "Unexpected implied end tag (%(name)s) in the table phase.", - "unexpected-implied-end-tag-in-table-body": - "Unexpected implied end tag (%(name)s) in the table body phase.", - "unexpected-char-implies-table-voodoo": - "Unexpected non-space characters in " - "table context caused voodoo mode.", - "unexpected-hidden-input-in-table": - "Unexpected input with type hidden in table context.", - "unexpected-form-in-table": - "Unexpected form in table context.", - "unexpected-start-tag-implies-table-voodoo": - "Unexpected start tag (%(name)s) in " - "table context caused voodoo mode.", - "unexpected-end-tag-implies-table-voodoo": - "Unexpected end tag (%(name)s) in " - "table context caused voodoo mode.", - "unexpected-cell-in-table-body": - "Unexpected table cell start tag (%(name)s) " - "in the table body phase.", - "unexpected-cell-end-tag": - "Got table cell end tag (%(name)s) " - "while required end tags are missing.", - "unexpected-end-tag-in-table-body": - "Unexpected end tag (%(name)s) in the table body phase. Ignored.", - "unexpected-implied-end-tag-in-table-row": - "Unexpected implied end tag (%(name)s) in the table row phase.", - "unexpected-end-tag-in-table-row": - "Unexpected end tag (%(name)s) in the table row phase. Ignored.", - "unexpected-select-in-select": - "Unexpected select start tag in the select phase " - "treated as select end tag.", - "unexpected-input-in-select": - "Unexpected input start tag in the select phase.", - "unexpected-start-tag-in-select": - "Unexpected start tag token (%(name)s in the select phase. " - "Ignored.", - "unexpected-end-tag-in-select": - "Unexpected end tag (%(name)s) in the select phase. Ignored.", - "unexpected-table-element-start-tag-in-select-in-table": - "Unexpected table element start tag (%(name)s) in the select in table phase.", - "unexpected-table-element-end-tag-in-select-in-table": - "Unexpected table element end tag (%(name)s) in the select in table phase.", - "unexpected-char-after-body": - "Unexpected non-space characters in the after body phase.", - "unexpected-start-tag-after-body": - "Unexpected start tag token (%(name)s)" - " in the after body phase.", - "unexpected-end-tag-after-body": - "Unexpected end tag token (%(name)s)" - " in the after body phase.", - "unexpected-char-in-frameset": - "Unexpected characters in the frameset phase. Characters ignored.", - "unexpected-start-tag-in-frameset": - "Unexpected start tag token (%(name)s)" - " in the frameset phase. Ignored.", - "unexpected-frameset-in-frameset-innerhtml": - "Unexpected end tag token (frameset) " - "in the frameset phase (innerHTML).", - "unexpected-end-tag-in-frameset": - "Unexpected end tag token (%(name)s)" - " in the frameset phase. Ignored.", - "unexpected-char-after-frameset": - "Unexpected non-space characters in the " - "after frameset phase. 
Ignored.", - "unexpected-start-tag-after-frameset": - "Unexpected start tag (%(name)s)" - " in the after frameset phase. Ignored.", - "unexpected-end-tag-after-frameset": - "Unexpected end tag (%(name)s)" - " in the after frameset phase. Ignored.", - "unexpected-end-tag-after-body-innerhtml": - "Unexpected end tag after body(innerHtml)", - "expected-eof-but-got-char": - "Unexpected non-space characters. Expected end of file.", - "expected-eof-but-got-start-tag": - "Unexpected start tag (%(name)s)" - ". Expected end of file.", - "expected-eof-but-got-end-tag": - "Unexpected end tag (%(name)s)" - ". Expected end of file.", - "eof-in-table": - "Unexpected end of file. Expected table content.", - "eof-in-select": - "Unexpected end of file. Expected select content.", - "eof-in-frameset": - "Unexpected end of file. Expected frameset content.", - "eof-in-script-in-script": - "Unexpected end of file. Expected script content.", - "eof-in-foreign-lands": - "Unexpected end of file. Expected foreign content", - "non-void-element-with-trailing-solidus": - "Trailing solidus not allowed on element %(name)s", - "unexpected-html-element-in-foreign-content": - "Element %(name)s not allowed in a non-html context", - "unexpected-end-tag-before-html": - "Unexpected end tag (%(name)s) before html.", - "unexpected-inhead-noscript-tag": - "Element %(name)s not allowed in a inhead-noscript context", - "eof-in-head-noscript": - "Unexpected end of file. Expected inhead-noscript content", - "char-in-head-noscript": - "Unexpected non-space character. Expected inhead-noscript content", - "XXX-undefined-error": - "Undefined error (this sucks and should be fixed)", -} - -namespaces = { - "html": "http://www.w3.org/1999/xhtml", - "mathml": "http://www.w3.org/1998/Math/MathML", - "svg": "http://www.w3.org/2000/svg", - "xlink": "http://www.w3.org/1999/xlink", - "xml": "http://www.w3.org/XML/1998/namespace", - "xmlns": "http://www.w3.org/2000/xmlns/" -} - -scopingElements = frozenset([ - (namespaces["html"], "applet"), - (namespaces["html"], "caption"), - (namespaces["html"], "html"), - (namespaces["html"], "marquee"), - (namespaces["html"], "object"), - (namespaces["html"], "table"), - (namespaces["html"], "td"), - (namespaces["html"], "th"), - (namespaces["mathml"], "mi"), - (namespaces["mathml"], "mo"), - (namespaces["mathml"], "mn"), - (namespaces["mathml"], "ms"), - (namespaces["mathml"], "mtext"), - (namespaces["mathml"], "annotation-xml"), - (namespaces["svg"], "foreignObject"), - (namespaces["svg"], "desc"), - (namespaces["svg"], "title"), -]) - -formattingElements = frozenset([ - (namespaces["html"], "a"), - (namespaces["html"], "b"), - (namespaces["html"], "big"), - (namespaces["html"], "code"), - (namespaces["html"], "em"), - (namespaces["html"], "font"), - (namespaces["html"], "i"), - (namespaces["html"], "nobr"), - (namespaces["html"], "s"), - (namespaces["html"], "small"), - (namespaces["html"], "strike"), - (namespaces["html"], "strong"), - (namespaces["html"], "tt"), - (namespaces["html"], "u") -]) - -specialElements = frozenset([ - (namespaces["html"], "address"), - (namespaces["html"], "applet"), - (namespaces["html"], "area"), - (namespaces["html"], "article"), - (namespaces["html"], "aside"), - (namespaces["html"], "base"), - (namespaces["html"], "basefont"), - (namespaces["html"], "bgsound"), - (namespaces["html"], "blockquote"), - (namespaces["html"], "body"), - (namespaces["html"], "br"), - (namespaces["html"], "button"), - (namespaces["html"], "caption"), - (namespaces["html"], "center"), - 
(namespaces["html"], "col"), - (namespaces["html"], "colgroup"), - (namespaces["html"], "command"), - (namespaces["html"], "dd"), - (namespaces["html"], "details"), - (namespaces["html"], "dir"), - (namespaces["html"], "div"), - (namespaces["html"], "dl"), - (namespaces["html"], "dt"), - (namespaces["html"], "embed"), - (namespaces["html"], "fieldset"), - (namespaces["html"], "figure"), - (namespaces["html"], "footer"), - (namespaces["html"], "form"), - (namespaces["html"], "frame"), - (namespaces["html"], "frameset"), - (namespaces["html"], "h1"), - (namespaces["html"], "h2"), - (namespaces["html"], "h3"), - (namespaces["html"], "h4"), - (namespaces["html"], "h5"), - (namespaces["html"], "h6"), - (namespaces["html"], "head"), - (namespaces["html"], "header"), - (namespaces["html"], "hr"), - (namespaces["html"], "html"), - (namespaces["html"], "iframe"), - # Note that image is commented out in the spec as "this isn't an - # element that can end up on the stack, so it doesn't matter," - (namespaces["html"], "image"), - (namespaces["html"], "img"), - (namespaces["html"], "input"), - (namespaces["html"], "isindex"), - (namespaces["html"], "li"), - (namespaces["html"], "link"), - (namespaces["html"], "listing"), - (namespaces["html"], "marquee"), - (namespaces["html"], "menu"), - (namespaces["html"], "meta"), - (namespaces["html"], "nav"), - (namespaces["html"], "noembed"), - (namespaces["html"], "noframes"), - (namespaces["html"], "noscript"), - (namespaces["html"], "object"), - (namespaces["html"], "ol"), - (namespaces["html"], "p"), - (namespaces["html"], "param"), - (namespaces["html"], "plaintext"), - (namespaces["html"], "pre"), - (namespaces["html"], "script"), - (namespaces["html"], "section"), - (namespaces["html"], "select"), - (namespaces["html"], "style"), - (namespaces["html"], "table"), - (namespaces["html"], "tbody"), - (namespaces["html"], "td"), - (namespaces["html"], "textarea"), - (namespaces["html"], "tfoot"), - (namespaces["html"], "th"), - (namespaces["html"], "thead"), - (namespaces["html"], "title"), - (namespaces["html"], "tr"), - (namespaces["html"], "ul"), - (namespaces["html"], "wbr"), - (namespaces["html"], "xmp"), - (namespaces["svg"], "foreignObject") -]) - -htmlIntegrationPointElements = frozenset([ - (namespaces["mathml"], "annotation-xml"), - (namespaces["svg"], "foreignObject"), - (namespaces["svg"], "desc"), - (namespaces["svg"], "title") -]) - -mathmlTextIntegrationPointElements = frozenset([ - (namespaces["mathml"], "mi"), - (namespaces["mathml"], "mo"), - (namespaces["mathml"], "mn"), - (namespaces["mathml"], "ms"), - (namespaces["mathml"], "mtext") -]) - -adjustSVGAttributes = { - "attributename": "attributeName", - "attributetype": "attributeType", - "basefrequency": "baseFrequency", - "baseprofile": "baseProfile", - "calcmode": "calcMode", - "clippathunits": "clipPathUnits", - "contentscripttype": "contentScriptType", - "contentstyletype": "contentStyleType", - "diffuseconstant": "diffuseConstant", - "edgemode": "edgeMode", - "externalresourcesrequired": "externalResourcesRequired", - "filterres": "filterRes", - "filterunits": "filterUnits", - "glyphref": "glyphRef", - "gradienttransform": "gradientTransform", - "gradientunits": "gradientUnits", - "kernelmatrix": "kernelMatrix", - "kernelunitlength": "kernelUnitLength", - "keypoints": "keyPoints", - "keysplines": "keySplines", - "keytimes": "keyTimes", - "lengthadjust": "lengthAdjust", - "limitingconeangle": "limitingConeAngle", - "markerheight": "markerHeight", - "markerunits": "markerUnits", - 
"markerwidth": "markerWidth", - "maskcontentunits": "maskContentUnits", - "maskunits": "maskUnits", - "numoctaves": "numOctaves", - "pathlength": "pathLength", - "patterncontentunits": "patternContentUnits", - "patterntransform": "patternTransform", - "patternunits": "patternUnits", - "pointsatx": "pointsAtX", - "pointsaty": "pointsAtY", - "pointsatz": "pointsAtZ", - "preservealpha": "preserveAlpha", - "preserveaspectratio": "preserveAspectRatio", - "primitiveunits": "primitiveUnits", - "refx": "refX", - "refy": "refY", - "repeatcount": "repeatCount", - "repeatdur": "repeatDur", - "requiredextensions": "requiredExtensions", - "requiredfeatures": "requiredFeatures", - "specularconstant": "specularConstant", - "specularexponent": "specularExponent", - "spreadmethod": "spreadMethod", - "startoffset": "startOffset", - "stddeviation": "stdDeviation", - "stitchtiles": "stitchTiles", - "surfacescale": "surfaceScale", - "systemlanguage": "systemLanguage", - "tablevalues": "tableValues", - "targetx": "targetX", - "targety": "targetY", - "textlength": "textLength", - "viewbox": "viewBox", - "viewtarget": "viewTarget", - "xchannelselector": "xChannelSelector", - "ychannelselector": "yChannelSelector", - "zoomandpan": "zoomAndPan" -} - -adjustMathMLAttributes = {"definitionurl": "definitionURL"} - -adjustForeignAttributes = { - "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), - "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), - "xlink:href": ("xlink", "href", namespaces["xlink"]), - "xlink:role": ("xlink", "role", namespaces["xlink"]), - "xlink:show": ("xlink", "show", namespaces["xlink"]), - "xlink:title": ("xlink", "title", namespaces["xlink"]), - "xlink:type": ("xlink", "type", namespaces["xlink"]), - "xml:base": ("xml", "base", namespaces["xml"]), - "xml:lang": ("xml", "lang", namespaces["xml"]), - "xml:space": ("xml", "space", namespaces["xml"]), - "xmlns": (None, "xmlns", namespaces["xmlns"]), - "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) -} - -unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in - adjustForeignAttributes.items()} - -spaceCharacters = frozenset([ - "\t", - "\n", - "\u000C", - " ", - "\r" -]) - -tableInsertModeElements = frozenset([ - "table", - "tbody", - "tfoot", - "thead", - "tr" -]) - -asciiLowercase = frozenset(string.ascii_lowercase) -asciiUppercase = frozenset(string.ascii_uppercase) -asciiLetters = frozenset(string.ascii_letters) -digits = frozenset(string.digits) -hexDigits = frozenset(string.hexdigits) - -asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase} - -# Heading elements need to be ordered -headingElements = ( - "h1", - "h2", - "h3", - "h4", - "h5", - "h6" -) - -voidElements = frozenset([ - "base", - "command", - "event-source", - "link", - "meta", - "hr", - "br", - "img", - "embed", - "param", - "area", - "col", - "input", - "source", - "track" -]) - -cdataElements = frozenset(['title', 'textarea']) - -rcdataElements = frozenset([ - 'style', - 'script', - 'xmp', - 'iframe', - 'noembed', - 'noframes', - 'noscript' -]) - -booleanAttributes = { - "": frozenset(["irrelevant", "itemscope"]), - "style": frozenset(["scoped"]), - "img": frozenset(["ismap"]), - "audio": frozenset(["autoplay", "controls"]), - "video": frozenset(["autoplay", "controls"]), - "script": frozenset(["defer", "async"]), - "details": frozenset(["open"]), - "datagrid": frozenset(["multiple", "disabled"]), - "command": frozenset(["hidden", "disabled", "checked", "default"]), - "hr": frozenset(["noshade"]), - 
"menu": frozenset(["autosubmit"]), - "fieldset": frozenset(["disabled", "readonly"]), - "option": frozenset(["disabled", "readonly", "selected"]), - "optgroup": frozenset(["disabled", "readonly"]), - "button": frozenset(["disabled", "autofocus"]), - "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), - "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), - "output": frozenset(["disabled", "readonly"]), - "iframe": frozenset(["seamless"]), -} - -# entitiesWindows1252 has to be _ordered_ and needs to have an index. It -# therefore can't be a frozenset. -entitiesWindows1252 = ( - 8364, # 0x80 0x20AC EURO SIGN - 65533, # 0x81 UNDEFINED - 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK - 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK - 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK - 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS - 8224, # 0x86 0x2020 DAGGER - 8225, # 0x87 0x2021 DOUBLE DAGGER - 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT - 8240, # 0x89 0x2030 PER MILLE SIGN - 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON - 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE - 65533, # 0x8D UNDEFINED - 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON - 65533, # 0x8F UNDEFINED - 65533, # 0x90 UNDEFINED - 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK - 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK - 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK - 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK - 8226, # 0x95 0x2022 BULLET - 8211, # 0x96 0x2013 EN DASH - 8212, # 0x97 0x2014 EM DASH - 732, # 0x98 0x02DC SMALL TILDE - 8482, # 0x99 0x2122 TRADE MARK SIGN - 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON - 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK - 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE - 65533, # 0x9D UNDEFINED - 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON - 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS -) - -xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) - -entities = { - "AElig": "\xc6", - "AElig;": "\xc6", - "AMP": "&", - "AMP;": "&", - "Aacute": "\xc1", - "Aacute;": "\xc1", - "Abreve;": "\u0102", - "Acirc": "\xc2", - "Acirc;": "\xc2", - "Acy;": "\u0410", - "Afr;": "\U0001d504", - "Agrave": "\xc0", - "Agrave;": "\xc0", - "Alpha;": "\u0391", - "Amacr;": "\u0100", - "And;": "\u2a53", - "Aogon;": "\u0104", - "Aopf;": "\U0001d538", - "ApplyFunction;": "\u2061", - "Aring": "\xc5", - "Aring;": "\xc5", - "Ascr;": "\U0001d49c", - "Assign;": "\u2254", - "Atilde": "\xc3", - "Atilde;": "\xc3", - "Auml": "\xc4", - "Auml;": "\xc4", - "Backslash;": "\u2216", - "Barv;": "\u2ae7", - "Barwed;": "\u2306", - "Bcy;": "\u0411", - "Because;": "\u2235", - "Bernoullis;": "\u212c", - "Beta;": "\u0392", - "Bfr;": "\U0001d505", - "Bopf;": "\U0001d539", - "Breve;": "\u02d8", - "Bscr;": "\u212c", - "Bumpeq;": "\u224e", - "CHcy;": "\u0427", - "COPY": "\xa9", - "COPY;": "\xa9", - "Cacute;": "\u0106", - "Cap;": "\u22d2", - "CapitalDifferentialD;": "\u2145", - "Cayleys;": "\u212d", - "Ccaron;": "\u010c", - "Ccedil": "\xc7", - "Ccedil;": "\xc7", - "Ccirc;": "\u0108", - "Cconint;": "\u2230", - "Cdot;": "\u010a", - "Cedilla;": "\xb8", - "CenterDot;": "\xb7", - "Cfr;": "\u212d", - "Chi;": "\u03a7", - "CircleDot;": "\u2299", - "CircleMinus;": "\u2296", - "CirclePlus;": "\u2295", - "CircleTimes;": "\u2297", - "ClockwiseContourIntegral;": "\u2232", - "CloseCurlyDoubleQuote;": "\u201d", - "CloseCurlyQuote;": "\u2019", - "Colon;": "\u2237", 
- "Colone;": "\u2a74", - "Congruent;": "\u2261", - "Conint;": "\u222f", - "ContourIntegral;": "\u222e", - "Copf;": "\u2102", - "Coproduct;": "\u2210", - "CounterClockwiseContourIntegral;": "\u2233", - "Cross;": "\u2a2f", - "Cscr;": "\U0001d49e", - "Cup;": "\u22d3", - "CupCap;": "\u224d", - "DD;": "\u2145", - "DDotrahd;": "\u2911", - "DJcy;": "\u0402", - "DScy;": "\u0405", - "DZcy;": "\u040f", - "Dagger;": "\u2021", - "Darr;": "\u21a1", - "Dashv;": "\u2ae4", - "Dcaron;": "\u010e", - "Dcy;": "\u0414", - "Del;": "\u2207", - "Delta;": "\u0394", - "Dfr;": "\U0001d507", - "DiacriticalAcute;": "\xb4", - "DiacriticalDot;": "\u02d9", - "DiacriticalDoubleAcute;": "\u02dd", - "DiacriticalGrave;": "`", - "DiacriticalTilde;": "\u02dc", - "Diamond;": "\u22c4", - "DifferentialD;": "\u2146", - "Dopf;": "\U0001d53b", - "Dot;": "\xa8", - "DotDot;": "\u20dc", - "DotEqual;": "\u2250", - "DoubleContourIntegral;": "\u222f", - "DoubleDot;": "\xa8", - "DoubleDownArrow;": "\u21d3", - "DoubleLeftArrow;": "\u21d0", - "DoubleLeftRightArrow;": "\u21d4", - "DoubleLeftTee;": "\u2ae4", - "DoubleLongLeftArrow;": "\u27f8", - "DoubleLongLeftRightArrow;": "\u27fa", - "DoubleLongRightArrow;": "\u27f9", - "DoubleRightArrow;": "\u21d2", - "DoubleRightTee;": "\u22a8", - "DoubleUpArrow;": "\u21d1", - "DoubleUpDownArrow;": "\u21d5", - "DoubleVerticalBar;": "\u2225", - "DownArrow;": "\u2193", - "DownArrowBar;": "\u2913", - "DownArrowUpArrow;": "\u21f5", - "DownBreve;": "\u0311", - "DownLeftRightVector;": "\u2950", - "DownLeftTeeVector;": "\u295e", - "DownLeftVector;": "\u21bd", - "DownLeftVectorBar;": "\u2956", - "DownRightTeeVector;": "\u295f", - "DownRightVector;": "\u21c1", - "DownRightVectorBar;": "\u2957", - "DownTee;": "\u22a4", - "DownTeeArrow;": "\u21a7", - "Downarrow;": "\u21d3", - "Dscr;": "\U0001d49f", - "Dstrok;": "\u0110", - "ENG;": "\u014a", - "ETH": "\xd0", - "ETH;": "\xd0", - "Eacute": "\xc9", - "Eacute;": "\xc9", - "Ecaron;": "\u011a", - "Ecirc": "\xca", - "Ecirc;": "\xca", - "Ecy;": "\u042d", - "Edot;": "\u0116", - "Efr;": "\U0001d508", - "Egrave": "\xc8", - "Egrave;": "\xc8", - "Element;": "\u2208", - "Emacr;": "\u0112", - "EmptySmallSquare;": "\u25fb", - "EmptyVerySmallSquare;": "\u25ab", - "Eogon;": "\u0118", - "Eopf;": "\U0001d53c", - "Epsilon;": "\u0395", - "Equal;": "\u2a75", - "EqualTilde;": "\u2242", - "Equilibrium;": "\u21cc", - "Escr;": "\u2130", - "Esim;": "\u2a73", - "Eta;": "\u0397", - "Euml": "\xcb", - "Euml;": "\xcb", - "Exists;": "\u2203", - "ExponentialE;": "\u2147", - "Fcy;": "\u0424", - "Ffr;": "\U0001d509", - "FilledSmallSquare;": "\u25fc", - "FilledVerySmallSquare;": "\u25aa", - "Fopf;": "\U0001d53d", - "ForAll;": "\u2200", - "Fouriertrf;": "\u2131", - "Fscr;": "\u2131", - "GJcy;": "\u0403", - "GT": ">", - "GT;": ">", - "Gamma;": "\u0393", - "Gammad;": "\u03dc", - "Gbreve;": "\u011e", - "Gcedil;": "\u0122", - "Gcirc;": "\u011c", - "Gcy;": "\u0413", - "Gdot;": "\u0120", - "Gfr;": "\U0001d50a", - "Gg;": "\u22d9", - "Gopf;": "\U0001d53e", - "GreaterEqual;": "\u2265", - "GreaterEqualLess;": "\u22db", - "GreaterFullEqual;": "\u2267", - "GreaterGreater;": "\u2aa2", - "GreaterLess;": "\u2277", - "GreaterSlantEqual;": "\u2a7e", - "GreaterTilde;": "\u2273", - "Gscr;": "\U0001d4a2", - "Gt;": "\u226b", - "HARDcy;": "\u042a", - "Hacek;": "\u02c7", - "Hat;": "^", - "Hcirc;": "\u0124", - "Hfr;": "\u210c", - "HilbertSpace;": "\u210b", - "Hopf;": "\u210d", - "HorizontalLine;": "\u2500", - "Hscr;": "\u210b", - "Hstrok;": "\u0126", - "HumpDownHump;": "\u224e", - "HumpEqual;": "\u224f", - "IEcy;": "\u0415", - 
"IJlig;": "\u0132", - "IOcy;": "\u0401", - "Iacute": "\xcd", - "Iacute;": "\xcd", - "Icirc": "\xce", - "Icirc;": "\xce", - "Icy;": "\u0418", - "Idot;": "\u0130", - "Ifr;": "\u2111", - "Igrave": "\xcc", - "Igrave;": "\xcc", - "Im;": "\u2111", - "Imacr;": "\u012a", - "ImaginaryI;": "\u2148", - "Implies;": "\u21d2", - "Int;": "\u222c", - "Integral;": "\u222b", - "Intersection;": "\u22c2", - "InvisibleComma;": "\u2063", - "InvisibleTimes;": "\u2062", - "Iogon;": "\u012e", - "Iopf;": "\U0001d540", - "Iota;": "\u0399", - "Iscr;": "\u2110", - "Itilde;": "\u0128", - "Iukcy;": "\u0406", - "Iuml": "\xcf", - "Iuml;": "\xcf", - "Jcirc;": "\u0134", - "Jcy;": "\u0419", - "Jfr;": "\U0001d50d", - "Jopf;": "\U0001d541", - "Jscr;": "\U0001d4a5", - "Jsercy;": "\u0408", - "Jukcy;": "\u0404", - "KHcy;": "\u0425", - "KJcy;": "\u040c", - "Kappa;": "\u039a", - "Kcedil;": "\u0136", - "Kcy;": "\u041a", - "Kfr;": "\U0001d50e", - "Kopf;": "\U0001d542", - "Kscr;": "\U0001d4a6", - "LJcy;": "\u0409", - "LT": "<", - "LT;": "<", - "Lacute;": "\u0139", - "Lambda;": "\u039b", - "Lang;": "\u27ea", - "Laplacetrf;": "\u2112", - "Larr;": "\u219e", - "Lcaron;": "\u013d", - "Lcedil;": "\u013b", - "Lcy;": "\u041b", - "LeftAngleBracket;": "\u27e8", - "LeftArrow;": "\u2190", - "LeftArrowBar;": "\u21e4", - "LeftArrowRightArrow;": "\u21c6", - "LeftCeiling;": "\u2308", - "LeftDoubleBracket;": "\u27e6", - "LeftDownTeeVector;": "\u2961", - "LeftDownVector;": "\u21c3", - "LeftDownVectorBar;": "\u2959", - "LeftFloor;": "\u230a", - "LeftRightArrow;": "\u2194", - "LeftRightVector;": "\u294e", - "LeftTee;": "\u22a3", - "LeftTeeArrow;": "\u21a4", - "LeftTeeVector;": "\u295a", - "LeftTriangle;": "\u22b2", - "LeftTriangleBar;": "\u29cf", - "LeftTriangleEqual;": "\u22b4", - "LeftUpDownVector;": "\u2951", - "LeftUpTeeVector;": "\u2960", - "LeftUpVector;": "\u21bf", - "LeftUpVectorBar;": "\u2958", - "LeftVector;": "\u21bc", - "LeftVectorBar;": "\u2952", - "Leftarrow;": "\u21d0", - "Leftrightarrow;": "\u21d4", - "LessEqualGreater;": "\u22da", - "LessFullEqual;": "\u2266", - "LessGreater;": "\u2276", - "LessLess;": "\u2aa1", - "LessSlantEqual;": "\u2a7d", - "LessTilde;": "\u2272", - "Lfr;": "\U0001d50f", - "Ll;": "\u22d8", - "Lleftarrow;": "\u21da", - "Lmidot;": "\u013f", - "LongLeftArrow;": "\u27f5", - "LongLeftRightArrow;": "\u27f7", - "LongRightArrow;": "\u27f6", - "Longleftarrow;": "\u27f8", - "Longleftrightarrow;": "\u27fa", - "Longrightarrow;": "\u27f9", - "Lopf;": "\U0001d543", - "LowerLeftArrow;": "\u2199", - "LowerRightArrow;": "\u2198", - "Lscr;": "\u2112", - "Lsh;": "\u21b0", - "Lstrok;": "\u0141", - "Lt;": "\u226a", - "Map;": "\u2905", - "Mcy;": "\u041c", - "MediumSpace;": "\u205f", - "Mellintrf;": "\u2133", - "Mfr;": "\U0001d510", - "MinusPlus;": "\u2213", - "Mopf;": "\U0001d544", - "Mscr;": "\u2133", - "Mu;": "\u039c", - "NJcy;": "\u040a", - "Nacute;": "\u0143", - "Ncaron;": "\u0147", - "Ncedil;": "\u0145", - "Ncy;": "\u041d", - "NegativeMediumSpace;": "\u200b", - "NegativeThickSpace;": "\u200b", - "NegativeThinSpace;": "\u200b", - "NegativeVeryThinSpace;": "\u200b", - "NestedGreaterGreater;": "\u226b", - "NestedLessLess;": "\u226a", - "NewLine;": "\n", - "Nfr;": "\U0001d511", - "NoBreak;": "\u2060", - "NonBreakingSpace;": "\xa0", - "Nopf;": "\u2115", - "Not;": "\u2aec", - "NotCongruent;": "\u2262", - "NotCupCap;": "\u226d", - "NotDoubleVerticalBar;": "\u2226", - "NotElement;": "\u2209", - "NotEqual;": "\u2260", - "NotEqualTilde;": "\u2242\u0338", - "NotExists;": "\u2204", - "NotGreater;": "\u226f", - "NotGreaterEqual;": "\u2271", - 
"NotGreaterFullEqual;": "\u2267\u0338", - "NotGreaterGreater;": "\u226b\u0338", - "NotGreaterLess;": "\u2279", - "NotGreaterSlantEqual;": "\u2a7e\u0338", - "NotGreaterTilde;": "\u2275", - "NotHumpDownHump;": "\u224e\u0338", - "NotHumpEqual;": "\u224f\u0338", - "NotLeftTriangle;": "\u22ea", - "NotLeftTriangleBar;": "\u29cf\u0338", - "NotLeftTriangleEqual;": "\u22ec", - "NotLess;": "\u226e", - "NotLessEqual;": "\u2270", - "NotLessGreater;": "\u2278", - "NotLessLess;": "\u226a\u0338", - "NotLessSlantEqual;": "\u2a7d\u0338", - "NotLessTilde;": "\u2274", - "NotNestedGreaterGreater;": "\u2aa2\u0338", - "NotNestedLessLess;": "\u2aa1\u0338", - "NotPrecedes;": "\u2280", - "NotPrecedesEqual;": "\u2aaf\u0338", - "NotPrecedesSlantEqual;": "\u22e0", - "NotReverseElement;": "\u220c", - "NotRightTriangle;": "\u22eb", - "NotRightTriangleBar;": "\u29d0\u0338", - "NotRightTriangleEqual;": "\u22ed", - "NotSquareSubset;": "\u228f\u0338", - "NotSquareSubsetEqual;": "\u22e2", - "NotSquareSuperset;": "\u2290\u0338", - "NotSquareSupersetEqual;": "\u22e3", - "NotSubset;": "\u2282\u20d2", - "NotSubsetEqual;": "\u2288", - "NotSucceeds;": "\u2281", - "NotSucceedsEqual;": "\u2ab0\u0338", - "NotSucceedsSlantEqual;": "\u22e1", - "NotSucceedsTilde;": "\u227f\u0338", - "NotSuperset;": "\u2283\u20d2", - "NotSupersetEqual;": "\u2289", - "NotTilde;": "\u2241", - "NotTildeEqual;": "\u2244", - "NotTildeFullEqual;": "\u2247", - "NotTildeTilde;": "\u2249", - "NotVerticalBar;": "\u2224", - "Nscr;": "\U0001d4a9", - "Ntilde": "\xd1", - "Ntilde;": "\xd1", - "Nu;": "\u039d", - "OElig;": "\u0152", - "Oacute": "\xd3", - "Oacute;": "\xd3", - "Ocirc": "\xd4", - "Ocirc;": "\xd4", - "Ocy;": "\u041e", - "Odblac;": "\u0150", - "Ofr;": "\U0001d512", - "Ograve": "\xd2", - "Ograve;": "\xd2", - "Omacr;": "\u014c", - "Omega;": "\u03a9", - "Omicron;": "\u039f", - "Oopf;": "\U0001d546", - "OpenCurlyDoubleQuote;": "\u201c", - "OpenCurlyQuote;": "\u2018", - "Or;": "\u2a54", - "Oscr;": "\U0001d4aa", - "Oslash": "\xd8", - "Oslash;": "\xd8", - "Otilde": "\xd5", - "Otilde;": "\xd5", - "Otimes;": "\u2a37", - "Ouml": "\xd6", - "Ouml;": "\xd6", - "OverBar;": "\u203e", - "OverBrace;": "\u23de", - "OverBracket;": "\u23b4", - "OverParenthesis;": "\u23dc", - "PartialD;": "\u2202", - "Pcy;": "\u041f", - "Pfr;": "\U0001d513", - "Phi;": "\u03a6", - "Pi;": "\u03a0", - "PlusMinus;": "\xb1", - "Poincareplane;": "\u210c", - "Popf;": "\u2119", - "Pr;": "\u2abb", - "Precedes;": "\u227a", - "PrecedesEqual;": "\u2aaf", - "PrecedesSlantEqual;": "\u227c", - "PrecedesTilde;": "\u227e", - "Prime;": "\u2033", - "Product;": "\u220f", - "Proportion;": "\u2237", - "Proportional;": "\u221d", - "Pscr;": "\U0001d4ab", - "Psi;": "\u03a8", - "QUOT": "\"", - "QUOT;": "\"", - "Qfr;": "\U0001d514", - "Qopf;": "\u211a", - "Qscr;": "\U0001d4ac", - "RBarr;": "\u2910", - "REG": "\xae", - "REG;": "\xae", - "Racute;": "\u0154", - "Rang;": "\u27eb", - "Rarr;": "\u21a0", - "Rarrtl;": "\u2916", - "Rcaron;": "\u0158", - "Rcedil;": "\u0156", - "Rcy;": "\u0420", - "Re;": "\u211c", - "ReverseElement;": "\u220b", - "ReverseEquilibrium;": "\u21cb", - "ReverseUpEquilibrium;": "\u296f", - "Rfr;": "\u211c", - "Rho;": "\u03a1", - "RightAngleBracket;": "\u27e9", - "RightArrow;": "\u2192", - "RightArrowBar;": "\u21e5", - "RightArrowLeftArrow;": "\u21c4", - "RightCeiling;": "\u2309", - "RightDoubleBracket;": "\u27e7", - "RightDownTeeVector;": "\u295d", - "RightDownVector;": "\u21c2", - "RightDownVectorBar;": "\u2955", - "RightFloor;": "\u230b", - "RightTee;": "\u22a2", - "RightTeeArrow;": "\u21a6", - 
"RightTeeVector;": "\u295b", - "RightTriangle;": "\u22b3", - "RightTriangleBar;": "\u29d0", - "RightTriangleEqual;": "\u22b5", - "RightUpDownVector;": "\u294f", - "RightUpTeeVector;": "\u295c", - "RightUpVector;": "\u21be", - "RightUpVectorBar;": "\u2954", - "RightVector;": "\u21c0", - "RightVectorBar;": "\u2953", - "Rightarrow;": "\u21d2", - "Ropf;": "\u211d", - "RoundImplies;": "\u2970", - "Rrightarrow;": "\u21db", - "Rscr;": "\u211b", - "Rsh;": "\u21b1", - "RuleDelayed;": "\u29f4", - "SHCHcy;": "\u0429", - "SHcy;": "\u0428", - "SOFTcy;": "\u042c", - "Sacute;": "\u015a", - "Sc;": "\u2abc", - "Scaron;": "\u0160", - "Scedil;": "\u015e", - "Scirc;": "\u015c", - "Scy;": "\u0421", - "Sfr;": "\U0001d516", - "ShortDownArrow;": "\u2193", - "ShortLeftArrow;": "\u2190", - "ShortRightArrow;": "\u2192", - "ShortUpArrow;": "\u2191", - "Sigma;": "\u03a3", - "SmallCircle;": "\u2218", - "Sopf;": "\U0001d54a", - "Sqrt;": "\u221a", - "Square;": "\u25a1", - "SquareIntersection;": "\u2293", - "SquareSubset;": "\u228f", - "SquareSubsetEqual;": "\u2291", - "SquareSuperset;": "\u2290", - "SquareSupersetEqual;": "\u2292", - "SquareUnion;": "\u2294", - "Sscr;": "\U0001d4ae", - "Star;": "\u22c6", - "Sub;": "\u22d0", - "Subset;": "\u22d0", - "SubsetEqual;": "\u2286", - "Succeeds;": "\u227b", - "SucceedsEqual;": "\u2ab0", - "SucceedsSlantEqual;": "\u227d", - "SucceedsTilde;": "\u227f", - "SuchThat;": "\u220b", - "Sum;": "\u2211", - "Sup;": "\u22d1", - "Superset;": "\u2283", - "SupersetEqual;": "\u2287", - "Supset;": "\u22d1", - "THORN": "\xde", - "THORN;": "\xde", - "TRADE;": "\u2122", - "TSHcy;": "\u040b", - "TScy;": "\u0426", - "Tab;": "\t", - "Tau;": "\u03a4", - "Tcaron;": "\u0164", - "Tcedil;": "\u0162", - "Tcy;": "\u0422", - "Tfr;": "\U0001d517", - "Therefore;": "\u2234", - "Theta;": "\u0398", - "ThickSpace;": "\u205f\u200a", - "ThinSpace;": "\u2009", - "Tilde;": "\u223c", - "TildeEqual;": "\u2243", - "TildeFullEqual;": "\u2245", - "TildeTilde;": "\u2248", - "Topf;": "\U0001d54b", - "TripleDot;": "\u20db", - "Tscr;": "\U0001d4af", - "Tstrok;": "\u0166", - "Uacute": "\xda", - "Uacute;": "\xda", - "Uarr;": "\u219f", - "Uarrocir;": "\u2949", - "Ubrcy;": "\u040e", - "Ubreve;": "\u016c", - "Ucirc": "\xdb", - "Ucirc;": "\xdb", - "Ucy;": "\u0423", - "Udblac;": "\u0170", - "Ufr;": "\U0001d518", - "Ugrave": "\xd9", - "Ugrave;": "\xd9", - "Umacr;": "\u016a", - "UnderBar;": "_", - "UnderBrace;": "\u23df", - "UnderBracket;": "\u23b5", - "UnderParenthesis;": "\u23dd", - "Union;": "\u22c3", - "UnionPlus;": "\u228e", - "Uogon;": "\u0172", - "Uopf;": "\U0001d54c", - "UpArrow;": "\u2191", - "UpArrowBar;": "\u2912", - "UpArrowDownArrow;": "\u21c5", - "UpDownArrow;": "\u2195", - "UpEquilibrium;": "\u296e", - "UpTee;": "\u22a5", - "UpTeeArrow;": "\u21a5", - "Uparrow;": "\u21d1", - "Updownarrow;": "\u21d5", - "UpperLeftArrow;": "\u2196", - "UpperRightArrow;": "\u2197", - "Upsi;": "\u03d2", - "Upsilon;": "\u03a5", - "Uring;": "\u016e", - "Uscr;": "\U0001d4b0", - "Utilde;": "\u0168", - "Uuml": "\xdc", - "Uuml;": "\xdc", - "VDash;": "\u22ab", - "Vbar;": "\u2aeb", - "Vcy;": "\u0412", - "Vdash;": "\u22a9", - "Vdashl;": "\u2ae6", - "Vee;": "\u22c1", - "Verbar;": "\u2016", - "Vert;": "\u2016", - "VerticalBar;": "\u2223", - "VerticalLine;": "|", - "VerticalSeparator;": "\u2758", - "VerticalTilde;": "\u2240", - "VeryThinSpace;": "\u200a", - "Vfr;": "\U0001d519", - "Vopf;": "\U0001d54d", - "Vscr;": "\U0001d4b1", - "Vvdash;": "\u22aa", - "Wcirc;": "\u0174", - "Wedge;": "\u22c0", - "Wfr;": "\U0001d51a", - "Wopf;": "\U0001d54e", - "Wscr;": 
"\U0001d4b2", - "Xfr;": "\U0001d51b", - "Xi;": "\u039e", - "Xopf;": "\U0001d54f", - "Xscr;": "\U0001d4b3", - "YAcy;": "\u042f", - "YIcy;": "\u0407", - "YUcy;": "\u042e", - "Yacute": "\xdd", - "Yacute;": "\xdd", - "Ycirc;": "\u0176", - "Ycy;": "\u042b", - "Yfr;": "\U0001d51c", - "Yopf;": "\U0001d550", - "Yscr;": "\U0001d4b4", - "Yuml;": "\u0178", - "ZHcy;": "\u0416", - "Zacute;": "\u0179", - "Zcaron;": "\u017d", - "Zcy;": "\u0417", - "Zdot;": "\u017b", - "ZeroWidthSpace;": "\u200b", - "Zeta;": "\u0396", - "Zfr;": "\u2128", - "Zopf;": "\u2124", - "Zscr;": "\U0001d4b5", - "aacute": "\xe1", - "aacute;": "\xe1", - "abreve;": "\u0103", - "ac;": "\u223e", - "acE;": "\u223e\u0333", - "acd;": "\u223f", - "acirc": "\xe2", - "acirc;": "\xe2", - "acute": "\xb4", - "acute;": "\xb4", - "acy;": "\u0430", - "aelig": "\xe6", - "aelig;": "\xe6", - "af;": "\u2061", - "afr;": "\U0001d51e", - "agrave": "\xe0", - "agrave;": "\xe0", - "alefsym;": "\u2135", - "aleph;": "\u2135", - "alpha;": "\u03b1", - "amacr;": "\u0101", - "amalg;": "\u2a3f", - "amp": "&", - "amp;": "&", - "and;": "\u2227", - "andand;": "\u2a55", - "andd;": "\u2a5c", - "andslope;": "\u2a58", - "andv;": "\u2a5a", - "ang;": "\u2220", - "ange;": "\u29a4", - "angle;": "\u2220", - "angmsd;": "\u2221", - "angmsdaa;": "\u29a8", - "angmsdab;": "\u29a9", - "angmsdac;": "\u29aa", - "angmsdad;": "\u29ab", - "angmsdae;": "\u29ac", - "angmsdaf;": "\u29ad", - "angmsdag;": "\u29ae", - "angmsdah;": "\u29af", - "angrt;": "\u221f", - "angrtvb;": "\u22be", - "angrtvbd;": "\u299d", - "angsph;": "\u2222", - "angst;": "\xc5", - "angzarr;": "\u237c", - "aogon;": "\u0105", - "aopf;": "\U0001d552", - "ap;": "\u2248", - "apE;": "\u2a70", - "apacir;": "\u2a6f", - "ape;": "\u224a", - "apid;": "\u224b", - "apos;": "'", - "approx;": "\u2248", - "approxeq;": "\u224a", - "aring": "\xe5", - "aring;": "\xe5", - "ascr;": "\U0001d4b6", - "ast;": "*", - "asymp;": "\u2248", - "asympeq;": "\u224d", - "atilde": "\xe3", - "atilde;": "\xe3", - "auml": "\xe4", - "auml;": "\xe4", - "awconint;": "\u2233", - "awint;": "\u2a11", - "bNot;": "\u2aed", - "backcong;": "\u224c", - "backepsilon;": "\u03f6", - "backprime;": "\u2035", - "backsim;": "\u223d", - "backsimeq;": "\u22cd", - "barvee;": "\u22bd", - "barwed;": "\u2305", - "barwedge;": "\u2305", - "bbrk;": "\u23b5", - "bbrktbrk;": "\u23b6", - "bcong;": "\u224c", - "bcy;": "\u0431", - "bdquo;": "\u201e", - "becaus;": "\u2235", - "because;": "\u2235", - "bemptyv;": "\u29b0", - "bepsi;": "\u03f6", - "bernou;": "\u212c", - "beta;": "\u03b2", - "beth;": "\u2136", - "between;": "\u226c", - "bfr;": "\U0001d51f", - "bigcap;": "\u22c2", - "bigcirc;": "\u25ef", - "bigcup;": "\u22c3", - "bigodot;": "\u2a00", - "bigoplus;": "\u2a01", - "bigotimes;": "\u2a02", - "bigsqcup;": "\u2a06", - "bigstar;": "\u2605", - "bigtriangledown;": "\u25bd", - "bigtriangleup;": "\u25b3", - "biguplus;": "\u2a04", - "bigvee;": "\u22c1", - "bigwedge;": "\u22c0", - "bkarow;": "\u290d", - "blacklozenge;": "\u29eb", - "blacksquare;": "\u25aa", - "blacktriangle;": "\u25b4", - "blacktriangledown;": "\u25be", - "blacktriangleleft;": "\u25c2", - "blacktriangleright;": "\u25b8", - "blank;": "\u2423", - "blk12;": "\u2592", - "blk14;": "\u2591", - "blk34;": "\u2593", - "block;": "\u2588", - "bne;": "=\u20e5", - "bnequiv;": "\u2261\u20e5", - "bnot;": "\u2310", - "bopf;": "\U0001d553", - "bot;": "\u22a5", - "bottom;": "\u22a5", - "bowtie;": "\u22c8", - "boxDL;": "\u2557", - "boxDR;": "\u2554", - "boxDl;": "\u2556", - "boxDr;": "\u2553", - "boxH;": "\u2550", - "boxHD;": "\u2566", - 
"boxHU;": "\u2569", - "boxHd;": "\u2564", - "boxHu;": "\u2567", - "boxUL;": "\u255d", - "boxUR;": "\u255a", - "boxUl;": "\u255c", - "boxUr;": "\u2559", - "boxV;": "\u2551", - "boxVH;": "\u256c", - "boxVL;": "\u2563", - "boxVR;": "\u2560", - "boxVh;": "\u256b", - "boxVl;": "\u2562", - "boxVr;": "\u255f", - "boxbox;": "\u29c9", - "boxdL;": "\u2555", - "boxdR;": "\u2552", - "boxdl;": "\u2510", - "boxdr;": "\u250c", - "boxh;": "\u2500", - "boxhD;": "\u2565", - "boxhU;": "\u2568", - "boxhd;": "\u252c", - "boxhu;": "\u2534", - "boxminus;": "\u229f", - "boxplus;": "\u229e", - "boxtimes;": "\u22a0", - "boxuL;": "\u255b", - "boxuR;": "\u2558", - "boxul;": "\u2518", - "boxur;": "\u2514", - "boxv;": "\u2502", - "boxvH;": "\u256a", - "boxvL;": "\u2561", - "boxvR;": "\u255e", - "boxvh;": "\u253c", - "boxvl;": "\u2524", - "boxvr;": "\u251c", - "bprime;": "\u2035", - "breve;": "\u02d8", - "brvbar": "\xa6", - "brvbar;": "\xa6", - "bscr;": "\U0001d4b7", - "bsemi;": "\u204f", - "bsim;": "\u223d", - "bsime;": "\u22cd", - "bsol;": "\\", - "bsolb;": "\u29c5", - "bsolhsub;": "\u27c8", - "bull;": "\u2022", - "bullet;": "\u2022", - "bump;": "\u224e", - "bumpE;": "\u2aae", - "bumpe;": "\u224f", - "bumpeq;": "\u224f", - "cacute;": "\u0107", - "cap;": "\u2229", - "capand;": "\u2a44", - "capbrcup;": "\u2a49", - "capcap;": "\u2a4b", - "capcup;": "\u2a47", - "capdot;": "\u2a40", - "caps;": "\u2229\ufe00", - "caret;": "\u2041", - "caron;": "\u02c7", - "ccaps;": "\u2a4d", - "ccaron;": "\u010d", - "ccedil": "\xe7", - "ccedil;": "\xe7", - "ccirc;": "\u0109", - "ccups;": "\u2a4c", - "ccupssm;": "\u2a50", - "cdot;": "\u010b", - "cedil": "\xb8", - "cedil;": "\xb8", - "cemptyv;": "\u29b2", - "cent": "\xa2", - "cent;": "\xa2", - "centerdot;": "\xb7", - "cfr;": "\U0001d520", - "chcy;": "\u0447", - "check;": "\u2713", - "checkmark;": "\u2713", - "chi;": "\u03c7", - "cir;": "\u25cb", - "cirE;": "\u29c3", - "circ;": "\u02c6", - "circeq;": "\u2257", - "circlearrowleft;": "\u21ba", - "circlearrowright;": "\u21bb", - "circledR;": "\xae", - "circledS;": "\u24c8", - "circledast;": "\u229b", - "circledcirc;": "\u229a", - "circleddash;": "\u229d", - "cire;": "\u2257", - "cirfnint;": "\u2a10", - "cirmid;": "\u2aef", - "cirscir;": "\u29c2", - "clubs;": "\u2663", - "clubsuit;": "\u2663", - "colon;": ":", - "colone;": "\u2254", - "coloneq;": "\u2254", - "comma;": ",", - "commat;": "@", - "comp;": "\u2201", - "compfn;": "\u2218", - "complement;": "\u2201", - "complexes;": "\u2102", - "cong;": "\u2245", - "congdot;": "\u2a6d", - "conint;": "\u222e", - "copf;": "\U0001d554", - "coprod;": "\u2210", - "copy": "\xa9", - "copy;": "\xa9", - "copysr;": "\u2117", - "crarr;": "\u21b5", - "cross;": "\u2717", - "cscr;": "\U0001d4b8", - "csub;": "\u2acf", - "csube;": "\u2ad1", - "csup;": "\u2ad0", - "csupe;": "\u2ad2", - "ctdot;": "\u22ef", - "cudarrl;": "\u2938", - "cudarrr;": "\u2935", - "cuepr;": "\u22de", - "cuesc;": "\u22df", - "cularr;": "\u21b6", - "cularrp;": "\u293d", - "cup;": "\u222a", - "cupbrcap;": "\u2a48", - "cupcap;": "\u2a46", - "cupcup;": "\u2a4a", - "cupdot;": "\u228d", - "cupor;": "\u2a45", - "cups;": "\u222a\ufe00", - "curarr;": "\u21b7", - "curarrm;": "\u293c", - "curlyeqprec;": "\u22de", - "curlyeqsucc;": "\u22df", - "curlyvee;": "\u22ce", - "curlywedge;": "\u22cf", - "curren": "\xa4", - "curren;": "\xa4", - "curvearrowleft;": "\u21b6", - "curvearrowright;": "\u21b7", - "cuvee;": "\u22ce", - "cuwed;": "\u22cf", - "cwconint;": "\u2232", - "cwint;": "\u2231", - "cylcty;": "\u232d", - "dArr;": "\u21d3", - "dHar;": "\u2965", - "dagger;": 
"\u2020", - "daleth;": "\u2138", - "darr;": "\u2193", - "dash;": "\u2010", - "dashv;": "\u22a3", - "dbkarow;": "\u290f", - "dblac;": "\u02dd", - "dcaron;": "\u010f", - "dcy;": "\u0434", - "dd;": "\u2146", - "ddagger;": "\u2021", - "ddarr;": "\u21ca", - "ddotseq;": "\u2a77", - "deg": "\xb0", - "deg;": "\xb0", - "delta;": "\u03b4", - "demptyv;": "\u29b1", - "dfisht;": "\u297f", - "dfr;": "\U0001d521", - "dharl;": "\u21c3", - "dharr;": "\u21c2", - "diam;": "\u22c4", - "diamond;": "\u22c4", - "diamondsuit;": "\u2666", - "diams;": "\u2666", - "die;": "\xa8", - "digamma;": "\u03dd", - "disin;": "\u22f2", - "div;": "\xf7", - "divide": "\xf7", - "divide;": "\xf7", - "divideontimes;": "\u22c7", - "divonx;": "\u22c7", - "djcy;": "\u0452", - "dlcorn;": "\u231e", - "dlcrop;": "\u230d", - "dollar;": "$", - "dopf;": "\U0001d555", - "dot;": "\u02d9", - "doteq;": "\u2250", - "doteqdot;": "\u2251", - "dotminus;": "\u2238", - "dotplus;": "\u2214", - "dotsquare;": "\u22a1", - "doublebarwedge;": "\u2306", - "downarrow;": "\u2193", - "downdownarrows;": "\u21ca", - "downharpoonleft;": "\u21c3", - "downharpoonright;": "\u21c2", - "drbkarow;": "\u2910", - "drcorn;": "\u231f", - "drcrop;": "\u230c", - "dscr;": "\U0001d4b9", - "dscy;": "\u0455", - "dsol;": "\u29f6", - "dstrok;": "\u0111", - "dtdot;": "\u22f1", - "dtri;": "\u25bf", - "dtrif;": "\u25be", - "duarr;": "\u21f5", - "duhar;": "\u296f", - "dwangle;": "\u29a6", - "dzcy;": "\u045f", - "dzigrarr;": "\u27ff", - "eDDot;": "\u2a77", - "eDot;": "\u2251", - "eacute": "\xe9", - "eacute;": "\xe9", - "easter;": "\u2a6e", - "ecaron;": "\u011b", - "ecir;": "\u2256", - "ecirc": "\xea", - "ecirc;": "\xea", - "ecolon;": "\u2255", - "ecy;": "\u044d", - "edot;": "\u0117", - "ee;": "\u2147", - "efDot;": "\u2252", - "efr;": "\U0001d522", - "eg;": "\u2a9a", - "egrave": "\xe8", - "egrave;": "\xe8", - "egs;": "\u2a96", - "egsdot;": "\u2a98", - "el;": "\u2a99", - "elinters;": "\u23e7", - "ell;": "\u2113", - "els;": "\u2a95", - "elsdot;": "\u2a97", - "emacr;": "\u0113", - "empty;": "\u2205", - "emptyset;": "\u2205", - "emptyv;": "\u2205", - "emsp13;": "\u2004", - "emsp14;": "\u2005", - "emsp;": "\u2003", - "eng;": "\u014b", - "ensp;": "\u2002", - "eogon;": "\u0119", - "eopf;": "\U0001d556", - "epar;": "\u22d5", - "eparsl;": "\u29e3", - "eplus;": "\u2a71", - "epsi;": "\u03b5", - "epsilon;": "\u03b5", - "epsiv;": "\u03f5", - "eqcirc;": "\u2256", - "eqcolon;": "\u2255", - "eqsim;": "\u2242", - "eqslantgtr;": "\u2a96", - "eqslantless;": "\u2a95", - "equals;": "=", - "equest;": "\u225f", - "equiv;": "\u2261", - "equivDD;": "\u2a78", - "eqvparsl;": "\u29e5", - "erDot;": "\u2253", - "erarr;": "\u2971", - "escr;": "\u212f", - "esdot;": "\u2250", - "esim;": "\u2242", - "eta;": "\u03b7", - "eth": "\xf0", - "eth;": "\xf0", - "euml": "\xeb", - "euml;": "\xeb", - "euro;": "\u20ac", - "excl;": "!", - "exist;": "\u2203", - "expectation;": "\u2130", - "exponentiale;": "\u2147", - "fallingdotseq;": "\u2252", - "fcy;": "\u0444", - "female;": "\u2640", - "ffilig;": "\ufb03", - "fflig;": "\ufb00", - "ffllig;": "\ufb04", - "ffr;": "\U0001d523", - "filig;": "\ufb01", - "fjlig;": "fj", - "flat;": "\u266d", - "fllig;": "\ufb02", - "fltns;": "\u25b1", - "fnof;": "\u0192", - "fopf;": "\U0001d557", - "forall;": "\u2200", - "fork;": "\u22d4", - "forkv;": "\u2ad9", - "fpartint;": "\u2a0d", - "frac12": "\xbd", - "frac12;": "\xbd", - "frac13;": "\u2153", - "frac14": "\xbc", - "frac14;": "\xbc", - "frac15;": "\u2155", - "frac16;": "\u2159", - "frac18;": "\u215b", - "frac23;": "\u2154", - "frac25;": "\u2156", - 
"frac34": "\xbe", - "frac34;": "\xbe", - "frac35;": "\u2157", - "frac38;": "\u215c", - "frac45;": "\u2158", - "frac56;": "\u215a", - "frac58;": "\u215d", - "frac78;": "\u215e", - "frasl;": "\u2044", - "frown;": "\u2322", - "fscr;": "\U0001d4bb", - "gE;": "\u2267", - "gEl;": "\u2a8c", - "gacute;": "\u01f5", - "gamma;": "\u03b3", - "gammad;": "\u03dd", - "gap;": "\u2a86", - "gbreve;": "\u011f", - "gcirc;": "\u011d", - "gcy;": "\u0433", - "gdot;": "\u0121", - "ge;": "\u2265", - "gel;": "\u22db", - "geq;": "\u2265", - "geqq;": "\u2267", - "geqslant;": "\u2a7e", - "ges;": "\u2a7e", - "gescc;": "\u2aa9", - "gesdot;": "\u2a80", - "gesdoto;": "\u2a82", - "gesdotol;": "\u2a84", - "gesl;": "\u22db\ufe00", - "gesles;": "\u2a94", - "gfr;": "\U0001d524", - "gg;": "\u226b", - "ggg;": "\u22d9", - "gimel;": "\u2137", - "gjcy;": "\u0453", - "gl;": "\u2277", - "glE;": "\u2a92", - "gla;": "\u2aa5", - "glj;": "\u2aa4", - "gnE;": "\u2269", - "gnap;": "\u2a8a", - "gnapprox;": "\u2a8a", - "gne;": "\u2a88", - "gneq;": "\u2a88", - "gneqq;": "\u2269", - "gnsim;": "\u22e7", - "gopf;": "\U0001d558", - "grave;": "`", - "gscr;": "\u210a", - "gsim;": "\u2273", - "gsime;": "\u2a8e", - "gsiml;": "\u2a90", - "gt": ">", - "gt;": ">", - "gtcc;": "\u2aa7", - "gtcir;": "\u2a7a", - "gtdot;": "\u22d7", - "gtlPar;": "\u2995", - "gtquest;": "\u2a7c", - "gtrapprox;": "\u2a86", - "gtrarr;": "\u2978", - "gtrdot;": "\u22d7", - "gtreqless;": "\u22db", - "gtreqqless;": "\u2a8c", - "gtrless;": "\u2277", - "gtrsim;": "\u2273", - "gvertneqq;": "\u2269\ufe00", - "gvnE;": "\u2269\ufe00", - "hArr;": "\u21d4", - "hairsp;": "\u200a", - "half;": "\xbd", - "hamilt;": "\u210b", - "hardcy;": "\u044a", - "harr;": "\u2194", - "harrcir;": "\u2948", - "harrw;": "\u21ad", - "hbar;": "\u210f", - "hcirc;": "\u0125", - "hearts;": "\u2665", - "heartsuit;": "\u2665", - "hellip;": "\u2026", - "hercon;": "\u22b9", - "hfr;": "\U0001d525", - "hksearow;": "\u2925", - "hkswarow;": "\u2926", - "hoarr;": "\u21ff", - "homtht;": "\u223b", - "hookleftarrow;": "\u21a9", - "hookrightarrow;": "\u21aa", - "hopf;": "\U0001d559", - "horbar;": "\u2015", - "hscr;": "\U0001d4bd", - "hslash;": "\u210f", - "hstrok;": "\u0127", - "hybull;": "\u2043", - "hyphen;": "\u2010", - "iacute": "\xed", - "iacute;": "\xed", - "ic;": "\u2063", - "icirc": "\xee", - "icirc;": "\xee", - "icy;": "\u0438", - "iecy;": "\u0435", - "iexcl": "\xa1", - "iexcl;": "\xa1", - "iff;": "\u21d4", - "ifr;": "\U0001d526", - "igrave": "\xec", - "igrave;": "\xec", - "ii;": "\u2148", - "iiiint;": "\u2a0c", - "iiint;": "\u222d", - "iinfin;": "\u29dc", - "iiota;": "\u2129", - "ijlig;": "\u0133", - "imacr;": "\u012b", - "image;": "\u2111", - "imagline;": "\u2110", - "imagpart;": "\u2111", - "imath;": "\u0131", - "imof;": "\u22b7", - "imped;": "\u01b5", - "in;": "\u2208", - "incare;": "\u2105", - "infin;": "\u221e", - "infintie;": "\u29dd", - "inodot;": "\u0131", - "int;": "\u222b", - "intcal;": "\u22ba", - "integers;": "\u2124", - "intercal;": "\u22ba", - "intlarhk;": "\u2a17", - "intprod;": "\u2a3c", - "iocy;": "\u0451", - "iogon;": "\u012f", - "iopf;": "\U0001d55a", - "iota;": "\u03b9", - "iprod;": "\u2a3c", - "iquest": "\xbf", - "iquest;": "\xbf", - "iscr;": "\U0001d4be", - "isin;": "\u2208", - "isinE;": "\u22f9", - "isindot;": "\u22f5", - "isins;": "\u22f4", - "isinsv;": "\u22f3", - "isinv;": "\u2208", - "it;": "\u2062", - "itilde;": "\u0129", - "iukcy;": "\u0456", - "iuml": "\xef", - "iuml;": "\xef", - "jcirc;": "\u0135", - "jcy;": "\u0439", - "jfr;": "\U0001d527", - "jmath;": "\u0237", - "jopf;": "\U0001d55b", 
- "jscr;": "\U0001d4bf", - "jsercy;": "\u0458", - "jukcy;": "\u0454", - "kappa;": "\u03ba", - "kappav;": "\u03f0", - "kcedil;": "\u0137", - "kcy;": "\u043a", - "kfr;": "\U0001d528", - "kgreen;": "\u0138", - "khcy;": "\u0445", - "kjcy;": "\u045c", - "kopf;": "\U0001d55c", - "kscr;": "\U0001d4c0", - "lAarr;": "\u21da", - "lArr;": "\u21d0", - "lAtail;": "\u291b", - "lBarr;": "\u290e", - "lE;": "\u2266", - "lEg;": "\u2a8b", - "lHar;": "\u2962", - "lacute;": "\u013a", - "laemptyv;": "\u29b4", - "lagran;": "\u2112", - "lambda;": "\u03bb", - "lang;": "\u27e8", - "langd;": "\u2991", - "langle;": "\u27e8", - "lap;": "\u2a85", - "laquo": "\xab", - "laquo;": "\xab", - "larr;": "\u2190", - "larrb;": "\u21e4", - "larrbfs;": "\u291f", - "larrfs;": "\u291d", - "larrhk;": "\u21a9", - "larrlp;": "\u21ab", - "larrpl;": "\u2939", - "larrsim;": "\u2973", - "larrtl;": "\u21a2", - "lat;": "\u2aab", - "latail;": "\u2919", - "late;": "\u2aad", - "lates;": "\u2aad\ufe00", - "lbarr;": "\u290c", - "lbbrk;": "\u2772", - "lbrace;": "{", - "lbrack;": "[", - "lbrke;": "\u298b", - "lbrksld;": "\u298f", - "lbrkslu;": "\u298d", - "lcaron;": "\u013e", - "lcedil;": "\u013c", - "lceil;": "\u2308", - "lcub;": "{", - "lcy;": "\u043b", - "ldca;": "\u2936", - "ldquo;": "\u201c", - "ldquor;": "\u201e", - "ldrdhar;": "\u2967", - "ldrushar;": "\u294b", - "ldsh;": "\u21b2", - "le;": "\u2264", - "leftarrow;": "\u2190", - "leftarrowtail;": "\u21a2", - "leftharpoondown;": "\u21bd", - "leftharpoonup;": "\u21bc", - "leftleftarrows;": "\u21c7", - "leftrightarrow;": "\u2194", - "leftrightarrows;": "\u21c6", - "leftrightharpoons;": "\u21cb", - "leftrightsquigarrow;": "\u21ad", - "leftthreetimes;": "\u22cb", - "leg;": "\u22da", - "leq;": "\u2264", - "leqq;": "\u2266", - "leqslant;": "\u2a7d", - "les;": "\u2a7d", - "lescc;": "\u2aa8", - "lesdot;": "\u2a7f", - "lesdoto;": "\u2a81", - "lesdotor;": "\u2a83", - "lesg;": "\u22da\ufe00", - "lesges;": "\u2a93", - "lessapprox;": "\u2a85", - "lessdot;": "\u22d6", - "lesseqgtr;": "\u22da", - "lesseqqgtr;": "\u2a8b", - "lessgtr;": "\u2276", - "lesssim;": "\u2272", - "lfisht;": "\u297c", - "lfloor;": "\u230a", - "lfr;": "\U0001d529", - "lg;": "\u2276", - "lgE;": "\u2a91", - "lhard;": "\u21bd", - "lharu;": "\u21bc", - "lharul;": "\u296a", - "lhblk;": "\u2584", - "ljcy;": "\u0459", - "ll;": "\u226a", - "llarr;": "\u21c7", - "llcorner;": "\u231e", - "llhard;": "\u296b", - "lltri;": "\u25fa", - "lmidot;": "\u0140", - "lmoust;": "\u23b0", - "lmoustache;": "\u23b0", - "lnE;": "\u2268", - "lnap;": "\u2a89", - "lnapprox;": "\u2a89", - "lne;": "\u2a87", - "lneq;": "\u2a87", - "lneqq;": "\u2268", - "lnsim;": "\u22e6", - "loang;": "\u27ec", - "loarr;": "\u21fd", - "lobrk;": "\u27e6", - "longleftarrow;": "\u27f5", - "longleftrightarrow;": "\u27f7", - "longmapsto;": "\u27fc", - "longrightarrow;": "\u27f6", - "looparrowleft;": "\u21ab", - "looparrowright;": "\u21ac", - "lopar;": "\u2985", - "lopf;": "\U0001d55d", - "loplus;": "\u2a2d", - "lotimes;": "\u2a34", - "lowast;": "\u2217", - "lowbar;": "_", - "loz;": "\u25ca", - "lozenge;": "\u25ca", - "lozf;": "\u29eb", - "lpar;": "(", - "lparlt;": "\u2993", - "lrarr;": "\u21c6", - "lrcorner;": "\u231f", - "lrhar;": "\u21cb", - "lrhard;": "\u296d", - "lrm;": "\u200e", - "lrtri;": "\u22bf", - "lsaquo;": "\u2039", - "lscr;": "\U0001d4c1", - "lsh;": "\u21b0", - "lsim;": "\u2272", - "lsime;": "\u2a8d", - "lsimg;": "\u2a8f", - "lsqb;": "[", - "lsquo;": "\u2018", - "lsquor;": "\u201a", - "lstrok;": "\u0142", - "lt": "<", - "lt;": "<", - "ltcc;": "\u2aa6", - "ltcir;": "\u2a79", - 
"ltdot;": "\u22d6", - "lthree;": "\u22cb", - "ltimes;": "\u22c9", - "ltlarr;": "\u2976", - "ltquest;": "\u2a7b", - "ltrPar;": "\u2996", - "ltri;": "\u25c3", - "ltrie;": "\u22b4", - "ltrif;": "\u25c2", - "lurdshar;": "\u294a", - "luruhar;": "\u2966", - "lvertneqq;": "\u2268\ufe00", - "lvnE;": "\u2268\ufe00", - "mDDot;": "\u223a", - "macr": "\xaf", - "macr;": "\xaf", - "male;": "\u2642", - "malt;": "\u2720", - "maltese;": "\u2720", - "map;": "\u21a6", - "mapsto;": "\u21a6", - "mapstodown;": "\u21a7", - "mapstoleft;": "\u21a4", - "mapstoup;": "\u21a5", - "marker;": "\u25ae", - "mcomma;": "\u2a29", - "mcy;": "\u043c", - "mdash;": "\u2014", - "measuredangle;": "\u2221", - "mfr;": "\U0001d52a", - "mho;": "\u2127", - "micro": "\xb5", - "micro;": "\xb5", - "mid;": "\u2223", - "midast;": "*", - "midcir;": "\u2af0", - "middot": "\xb7", - "middot;": "\xb7", - "minus;": "\u2212", - "minusb;": "\u229f", - "minusd;": "\u2238", - "minusdu;": "\u2a2a", - "mlcp;": "\u2adb", - "mldr;": "\u2026", - "mnplus;": "\u2213", - "models;": "\u22a7", - "mopf;": "\U0001d55e", - "mp;": "\u2213", - "mscr;": "\U0001d4c2", - "mstpos;": "\u223e", - "mu;": "\u03bc", - "multimap;": "\u22b8", - "mumap;": "\u22b8", - "nGg;": "\u22d9\u0338", - "nGt;": "\u226b\u20d2", - "nGtv;": "\u226b\u0338", - "nLeftarrow;": "\u21cd", - "nLeftrightarrow;": "\u21ce", - "nLl;": "\u22d8\u0338", - "nLt;": "\u226a\u20d2", - "nLtv;": "\u226a\u0338", - "nRightarrow;": "\u21cf", - "nVDash;": "\u22af", - "nVdash;": "\u22ae", - "nabla;": "\u2207", - "nacute;": "\u0144", - "nang;": "\u2220\u20d2", - "nap;": "\u2249", - "napE;": "\u2a70\u0338", - "napid;": "\u224b\u0338", - "napos;": "\u0149", - "napprox;": "\u2249", - "natur;": "\u266e", - "natural;": "\u266e", - "naturals;": "\u2115", - "nbsp": "\xa0", - "nbsp;": "\xa0", - "nbump;": "\u224e\u0338", - "nbumpe;": "\u224f\u0338", - "ncap;": "\u2a43", - "ncaron;": "\u0148", - "ncedil;": "\u0146", - "ncong;": "\u2247", - "ncongdot;": "\u2a6d\u0338", - "ncup;": "\u2a42", - "ncy;": "\u043d", - "ndash;": "\u2013", - "ne;": "\u2260", - "neArr;": "\u21d7", - "nearhk;": "\u2924", - "nearr;": "\u2197", - "nearrow;": "\u2197", - "nedot;": "\u2250\u0338", - "nequiv;": "\u2262", - "nesear;": "\u2928", - "nesim;": "\u2242\u0338", - "nexist;": "\u2204", - "nexists;": "\u2204", - "nfr;": "\U0001d52b", - "ngE;": "\u2267\u0338", - "nge;": "\u2271", - "ngeq;": "\u2271", - "ngeqq;": "\u2267\u0338", - "ngeqslant;": "\u2a7e\u0338", - "nges;": "\u2a7e\u0338", - "ngsim;": "\u2275", - "ngt;": "\u226f", - "ngtr;": "\u226f", - "nhArr;": "\u21ce", - "nharr;": "\u21ae", - "nhpar;": "\u2af2", - "ni;": "\u220b", - "nis;": "\u22fc", - "nisd;": "\u22fa", - "niv;": "\u220b", - "njcy;": "\u045a", - "nlArr;": "\u21cd", - "nlE;": "\u2266\u0338", - "nlarr;": "\u219a", - "nldr;": "\u2025", - "nle;": "\u2270", - "nleftarrow;": "\u219a", - "nleftrightarrow;": "\u21ae", - "nleq;": "\u2270", - "nleqq;": "\u2266\u0338", - "nleqslant;": "\u2a7d\u0338", - "nles;": "\u2a7d\u0338", - "nless;": "\u226e", - "nlsim;": "\u2274", - "nlt;": "\u226e", - "nltri;": "\u22ea", - "nltrie;": "\u22ec", - "nmid;": "\u2224", - "nopf;": "\U0001d55f", - "not": "\xac", - "not;": "\xac", - "notin;": "\u2209", - "notinE;": "\u22f9\u0338", - "notindot;": "\u22f5\u0338", - "notinva;": "\u2209", - "notinvb;": "\u22f7", - "notinvc;": "\u22f6", - "notni;": "\u220c", - "notniva;": "\u220c", - "notnivb;": "\u22fe", - "notnivc;": "\u22fd", - "npar;": "\u2226", - "nparallel;": "\u2226", - "nparsl;": "\u2afd\u20e5", - "npart;": "\u2202\u0338", - "npolint;": "\u2a14", - "npr;": 
"\u2280", - "nprcue;": "\u22e0", - "npre;": "\u2aaf\u0338", - "nprec;": "\u2280", - "npreceq;": "\u2aaf\u0338", - "nrArr;": "\u21cf", - "nrarr;": "\u219b", - "nrarrc;": "\u2933\u0338", - "nrarrw;": "\u219d\u0338", - "nrightarrow;": "\u219b", - "nrtri;": "\u22eb", - "nrtrie;": "\u22ed", - "nsc;": "\u2281", - "nsccue;": "\u22e1", - "nsce;": "\u2ab0\u0338", - "nscr;": "\U0001d4c3", - "nshortmid;": "\u2224", - "nshortparallel;": "\u2226", - "nsim;": "\u2241", - "nsime;": "\u2244", - "nsimeq;": "\u2244", - "nsmid;": "\u2224", - "nspar;": "\u2226", - "nsqsube;": "\u22e2", - "nsqsupe;": "\u22e3", - "nsub;": "\u2284", - "nsubE;": "\u2ac5\u0338", - "nsube;": "\u2288", - "nsubset;": "\u2282\u20d2", - "nsubseteq;": "\u2288", - "nsubseteqq;": "\u2ac5\u0338", - "nsucc;": "\u2281", - "nsucceq;": "\u2ab0\u0338", - "nsup;": "\u2285", - "nsupE;": "\u2ac6\u0338", - "nsupe;": "\u2289", - "nsupset;": "\u2283\u20d2", - "nsupseteq;": "\u2289", - "nsupseteqq;": "\u2ac6\u0338", - "ntgl;": "\u2279", - "ntilde": "\xf1", - "ntilde;": "\xf1", - "ntlg;": "\u2278", - "ntriangleleft;": "\u22ea", - "ntrianglelefteq;": "\u22ec", - "ntriangleright;": "\u22eb", - "ntrianglerighteq;": "\u22ed", - "nu;": "\u03bd", - "num;": "#", - "numero;": "\u2116", - "numsp;": "\u2007", - "nvDash;": "\u22ad", - "nvHarr;": "\u2904", - "nvap;": "\u224d\u20d2", - "nvdash;": "\u22ac", - "nvge;": "\u2265\u20d2", - "nvgt;": ">\u20d2", - "nvinfin;": "\u29de", - "nvlArr;": "\u2902", - "nvle;": "\u2264\u20d2", - "nvlt;": "<\u20d2", - "nvltrie;": "\u22b4\u20d2", - "nvrArr;": "\u2903", - "nvrtrie;": "\u22b5\u20d2", - "nvsim;": "\u223c\u20d2", - "nwArr;": "\u21d6", - "nwarhk;": "\u2923", - "nwarr;": "\u2196", - "nwarrow;": "\u2196", - "nwnear;": "\u2927", - "oS;": "\u24c8", - "oacute": "\xf3", - "oacute;": "\xf3", - "oast;": "\u229b", - "ocir;": "\u229a", - "ocirc": "\xf4", - "ocirc;": "\xf4", - "ocy;": "\u043e", - "odash;": "\u229d", - "odblac;": "\u0151", - "odiv;": "\u2a38", - "odot;": "\u2299", - "odsold;": "\u29bc", - "oelig;": "\u0153", - "ofcir;": "\u29bf", - "ofr;": "\U0001d52c", - "ogon;": "\u02db", - "ograve": "\xf2", - "ograve;": "\xf2", - "ogt;": "\u29c1", - "ohbar;": "\u29b5", - "ohm;": "\u03a9", - "oint;": "\u222e", - "olarr;": "\u21ba", - "olcir;": "\u29be", - "olcross;": "\u29bb", - "oline;": "\u203e", - "olt;": "\u29c0", - "omacr;": "\u014d", - "omega;": "\u03c9", - "omicron;": "\u03bf", - "omid;": "\u29b6", - "ominus;": "\u2296", - "oopf;": "\U0001d560", - "opar;": "\u29b7", - "operp;": "\u29b9", - "oplus;": "\u2295", - "or;": "\u2228", - "orarr;": "\u21bb", - "ord;": "\u2a5d", - "order;": "\u2134", - "orderof;": "\u2134", - "ordf": "\xaa", - "ordf;": "\xaa", - "ordm": "\xba", - "ordm;": "\xba", - "origof;": "\u22b6", - "oror;": "\u2a56", - "orslope;": "\u2a57", - "orv;": "\u2a5b", - "oscr;": "\u2134", - "oslash": "\xf8", - "oslash;": "\xf8", - "osol;": "\u2298", - "otilde": "\xf5", - "otilde;": "\xf5", - "otimes;": "\u2297", - "otimesas;": "\u2a36", - "ouml": "\xf6", - "ouml;": "\xf6", - "ovbar;": "\u233d", - "par;": "\u2225", - "para": "\xb6", - "para;": "\xb6", - "parallel;": "\u2225", - "parsim;": "\u2af3", - "parsl;": "\u2afd", - "part;": "\u2202", - "pcy;": "\u043f", - "percnt;": "%", - "period;": ".", - "permil;": "\u2030", - "perp;": "\u22a5", - "pertenk;": "\u2031", - "pfr;": "\U0001d52d", - "phi;": "\u03c6", - "phiv;": "\u03d5", - "phmmat;": "\u2133", - "phone;": "\u260e", - "pi;": "\u03c0", - "pitchfork;": "\u22d4", - "piv;": "\u03d6", - "planck;": "\u210f", - "planckh;": "\u210e", - "plankv;": "\u210f", - "plus;": "+", - 
"plusacir;": "\u2a23", - "plusb;": "\u229e", - "pluscir;": "\u2a22", - "plusdo;": "\u2214", - "plusdu;": "\u2a25", - "pluse;": "\u2a72", - "plusmn": "\xb1", - "plusmn;": "\xb1", - "plussim;": "\u2a26", - "plustwo;": "\u2a27", - "pm;": "\xb1", - "pointint;": "\u2a15", - "popf;": "\U0001d561", - "pound": "\xa3", - "pound;": "\xa3", - "pr;": "\u227a", - "prE;": "\u2ab3", - "prap;": "\u2ab7", - "prcue;": "\u227c", - "pre;": "\u2aaf", - "prec;": "\u227a", - "precapprox;": "\u2ab7", - "preccurlyeq;": "\u227c", - "preceq;": "\u2aaf", - "precnapprox;": "\u2ab9", - "precneqq;": "\u2ab5", - "precnsim;": "\u22e8", - "precsim;": "\u227e", - "prime;": "\u2032", - "primes;": "\u2119", - "prnE;": "\u2ab5", - "prnap;": "\u2ab9", - "prnsim;": "\u22e8", - "prod;": "\u220f", - "profalar;": "\u232e", - "profline;": "\u2312", - "profsurf;": "\u2313", - "prop;": "\u221d", - "propto;": "\u221d", - "prsim;": "\u227e", - "prurel;": "\u22b0", - "pscr;": "\U0001d4c5", - "psi;": "\u03c8", - "puncsp;": "\u2008", - "qfr;": "\U0001d52e", - "qint;": "\u2a0c", - "qopf;": "\U0001d562", - "qprime;": "\u2057", - "qscr;": "\U0001d4c6", - "quaternions;": "\u210d", - "quatint;": "\u2a16", - "quest;": "?", - "questeq;": "\u225f", - "quot": "\"", - "quot;": "\"", - "rAarr;": "\u21db", - "rArr;": "\u21d2", - "rAtail;": "\u291c", - "rBarr;": "\u290f", - "rHar;": "\u2964", - "race;": "\u223d\u0331", - "racute;": "\u0155", - "radic;": "\u221a", - "raemptyv;": "\u29b3", - "rang;": "\u27e9", - "rangd;": "\u2992", - "range;": "\u29a5", - "rangle;": "\u27e9", - "raquo": "\xbb", - "raquo;": "\xbb", - "rarr;": "\u2192", - "rarrap;": "\u2975", - "rarrb;": "\u21e5", - "rarrbfs;": "\u2920", - "rarrc;": "\u2933", - "rarrfs;": "\u291e", - "rarrhk;": "\u21aa", - "rarrlp;": "\u21ac", - "rarrpl;": "\u2945", - "rarrsim;": "\u2974", - "rarrtl;": "\u21a3", - "rarrw;": "\u219d", - "ratail;": "\u291a", - "ratio;": "\u2236", - "rationals;": "\u211a", - "rbarr;": "\u290d", - "rbbrk;": "\u2773", - "rbrace;": "}", - "rbrack;": "]", - "rbrke;": "\u298c", - "rbrksld;": "\u298e", - "rbrkslu;": "\u2990", - "rcaron;": "\u0159", - "rcedil;": "\u0157", - "rceil;": "\u2309", - "rcub;": "}", - "rcy;": "\u0440", - "rdca;": "\u2937", - "rdldhar;": "\u2969", - "rdquo;": "\u201d", - "rdquor;": "\u201d", - "rdsh;": "\u21b3", - "real;": "\u211c", - "realine;": "\u211b", - "realpart;": "\u211c", - "reals;": "\u211d", - "rect;": "\u25ad", - "reg": "\xae", - "reg;": "\xae", - "rfisht;": "\u297d", - "rfloor;": "\u230b", - "rfr;": "\U0001d52f", - "rhard;": "\u21c1", - "rharu;": "\u21c0", - "rharul;": "\u296c", - "rho;": "\u03c1", - "rhov;": "\u03f1", - "rightarrow;": "\u2192", - "rightarrowtail;": "\u21a3", - "rightharpoondown;": "\u21c1", - "rightharpoonup;": "\u21c0", - "rightleftarrows;": "\u21c4", - "rightleftharpoons;": "\u21cc", - "rightrightarrows;": "\u21c9", - "rightsquigarrow;": "\u219d", - "rightthreetimes;": "\u22cc", - "ring;": "\u02da", - "risingdotseq;": "\u2253", - "rlarr;": "\u21c4", - "rlhar;": "\u21cc", - "rlm;": "\u200f", - "rmoust;": "\u23b1", - "rmoustache;": "\u23b1", - "rnmid;": "\u2aee", - "roang;": "\u27ed", - "roarr;": "\u21fe", - "robrk;": "\u27e7", - "ropar;": "\u2986", - "ropf;": "\U0001d563", - "roplus;": "\u2a2e", - "rotimes;": "\u2a35", - "rpar;": ")", - "rpargt;": "\u2994", - "rppolint;": "\u2a12", - "rrarr;": "\u21c9", - "rsaquo;": "\u203a", - "rscr;": "\U0001d4c7", - "rsh;": "\u21b1", - "rsqb;": "]", - "rsquo;": "\u2019", - "rsquor;": "\u2019", - "rthree;": "\u22cc", - "rtimes;": "\u22ca", - "rtri;": "\u25b9", - "rtrie;": "\u22b5", - 
"rtrif;": "\u25b8", - "rtriltri;": "\u29ce", - "ruluhar;": "\u2968", - "rx;": "\u211e", - "sacute;": "\u015b", - "sbquo;": "\u201a", - "sc;": "\u227b", - "scE;": "\u2ab4", - "scap;": "\u2ab8", - "scaron;": "\u0161", - "sccue;": "\u227d", - "sce;": "\u2ab0", - "scedil;": "\u015f", - "scirc;": "\u015d", - "scnE;": "\u2ab6", - "scnap;": "\u2aba", - "scnsim;": "\u22e9", - "scpolint;": "\u2a13", - "scsim;": "\u227f", - "scy;": "\u0441", - "sdot;": "\u22c5", - "sdotb;": "\u22a1", - "sdote;": "\u2a66", - "seArr;": "\u21d8", - "searhk;": "\u2925", - "searr;": "\u2198", - "searrow;": "\u2198", - "sect": "\xa7", - "sect;": "\xa7", - "semi;": ";", - "seswar;": "\u2929", - "setminus;": "\u2216", - "setmn;": "\u2216", - "sext;": "\u2736", - "sfr;": "\U0001d530", - "sfrown;": "\u2322", - "sharp;": "\u266f", - "shchcy;": "\u0449", - "shcy;": "\u0448", - "shortmid;": "\u2223", - "shortparallel;": "\u2225", - "shy": "\xad", - "shy;": "\xad", - "sigma;": "\u03c3", - "sigmaf;": "\u03c2", - "sigmav;": "\u03c2", - "sim;": "\u223c", - "simdot;": "\u2a6a", - "sime;": "\u2243", - "simeq;": "\u2243", - "simg;": "\u2a9e", - "simgE;": "\u2aa0", - "siml;": "\u2a9d", - "simlE;": "\u2a9f", - "simne;": "\u2246", - "simplus;": "\u2a24", - "simrarr;": "\u2972", - "slarr;": "\u2190", - "smallsetminus;": "\u2216", - "smashp;": "\u2a33", - "smeparsl;": "\u29e4", - "smid;": "\u2223", - "smile;": "\u2323", - "smt;": "\u2aaa", - "smte;": "\u2aac", - "smtes;": "\u2aac\ufe00", - "softcy;": "\u044c", - "sol;": "/", - "solb;": "\u29c4", - "solbar;": "\u233f", - "sopf;": "\U0001d564", - "spades;": "\u2660", - "spadesuit;": "\u2660", - "spar;": "\u2225", - "sqcap;": "\u2293", - "sqcaps;": "\u2293\ufe00", - "sqcup;": "\u2294", - "sqcups;": "\u2294\ufe00", - "sqsub;": "\u228f", - "sqsube;": "\u2291", - "sqsubset;": "\u228f", - "sqsubseteq;": "\u2291", - "sqsup;": "\u2290", - "sqsupe;": "\u2292", - "sqsupset;": "\u2290", - "sqsupseteq;": "\u2292", - "squ;": "\u25a1", - "square;": "\u25a1", - "squarf;": "\u25aa", - "squf;": "\u25aa", - "srarr;": "\u2192", - "sscr;": "\U0001d4c8", - "ssetmn;": "\u2216", - "ssmile;": "\u2323", - "sstarf;": "\u22c6", - "star;": "\u2606", - "starf;": "\u2605", - "straightepsilon;": "\u03f5", - "straightphi;": "\u03d5", - "strns;": "\xaf", - "sub;": "\u2282", - "subE;": "\u2ac5", - "subdot;": "\u2abd", - "sube;": "\u2286", - "subedot;": "\u2ac3", - "submult;": "\u2ac1", - "subnE;": "\u2acb", - "subne;": "\u228a", - "subplus;": "\u2abf", - "subrarr;": "\u2979", - "subset;": "\u2282", - "subseteq;": "\u2286", - "subseteqq;": "\u2ac5", - "subsetneq;": "\u228a", - "subsetneqq;": "\u2acb", - "subsim;": "\u2ac7", - "subsub;": "\u2ad5", - "subsup;": "\u2ad3", - "succ;": "\u227b", - "succapprox;": "\u2ab8", - "succcurlyeq;": "\u227d", - "succeq;": "\u2ab0", - "succnapprox;": "\u2aba", - "succneqq;": "\u2ab6", - "succnsim;": "\u22e9", - "succsim;": "\u227f", - "sum;": "\u2211", - "sung;": "\u266a", - "sup1": "\xb9", - "sup1;": "\xb9", - "sup2": "\xb2", - "sup2;": "\xb2", - "sup3": "\xb3", - "sup3;": "\xb3", - "sup;": "\u2283", - "supE;": "\u2ac6", - "supdot;": "\u2abe", - "supdsub;": "\u2ad8", - "supe;": "\u2287", - "supedot;": "\u2ac4", - "suphsol;": "\u27c9", - "suphsub;": "\u2ad7", - "suplarr;": "\u297b", - "supmult;": "\u2ac2", - "supnE;": "\u2acc", - "supne;": "\u228b", - "supplus;": "\u2ac0", - "supset;": "\u2283", - "supseteq;": "\u2287", - "supseteqq;": "\u2ac6", - "supsetneq;": "\u228b", - "supsetneqq;": "\u2acc", - "supsim;": "\u2ac8", - "supsub;": "\u2ad4", - "supsup;": "\u2ad6", - "swArr;": "\u21d9", - 
"swarhk;": "\u2926", - "swarr;": "\u2199", - "swarrow;": "\u2199", - "swnwar;": "\u292a", - "szlig": "\xdf", - "szlig;": "\xdf", - "target;": "\u2316", - "tau;": "\u03c4", - "tbrk;": "\u23b4", - "tcaron;": "\u0165", - "tcedil;": "\u0163", - "tcy;": "\u0442", - "tdot;": "\u20db", - "telrec;": "\u2315", - "tfr;": "\U0001d531", - "there4;": "\u2234", - "therefore;": "\u2234", - "theta;": "\u03b8", - "thetasym;": "\u03d1", - "thetav;": "\u03d1", - "thickapprox;": "\u2248", - "thicksim;": "\u223c", - "thinsp;": "\u2009", - "thkap;": "\u2248", - "thksim;": "\u223c", - "thorn": "\xfe", - "thorn;": "\xfe", - "tilde;": "\u02dc", - "times": "\xd7", - "times;": "\xd7", - "timesb;": "\u22a0", - "timesbar;": "\u2a31", - "timesd;": "\u2a30", - "tint;": "\u222d", - "toea;": "\u2928", - "top;": "\u22a4", - "topbot;": "\u2336", - "topcir;": "\u2af1", - "topf;": "\U0001d565", - "topfork;": "\u2ada", - "tosa;": "\u2929", - "tprime;": "\u2034", - "trade;": "\u2122", - "triangle;": "\u25b5", - "triangledown;": "\u25bf", - "triangleleft;": "\u25c3", - "trianglelefteq;": "\u22b4", - "triangleq;": "\u225c", - "triangleright;": "\u25b9", - "trianglerighteq;": "\u22b5", - "tridot;": "\u25ec", - "trie;": "\u225c", - "triminus;": "\u2a3a", - "triplus;": "\u2a39", - "trisb;": "\u29cd", - "tritime;": "\u2a3b", - "trpezium;": "\u23e2", - "tscr;": "\U0001d4c9", - "tscy;": "\u0446", - "tshcy;": "\u045b", - "tstrok;": "\u0167", - "twixt;": "\u226c", - "twoheadleftarrow;": "\u219e", - "twoheadrightarrow;": "\u21a0", - "uArr;": "\u21d1", - "uHar;": "\u2963", - "uacute": "\xfa", - "uacute;": "\xfa", - "uarr;": "\u2191", - "ubrcy;": "\u045e", - "ubreve;": "\u016d", - "ucirc": "\xfb", - "ucirc;": "\xfb", - "ucy;": "\u0443", - "udarr;": "\u21c5", - "udblac;": "\u0171", - "udhar;": "\u296e", - "ufisht;": "\u297e", - "ufr;": "\U0001d532", - "ugrave": "\xf9", - "ugrave;": "\xf9", - "uharl;": "\u21bf", - "uharr;": "\u21be", - "uhblk;": "\u2580", - "ulcorn;": "\u231c", - "ulcorner;": "\u231c", - "ulcrop;": "\u230f", - "ultri;": "\u25f8", - "umacr;": "\u016b", - "uml": "\xa8", - "uml;": "\xa8", - "uogon;": "\u0173", - "uopf;": "\U0001d566", - "uparrow;": "\u2191", - "updownarrow;": "\u2195", - "upharpoonleft;": "\u21bf", - "upharpoonright;": "\u21be", - "uplus;": "\u228e", - "upsi;": "\u03c5", - "upsih;": "\u03d2", - "upsilon;": "\u03c5", - "upuparrows;": "\u21c8", - "urcorn;": "\u231d", - "urcorner;": "\u231d", - "urcrop;": "\u230e", - "uring;": "\u016f", - "urtri;": "\u25f9", - "uscr;": "\U0001d4ca", - "utdot;": "\u22f0", - "utilde;": "\u0169", - "utri;": "\u25b5", - "utrif;": "\u25b4", - "uuarr;": "\u21c8", - "uuml": "\xfc", - "uuml;": "\xfc", - "uwangle;": "\u29a7", - "vArr;": "\u21d5", - "vBar;": "\u2ae8", - "vBarv;": "\u2ae9", - "vDash;": "\u22a8", - "vangrt;": "\u299c", - "varepsilon;": "\u03f5", - "varkappa;": "\u03f0", - "varnothing;": "\u2205", - "varphi;": "\u03d5", - "varpi;": "\u03d6", - "varpropto;": "\u221d", - "varr;": "\u2195", - "varrho;": "\u03f1", - "varsigma;": "\u03c2", - "varsubsetneq;": "\u228a\ufe00", - "varsubsetneqq;": "\u2acb\ufe00", - "varsupsetneq;": "\u228b\ufe00", - "varsupsetneqq;": "\u2acc\ufe00", - "vartheta;": "\u03d1", - "vartriangleleft;": "\u22b2", - "vartriangleright;": "\u22b3", - "vcy;": "\u0432", - "vdash;": "\u22a2", - "vee;": "\u2228", - "veebar;": "\u22bb", - "veeeq;": "\u225a", - "vellip;": "\u22ee", - "verbar;": "|", - "vert;": "|", - "vfr;": "\U0001d533", - "vltri;": "\u22b2", - "vnsub;": "\u2282\u20d2", - "vnsup;": "\u2283\u20d2", - "vopf;": "\U0001d567", - "vprop;": "\u221d", - 
"vrtri;": "\u22b3", - "vscr;": "\U0001d4cb", - "vsubnE;": "\u2acb\ufe00", - "vsubne;": "\u228a\ufe00", - "vsupnE;": "\u2acc\ufe00", - "vsupne;": "\u228b\ufe00", - "vzigzag;": "\u299a", - "wcirc;": "\u0175", - "wedbar;": "\u2a5f", - "wedge;": "\u2227", - "wedgeq;": "\u2259", - "weierp;": "\u2118", - "wfr;": "\U0001d534", - "wopf;": "\U0001d568", - "wp;": "\u2118", - "wr;": "\u2240", - "wreath;": "\u2240", - "wscr;": "\U0001d4cc", - "xcap;": "\u22c2", - "xcirc;": "\u25ef", - "xcup;": "\u22c3", - "xdtri;": "\u25bd", - "xfr;": "\U0001d535", - "xhArr;": "\u27fa", - "xharr;": "\u27f7", - "xi;": "\u03be", - "xlArr;": "\u27f8", - "xlarr;": "\u27f5", - "xmap;": "\u27fc", - "xnis;": "\u22fb", - "xodot;": "\u2a00", - "xopf;": "\U0001d569", - "xoplus;": "\u2a01", - "xotime;": "\u2a02", - "xrArr;": "\u27f9", - "xrarr;": "\u27f6", - "xscr;": "\U0001d4cd", - "xsqcup;": "\u2a06", - "xuplus;": "\u2a04", - "xutri;": "\u25b3", - "xvee;": "\u22c1", - "xwedge;": "\u22c0", - "yacute": "\xfd", - "yacute;": "\xfd", - "yacy;": "\u044f", - "ycirc;": "\u0177", - "ycy;": "\u044b", - "yen": "\xa5", - "yen;": "\xa5", - "yfr;": "\U0001d536", - "yicy;": "\u0457", - "yopf;": "\U0001d56a", - "yscr;": "\U0001d4ce", - "yucy;": "\u044e", - "yuml": "\xff", - "yuml;": "\xff", - "zacute;": "\u017a", - "zcaron;": "\u017e", - "zcy;": "\u0437", - "zdot;": "\u017c", - "zeetrf;": "\u2128", - "zeta;": "\u03b6", - "zfr;": "\U0001d537", - "zhcy;": "\u0436", - "zigrarr;": "\u21dd", - "zopf;": "\U0001d56b", - "zscr;": "\U0001d4cf", - "zwj;": "\u200d", - "zwnj;": "\u200c", -} - -replacementCharacters = { - 0x0: "\uFFFD", - 0x0d: "\u000D", - 0x80: "\u20AC", - 0x81: "\u0081", - 0x82: "\u201A", - 0x83: "\u0192", - 0x84: "\u201E", - 0x85: "\u2026", - 0x86: "\u2020", - 0x87: "\u2021", - 0x88: "\u02C6", - 0x89: "\u2030", - 0x8A: "\u0160", - 0x8B: "\u2039", - 0x8C: "\u0152", - 0x8D: "\u008D", - 0x8E: "\u017D", - 0x8F: "\u008F", - 0x90: "\u0090", - 0x91: "\u2018", - 0x92: "\u2019", - 0x93: "\u201C", - 0x94: "\u201D", - 0x95: "\u2022", - 0x96: "\u2013", - 0x97: "\u2014", - 0x98: "\u02DC", - 0x99: "\u2122", - 0x9A: "\u0161", - 0x9B: "\u203A", - 0x9C: "\u0153", - 0x9D: "\u009D", - 0x9E: "\u017E", - 0x9F: "\u0178", -} - -tokenTypes = { - "Doctype": 0, - "Characters": 1, - "SpaceCharacters": 2, - "StartTag": 3, - "EndTag": 4, - "EmptyTag": 5, - "Comment": 6, - "ParseError": 7 -} - -tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], - tokenTypes["EmptyTag"]]) - - -prefixes = {v: k for k, v in namespaces.items()} -prefixes["http://www.w3.org/1998/Math/MathML"] = "math" - - -class DataLossWarning(UserWarning): - """Raised when the current tree is unable to represent the input data""" - pass - - -class _ReparseException(Exception): - pass diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__init__.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index d1d2e5b..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc deleted file mode 100644 index 
2cc31a3..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc deleted file mode 100644 index 82d1ddb..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc deleted file mode 100644 index d10671d..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc deleted file mode 100644 index e5223ac..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc deleted file mode 100644 index 3a22c06..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc deleted file mode 100644 index 6605f18..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc deleted file mode 100644 index 853cc38..0000000 Binary files a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py deleted file mode 100644 index 5ba926e..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py +++ /dev/null @@ -1,29 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from . import base - -from collections import OrderedDict - - -def _attr_key(attr): - """Return an appropriate key for an attribute for sorting - - Attributes have a namespace that can be either ``None`` or a string. We - can't compare the two because they're different types, so we convert - ``None`` to an empty string first. 
- - """ - return (attr[0][0] or ''), attr[0][1] - - -class Filter(base.Filter): - """Alphabetizes attributes for elements""" - def __iter__(self): - for token in base.Filter.__iter__(self): - if token["type"] in ("StartTag", "EmptyTag"): - attrs = OrderedDict() - for name, value in sorted(token["data"].items(), - key=_attr_key): - attrs[name] = value - token["data"] = attrs - yield token diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/base.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/base.py deleted file mode 100644 index c7dbaed..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/base.py +++ /dev/null @@ -1,12 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - - -class Filter(object): - def __init__(self, source): - self.source = source - - def __iter__(self): - return iter(self.source) - - def __getattr__(self, name): - return getattr(self.source, name) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py deleted file mode 100644 index aefb5c8..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from . import base - - -class Filter(base.Filter): - """Injects ```` tag into head of document""" - def __init__(self, source, encoding): - """Creates a Filter - - :arg source: the source token stream - - :arg encoding: the encoding to set - - """ - base.Filter.__init__(self, source) - self.encoding = encoding - - def __iter__(self): - state = "pre_head" - meta_found = (self.encoding is None) - pending = [] - - for token in base.Filter.__iter__(self): - type = token["type"] - if type == "StartTag": - if token["name"].lower() == "head": - state = "in_head" - - elif type == "EmptyTag": - if token["name"].lower() == "meta": - # replace charset with actual encoding - has_http_equiv_content_type = False - for (namespace, name), value in token["data"].items(): - if namespace is not None: - continue - elif name.lower() == 'charset': - token["data"][(namespace, name)] = self.encoding - meta_found = True - break - elif name == 'http-equiv' and value.lower() == 'content-type': - has_http_equiv_content_type = True - else: - if has_http_equiv_content_type and (None, "content") in token["data"]: - token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding - meta_found = True - - elif token["name"].lower() == "head" and not meta_found: - # insert meta into empty head - yield {"type": "StartTag", "name": "head", - "data": token["data"]} - yield {"type": "EmptyTag", "name": "meta", - "data": {(None, "charset"): self.encoding}} - yield {"type": "EndTag", "name": "head"} - meta_found = True - continue - - elif type == "EndTag": - if token["name"].lower() == "head" and pending: - # insert meta into head (if necessary) and flush pending queue - yield pending.pop(0) - if not meta_found: - yield {"type": "EmptyTag", "name": "meta", - "data": {(None, "charset"): self.encoding}} - while pending: - yield pending.pop(0) - meta_found = True - state = "post_head" - - if state == "in_head": - pending.append(token) - else: - yield token diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py deleted file mode 100644 index fcc07ee..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py +++ /dev/null 
@@ -1,93 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from pip._vendor.six import text_type - -from . import base -from ..constants import namespaces, voidElements - -from ..constants import spaceCharacters -spaceCharacters = "".join(spaceCharacters) - - -class Filter(base.Filter): - """Lints the token stream for errors - - If it finds any errors, it'll raise an ``AssertionError``. - - """ - def __init__(self, source, require_matching_tags=True): - """Creates a Filter - - :arg source: the source token stream - - :arg require_matching_tags: whether or not to require matching tags - - """ - super(Filter, self).__init__(source) - self.require_matching_tags = require_matching_tags - - def __iter__(self): - open_elements = [] - for token in base.Filter.__iter__(self): - type = token["type"] - if type in ("StartTag", "EmptyTag"): - namespace = token["namespace"] - name = token["name"] - assert namespace is None or isinstance(namespace, text_type) - assert namespace != "" - assert isinstance(name, text_type) - assert name != "" - assert isinstance(token["data"], dict) - if (not namespace or namespace == namespaces["html"]) and name in voidElements: - assert type == "EmptyTag" - else: - assert type == "StartTag" - if type == "StartTag" and self.require_matching_tags: - open_elements.append((namespace, name)) - for (namespace, name), value in token["data"].items(): - assert namespace is None or isinstance(namespace, text_type) - assert namespace != "" - assert isinstance(name, text_type) - assert name != "" - assert isinstance(value, text_type) - - elif type == "EndTag": - namespace = token["namespace"] - name = token["name"] - assert namespace is None or isinstance(namespace, text_type) - assert namespace != "" - assert isinstance(name, text_type) - assert name != "" - if (not namespace or namespace == namespaces["html"]) and name in voidElements: - assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name} - elif self.require_matching_tags: - start = open_elements.pop() - assert start == (namespace, name) - - elif type == "Comment": - data = token["data"] - assert isinstance(data, text_type) - - elif type in ("Characters", "SpaceCharacters"): - data = token["data"] - assert isinstance(data, text_type) - assert data != "" - if type == "SpaceCharacters": - assert data.strip(spaceCharacters) == "" - - elif type == "Doctype": - name = token["name"] - assert name is None or isinstance(name, text_type) - assert token["publicId"] is None or isinstance(name, text_type) - assert token["systemId"] is None or isinstance(name, text_type) - - elif type == "Entity": - assert isinstance(token["name"], text_type) - - elif type == "SerializerError": - assert isinstance(token["data"], text_type) - - else: - assert False, "Unknown token type: %(type)s" % {"type": type} - - yield token diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py deleted file mode 100644 index 4a86501..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py +++ /dev/null @@ -1,207 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from . 
import base - - -class Filter(base.Filter): - """Removes optional tags from the token stream""" - def slider(self): - previous1 = previous2 = None - for token in self.source: - if previous1 is not None: - yield previous2, previous1, token - previous2 = previous1 - previous1 = token - if previous1 is not None: - yield previous2, previous1, None - - def __iter__(self): - for previous, token, next in self.slider(): - type = token["type"] - if type == "StartTag": - if (token["data"] or - not self.is_optional_start(token["name"], previous, next)): - yield token - elif type == "EndTag": - if not self.is_optional_end(token["name"], next): - yield token - else: - yield token - - def is_optional_start(self, tagname, previous, next): - type = next and next["type"] or None - if tagname in 'html': - # An html element's start tag may be omitted if the first thing - # inside the html element is not a space character or a comment. - return type not in ("Comment", "SpaceCharacters") - elif tagname == 'head': - # A head element's start tag may be omitted if the first thing - # inside the head element is an element. - # XXX: we also omit the start tag if the head element is empty - if type in ("StartTag", "EmptyTag"): - return True - elif type == "EndTag": - return next["name"] == "head" - elif tagname == 'body': - # A body element's start tag may be omitted if the first thing - # inside the body element is not a space character or a comment, - # except if the first thing inside the body element is a script - # or style element and the node immediately preceding the body - # element is a head element whose end tag has been omitted. - if type in ("Comment", "SpaceCharacters"): - return False - elif type == "StartTag": - # XXX: we do not look at the preceding event, so we never omit - # the body element's start tag if it's followed by a script or - # a style element. - return next["name"] not in ('script', 'style') - else: - return True - elif tagname == 'colgroup': - # A colgroup element's start tag may be omitted if the first thing - # inside the colgroup element is a col element, and if the element - # is not immediately preceded by another colgroup element whose - # end tag has been omitted. - if type in ("StartTag", "EmptyTag"): - # XXX: we do not look at the preceding event, so instead we never - # omit the colgroup element's end tag when it is immediately - # followed by another colgroup element. See is_optional_end. - return next["name"] == "col" - else: - return False - elif tagname == 'tbody': - # A tbody element's start tag may be omitted if the first thing - # inside the tbody element is a tr element, and if the element is - # not immediately preceded by a tbody, thead, or tfoot element - # whose end tag has been omitted. - if type == "StartTag": - # omit the thead and tfoot elements' end tag when they are - # immediately followed by a tbody element. See is_optional_end. - if previous and previous['type'] == 'EndTag' and \ - previous['name'] in ('tbody', 'thead', 'tfoot'): - return False - return next["name"] == 'tr' - else: - return False - return False - - def is_optional_end(self, tagname, next): - type = next and next["type"] or None - if tagname in ('html', 'head', 'body'): - # An html element's end tag may be omitted if the html element - # is not immediately followed by a space character or a comment. 
- return type not in ("Comment", "SpaceCharacters") - elif tagname in ('li', 'optgroup', 'tr'): - # A li element's end tag may be omitted if the li element is - # immediately followed by another li element or if there is - # no more content in the parent element. - # An optgroup element's end tag may be omitted if the optgroup - # element is immediately followed by another optgroup element, - # or if there is no more content in the parent element. - # A tr element's end tag may be omitted if the tr element is - # immediately followed by another tr element, or if there is - # no more content in the parent element. - if type == "StartTag": - return next["name"] == tagname - else: - return type == "EndTag" or type is None - elif tagname in ('dt', 'dd'): - # A dt element's end tag may be omitted if the dt element is - # immediately followed by another dt element or a dd element. - # A dd element's end tag may be omitted if the dd element is - # immediately followed by another dd element or a dt element, - # or if there is no more content in the parent element. - if type == "StartTag": - return next["name"] in ('dt', 'dd') - elif tagname == 'dd': - return type == "EndTag" or type is None - else: - return False - elif tagname == 'p': - # A p element's end tag may be omitted if the p element is - # immediately followed by an address, article, aside, - # blockquote, datagrid, dialog, dir, div, dl, fieldset, - # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, - # nav, ol, p, pre, section, table, or ul, element, or if - # there is no more content in the parent element. - if type in ("StartTag", "EmptyTag"): - return next["name"] in ('address', 'article', 'aside', - 'blockquote', 'datagrid', 'dialog', - 'dir', 'div', 'dl', 'fieldset', 'footer', - 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', - 'header', 'hr', 'menu', 'nav', 'ol', - 'p', 'pre', 'section', 'table', 'ul') - else: - return type == "EndTag" or type is None - elif tagname == 'option': - # An option element's end tag may be omitted if the option - # element is immediately followed by another option element, - # or if it is immediately followed by an optgroup - # element, or if there is no more content in the parent - # element. - if type == "StartTag": - return next["name"] in ('option', 'optgroup') - else: - return type == "EndTag" or type is None - elif tagname in ('rt', 'rp'): - # An rt element's end tag may be omitted if the rt element is - # immediately followed by an rt or rp element, or if there is - # no more content in the parent element. - # An rp element's end tag may be omitted if the rp element is - # immediately followed by an rt or rp element, or if there is - # no more content in the parent element. - if type == "StartTag": - return next["name"] in ('rt', 'rp') - else: - return type == "EndTag" or type is None - elif tagname == 'colgroup': - # A colgroup element's end tag may be omitted if the colgroup - # element is not immediately followed by a space character or - # a comment. - if type in ("Comment", "SpaceCharacters"): - return False - elif type == "StartTag": - # XXX: we also look for an immediately following colgroup - # element. See is_optional_start. - return next["name"] != 'colgroup' - else: - return True - elif tagname in ('thead', 'tbody'): - # A thead element's end tag may be omitted if the thead element - # is immediately followed by a tbody or tfoot element. 
- # A tbody element's end tag may be omitted if the tbody element - # is immediately followed by a tbody or tfoot element, or if - # there is no more content in the parent element. - # A tfoot element's end tag may be omitted if the tfoot element - # is immediately followed by a tbody element, or if there is no - # more content in the parent element. - # XXX: we never omit the end tag when the following element is - # a tbody. See is_optional_start. - if type == "StartTag": - return next["name"] in ['tbody', 'tfoot'] - elif tagname == 'tbody': - return type == "EndTag" or type is None - else: - return False - elif tagname == 'tfoot': - # A tfoot element's end tag may be omitted if the tfoot element - # is immediately followed by a tbody element, or if there is no - # more content in the parent element. - # XXX: we never omit the end tag when the following element is - # a tbody. See is_optional_start. - if type == "StartTag": - return next["name"] == 'tbody' - else: - return type == "EndTag" or type is None - elif tagname in ('td', 'th'): - # A td element's end tag may be omitted if the td element is - # immediately followed by a td or th element, or if there is - # no more content in the parent element. - # A th element's end tag may be omitted if the th element is - # immediately followed by a td or th element, or if there is - # no more content in the parent element. - if type == "StartTag": - return next["name"] in ('td', 'th') - else: - return type == "EndTag" or type is None - return False diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py deleted file mode 100644 index aa7431d..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py +++ /dev/null @@ -1,916 +0,0 @@ -"""Deprecated from html5lib 1.1. - -See `here `_ for -information about its deprecation; `Bleach `_ -is recommended as a replacement. Please let us know in the aforementioned issue -if Bleach is unsuitable for your needs. - -""" -from __future__ import absolute_import, division, unicode_literals - -import re -import warnings -from xml.sax.saxutils import escape, unescape - -from pip._vendor.six.moves import urllib_parse as urlparse - -from . 
import base -from ..constants import namespaces, prefixes - -__all__ = ["Filter"] - - -_deprecation_msg = ( - "html5lib's sanitizer is deprecated; see " + - "https://github.com/html5lib/html5lib-python/issues/443 and please let " + - "us know if Bleach is unsuitable for your needs" -) - -warnings.warn(_deprecation_msg, DeprecationWarning) - -allowed_elements = frozenset(( - (namespaces['html'], 'a'), - (namespaces['html'], 'abbr'), - (namespaces['html'], 'acronym'), - (namespaces['html'], 'address'), - (namespaces['html'], 'area'), - (namespaces['html'], 'article'), - (namespaces['html'], 'aside'), - (namespaces['html'], 'audio'), - (namespaces['html'], 'b'), - (namespaces['html'], 'big'), - (namespaces['html'], 'blockquote'), - (namespaces['html'], 'br'), - (namespaces['html'], 'button'), - (namespaces['html'], 'canvas'), - (namespaces['html'], 'caption'), - (namespaces['html'], 'center'), - (namespaces['html'], 'cite'), - (namespaces['html'], 'code'), - (namespaces['html'], 'col'), - (namespaces['html'], 'colgroup'), - (namespaces['html'], 'command'), - (namespaces['html'], 'datagrid'), - (namespaces['html'], 'datalist'), - (namespaces['html'], 'dd'), - (namespaces['html'], 'del'), - (namespaces['html'], 'details'), - (namespaces['html'], 'dfn'), - (namespaces['html'], 'dialog'), - (namespaces['html'], 'dir'), - (namespaces['html'], 'div'), - (namespaces['html'], 'dl'), - (namespaces['html'], 'dt'), - (namespaces['html'], 'em'), - (namespaces['html'], 'event-source'), - (namespaces['html'], 'fieldset'), - (namespaces['html'], 'figcaption'), - (namespaces['html'], 'figure'), - (namespaces['html'], 'footer'), - (namespaces['html'], 'font'), - (namespaces['html'], 'form'), - (namespaces['html'], 'header'), - (namespaces['html'], 'h1'), - (namespaces['html'], 'h2'), - (namespaces['html'], 'h3'), - (namespaces['html'], 'h4'), - (namespaces['html'], 'h5'), - (namespaces['html'], 'h6'), - (namespaces['html'], 'hr'), - (namespaces['html'], 'i'), - (namespaces['html'], 'img'), - (namespaces['html'], 'input'), - (namespaces['html'], 'ins'), - (namespaces['html'], 'keygen'), - (namespaces['html'], 'kbd'), - (namespaces['html'], 'label'), - (namespaces['html'], 'legend'), - (namespaces['html'], 'li'), - (namespaces['html'], 'm'), - (namespaces['html'], 'map'), - (namespaces['html'], 'menu'), - (namespaces['html'], 'meter'), - (namespaces['html'], 'multicol'), - (namespaces['html'], 'nav'), - (namespaces['html'], 'nextid'), - (namespaces['html'], 'ol'), - (namespaces['html'], 'output'), - (namespaces['html'], 'optgroup'), - (namespaces['html'], 'option'), - (namespaces['html'], 'p'), - (namespaces['html'], 'pre'), - (namespaces['html'], 'progress'), - (namespaces['html'], 'q'), - (namespaces['html'], 's'), - (namespaces['html'], 'samp'), - (namespaces['html'], 'section'), - (namespaces['html'], 'select'), - (namespaces['html'], 'small'), - (namespaces['html'], 'sound'), - (namespaces['html'], 'source'), - (namespaces['html'], 'spacer'), - (namespaces['html'], 'span'), - (namespaces['html'], 'strike'), - (namespaces['html'], 'strong'), - (namespaces['html'], 'sub'), - (namespaces['html'], 'sup'), - (namespaces['html'], 'table'), - (namespaces['html'], 'tbody'), - (namespaces['html'], 'td'), - (namespaces['html'], 'textarea'), - (namespaces['html'], 'time'), - (namespaces['html'], 'tfoot'), - (namespaces['html'], 'th'), - (namespaces['html'], 'thead'), - (namespaces['html'], 'tr'), - (namespaces['html'], 'tt'), - (namespaces['html'], 'u'), - (namespaces['html'], 'ul'), - (namespaces['html'], 'var'), - 
(namespaces['html'], 'video'), - (namespaces['mathml'], 'maction'), - (namespaces['mathml'], 'math'), - (namespaces['mathml'], 'merror'), - (namespaces['mathml'], 'mfrac'), - (namespaces['mathml'], 'mi'), - (namespaces['mathml'], 'mmultiscripts'), - (namespaces['mathml'], 'mn'), - (namespaces['mathml'], 'mo'), - (namespaces['mathml'], 'mover'), - (namespaces['mathml'], 'mpadded'), - (namespaces['mathml'], 'mphantom'), - (namespaces['mathml'], 'mprescripts'), - (namespaces['mathml'], 'mroot'), - (namespaces['mathml'], 'mrow'), - (namespaces['mathml'], 'mspace'), - (namespaces['mathml'], 'msqrt'), - (namespaces['mathml'], 'mstyle'), - (namespaces['mathml'], 'msub'), - (namespaces['mathml'], 'msubsup'), - (namespaces['mathml'], 'msup'), - (namespaces['mathml'], 'mtable'), - (namespaces['mathml'], 'mtd'), - (namespaces['mathml'], 'mtext'), - (namespaces['mathml'], 'mtr'), - (namespaces['mathml'], 'munder'), - (namespaces['mathml'], 'munderover'), - (namespaces['mathml'], 'none'), - (namespaces['svg'], 'a'), - (namespaces['svg'], 'animate'), - (namespaces['svg'], 'animateColor'), - (namespaces['svg'], 'animateMotion'), - (namespaces['svg'], 'animateTransform'), - (namespaces['svg'], 'clipPath'), - (namespaces['svg'], 'circle'), - (namespaces['svg'], 'defs'), - (namespaces['svg'], 'desc'), - (namespaces['svg'], 'ellipse'), - (namespaces['svg'], 'font-face'), - (namespaces['svg'], 'font-face-name'), - (namespaces['svg'], 'font-face-src'), - (namespaces['svg'], 'g'), - (namespaces['svg'], 'glyph'), - (namespaces['svg'], 'hkern'), - (namespaces['svg'], 'linearGradient'), - (namespaces['svg'], 'line'), - (namespaces['svg'], 'marker'), - (namespaces['svg'], 'metadata'), - (namespaces['svg'], 'missing-glyph'), - (namespaces['svg'], 'mpath'), - (namespaces['svg'], 'path'), - (namespaces['svg'], 'polygon'), - (namespaces['svg'], 'polyline'), - (namespaces['svg'], 'radialGradient'), - (namespaces['svg'], 'rect'), - (namespaces['svg'], 'set'), - (namespaces['svg'], 'stop'), - (namespaces['svg'], 'svg'), - (namespaces['svg'], 'switch'), - (namespaces['svg'], 'text'), - (namespaces['svg'], 'title'), - (namespaces['svg'], 'tspan'), - (namespaces['svg'], 'use'), -)) - -allowed_attributes = frozenset(( - # HTML attributes - (None, 'abbr'), - (None, 'accept'), - (None, 'accept-charset'), - (None, 'accesskey'), - (None, 'action'), - (None, 'align'), - (None, 'alt'), - (None, 'autocomplete'), - (None, 'autofocus'), - (None, 'axis'), - (None, 'background'), - (None, 'balance'), - (None, 'bgcolor'), - (None, 'bgproperties'), - (None, 'border'), - (None, 'bordercolor'), - (None, 'bordercolordark'), - (None, 'bordercolorlight'), - (None, 'bottompadding'), - (None, 'cellpadding'), - (None, 'cellspacing'), - (None, 'ch'), - (None, 'challenge'), - (None, 'char'), - (None, 'charoff'), - (None, 'choff'), - (None, 'charset'), - (None, 'checked'), - (None, 'cite'), - (None, 'class'), - (None, 'clear'), - (None, 'color'), - (None, 'cols'), - (None, 'colspan'), - (None, 'compact'), - (None, 'contenteditable'), - (None, 'controls'), - (None, 'coords'), - (None, 'data'), - (None, 'datafld'), - (None, 'datapagesize'), - (None, 'datasrc'), - (None, 'datetime'), - (None, 'default'), - (None, 'delay'), - (None, 'dir'), - (None, 'disabled'), - (None, 'draggable'), - (None, 'dynsrc'), - (None, 'enctype'), - (None, 'end'), - (None, 'face'), - (None, 'for'), - (None, 'form'), - (None, 'frame'), - (None, 'galleryimg'), - (None, 'gutter'), - (None, 'headers'), - (None, 'height'), - (None, 'hidefocus'), - (None, 'hidden'), - (None, 
'high'), - (None, 'href'), - (None, 'hreflang'), - (None, 'hspace'), - (None, 'icon'), - (None, 'id'), - (None, 'inputmode'), - (None, 'ismap'), - (None, 'keytype'), - (None, 'label'), - (None, 'leftspacing'), - (None, 'lang'), - (None, 'list'), - (None, 'longdesc'), - (None, 'loop'), - (None, 'loopcount'), - (None, 'loopend'), - (None, 'loopstart'), - (None, 'low'), - (None, 'lowsrc'), - (None, 'max'), - (None, 'maxlength'), - (None, 'media'), - (None, 'method'), - (None, 'min'), - (None, 'multiple'), - (None, 'name'), - (None, 'nohref'), - (None, 'noshade'), - (None, 'nowrap'), - (None, 'open'), - (None, 'optimum'), - (None, 'pattern'), - (None, 'ping'), - (None, 'point-size'), - (None, 'poster'), - (None, 'pqg'), - (None, 'preload'), - (None, 'prompt'), - (None, 'radiogroup'), - (None, 'readonly'), - (None, 'rel'), - (None, 'repeat-max'), - (None, 'repeat-min'), - (None, 'replace'), - (None, 'required'), - (None, 'rev'), - (None, 'rightspacing'), - (None, 'rows'), - (None, 'rowspan'), - (None, 'rules'), - (None, 'scope'), - (None, 'selected'), - (None, 'shape'), - (None, 'size'), - (None, 'span'), - (None, 'src'), - (None, 'start'), - (None, 'step'), - (None, 'style'), - (None, 'summary'), - (None, 'suppress'), - (None, 'tabindex'), - (None, 'target'), - (None, 'template'), - (None, 'title'), - (None, 'toppadding'), - (None, 'type'), - (None, 'unselectable'), - (None, 'usemap'), - (None, 'urn'), - (None, 'valign'), - (None, 'value'), - (None, 'variable'), - (None, 'volume'), - (None, 'vspace'), - (None, 'vrml'), - (None, 'width'), - (None, 'wrap'), - (namespaces['xml'], 'lang'), - # MathML attributes - (None, 'actiontype'), - (None, 'align'), - (None, 'columnalign'), - (None, 'columnalign'), - (None, 'columnalign'), - (None, 'columnlines'), - (None, 'columnspacing'), - (None, 'columnspan'), - (None, 'depth'), - (None, 'display'), - (None, 'displaystyle'), - (None, 'equalcolumns'), - (None, 'equalrows'), - (None, 'fence'), - (None, 'fontstyle'), - (None, 'fontweight'), - (None, 'frame'), - (None, 'height'), - (None, 'linethickness'), - (None, 'lspace'), - (None, 'mathbackground'), - (None, 'mathcolor'), - (None, 'mathvariant'), - (None, 'mathvariant'), - (None, 'maxsize'), - (None, 'minsize'), - (None, 'other'), - (None, 'rowalign'), - (None, 'rowalign'), - (None, 'rowalign'), - (None, 'rowlines'), - (None, 'rowspacing'), - (None, 'rowspan'), - (None, 'rspace'), - (None, 'scriptlevel'), - (None, 'selection'), - (None, 'separator'), - (None, 'stretchy'), - (None, 'width'), - (None, 'width'), - (namespaces['xlink'], 'href'), - (namespaces['xlink'], 'show'), - (namespaces['xlink'], 'type'), - # SVG attributes - (None, 'accent-height'), - (None, 'accumulate'), - (None, 'additive'), - (None, 'alphabetic'), - (None, 'arabic-form'), - (None, 'ascent'), - (None, 'attributeName'), - (None, 'attributeType'), - (None, 'baseProfile'), - (None, 'bbox'), - (None, 'begin'), - (None, 'by'), - (None, 'calcMode'), - (None, 'cap-height'), - (None, 'class'), - (None, 'clip-path'), - (None, 'color'), - (None, 'color-rendering'), - (None, 'content'), - (None, 'cx'), - (None, 'cy'), - (None, 'd'), - (None, 'dx'), - (None, 'dy'), - (None, 'descent'), - (None, 'display'), - (None, 'dur'), - (None, 'end'), - (None, 'fill'), - (None, 'fill-opacity'), - (None, 'fill-rule'), - (None, 'font-family'), - (None, 'font-size'), - (None, 'font-stretch'), - (None, 'font-style'), - (None, 'font-variant'), - (None, 'font-weight'), - (None, 'from'), - (None, 'fx'), - (None, 'fy'), - (None, 'g1'), - (None, 'g2'), - (None, 
'glyph-name'), - (None, 'gradientUnits'), - (None, 'hanging'), - (None, 'height'), - (None, 'horiz-adv-x'), - (None, 'horiz-origin-x'), - (None, 'id'), - (None, 'ideographic'), - (None, 'k'), - (None, 'keyPoints'), - (None, 'keySplines'), - (None, 'keyTimes'), - (None, 'lang'), - (None, 'marker-end'), - (None, 'marker-mid'), - (None, 'marker-start'), - (None, 'markerHeight'), - (None, 'markerUnits'), - (None, 'markerWidth'), - (None, 'mathematical'), - (None, 'max'), - (None, 'min'), - (None, 'name'), - (None, 'offset'), - (None, 'opacity'), - (None, 'orient'), - (None, 'origin'), - (None, 'overline-position'), - (None, 'overline-thickness'), - (None, 'panose-1'), - (None, 'path'), - (None, 'pathLength'), - (None, 'points'), - (None, 'preserveAspectRatio'), - (None, 'r'), - (None, 'refX'), - (None, 'refY'), - (None, 'repeatCount'), - (None, 'repeatDur'), - (None, 'requiredExtensions'), - (None, 'requiredFeatures'), - (None, 'restart'), - (None, 'rotate'), - (None, 'rx'), - (None, 'ry'), - (None, 'slope'), - (None, 'stemh'), - (None, 'stemv'), - (None, 'stop-color'), - (None, 'stop-opacity'), - (None, 'strikethrough-position'), - (None, 'strikethrough-thickness'), - (None, 'stroke'), - (None, 'stroke-dasharray'), - (None, 'stroke-dashoffset'), - (None, 'stroke-linecap'), - (None, 'stroke-linejoin'), - (None, 'stroke-miterlimit'), - (None, 'stroke-opacity'), - (None, 'stroke-width'), - (None, 'systemLanguage'), - (None, 'target'), - (None, 'text-anchor'), - (None, 'to'), - (None, 'transform'), - (None, 'type'), - (None, 'u1'), - (None, 'u2'), - (None, 'underline-position'), - (None, 'underline-thickness'), - (None, 'unicode'), - (None, 'unicode-range'), - (None, 'units-per-em'), - (None, 'values'), - (None, 'version'), - (None, 'viewBox'), - (None, 'visibility'), - (None, 'width'), - (None, 'widths'), - (None, 'x'), - (None, 'x-height'), - (None, 'x1'), - (None, 'x2'), - (namespaces['xlink'], 'actuate'), - (namespaces['xlink'], 'arcrole'), - (namespaces['xlink'], 'href'), - (namespaces['xlink'], 'role'), - (namespaces['xlink'], 'show'), - (namespaces['xlink'], 'title'), - (namespaces['xlink'], 'type'), - (namespaces['xml'], 'base'), - (namespaces['xml'], 'lang'), - (namespaces['xml'], 'space'), - (None, 'y'), - (None, 'y1'), - (None, 'y2'), - (None, 'zoomAndPan'), -)) - -attr_val_is_uri = frozenset(( - (None, 'href'), - (None, 'src'), - (None, 'cite'), - (None, 'action'), - (None, 'longdesc'), - (None, 'poster'), - (None, 'background'), - (None, 'datasrc'), - (None, 'dynsrc'), - (None, 'lowsrc'), - (None, 'ping'), - (namespaces['xlink'], 'href'), - (namespaces['xml'], 'base'), -)) - -svg_attr_val_allows_ref = frozenset(( - (None, 'clip-path'), - (None, 'color-profile'), - (None, 'cursor'), - (None, 'fill'), - (None, 'filter'), - (None, 'marker'), - (None, 'marker-start'), - (None, 'marker-mid'), - (None, 'marker-end'), - (None, 'mask'), - (None, 'stroke'), -)) - -svg_allow_local_href = frozenset(( - (None, 'altGlyph'), - (None, 'animate'), - (None, 'animateColor'), - (None, 'animateMotion'), - (None, 'animateTransform'), - (None, 'cursor'), - (None, 'feImage'), - (None, 'filter'), - (None, 'linearGradient'), - (None, 'pattern'), - (None, 'radialGradient'), - (None, 'textpath'), - (None, 'tref'), - (None, 'set'), - (None, 'use') -)) - -allowed_css_properties = frozenset(( - 'azimuth', - 'background-color', - 'border-bottom-color', - 'border-collapse', - 'border-color', - 'border-left-color', - 'border-right-color', - 'border-top-color', - 'clear', - 'color', - 'cursor', - 'direction', - 
'display', - 'elevation', - 'float', - 'font', - 'font-family', - 'font-size', - 'font-style', - 'font-variant', - 'font-weight', - 'height', - 'letter-spacing', - 'line-height', - 'overflow', - 'pause', - 'pause-after', - 'pause-before', - 'pitch', - 'pitch-range', - 'richness', - 'speak', - 'speak-header', - 'speak-numeral', - 'speak-punctuation', - 'speech-rate', - 'stress', - 'text-align', - 'text-decoration', - 'text-indent', - 'unicode-bidi', - 'vertical-align', - 'voice-family', - 'volume', - 'white-space', - 'width', -)) - -allowed_css_keywords = frozenset(( - 'auto', - 'aqua', - 'black', - 'block', - 'blue', - 'bold', - 'both', - 'bottom', - 'brown', - 'center', - 'collapse', - 'dashed', - 'dotted', - 'fuchsia', - 'gray', - 'green', - '!important', - 'italic', - 'left', - 'lime', - 'maroon', - 'medium', - 'none', - 'navy', - 'normal', - 'nowrap', - 'olive', - 'pointer', - 'purple', - 'red', - 'right', - 'solid', - 'silver', - 'teal', - 'top', - 'transparent', - 'underline', - 'white', - 'yellow', -)) - -allowed_svg_properties = frozenset(( - 'fill', - 'fill-opacity', - 'fill-rule', - 'stroke', - 'stroke-width', - 'stroke-linecap', - 'stroke-linejoin', - 'stroke-opacity', -)) - -allowed_protocols = frozenset(( - 'ed2k', - 'ftp', - 'http', - 'https', - 'irc', - 'mailto', - 'news', - 'gopher', - 'nntp', - 'telnet', - 'webcal', - 'xmpp', - 'callto', - 'feed', - 'urn', - 'aim', - 'rsync', - 'tag', - 'ssh', - 'sftp', - 'rtsp', - 'afs', - 'data', -)) - -allowed_content_types = frozenset(( - 'image/png', - 'image/jpeg', - 'image/gif', - 'image/webp', - 'image/bmp', - 'text/plain', -)) - - -data_content_type = re.compile(r''' - ^ - # Match a content type / - (?P[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) - # Match any character set and encoding - (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) - |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) - # Assume the rest is data - ,.* - $ - ''', - re.VERBOSE) - - -class Filter(base.Filter): - """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes""" - def __init__(self, - source, - allowed_elements=allowed_elements, - allowed_attributes=allowed_attributes, - allowed_css_properties=allowed_css_properties, - allowed_css_keywords=allowed_css_keywords, - allowed_svg_properties=allowed_svg_properties, - allowed_protocols=allowed_protocols, - allowed_content_types=allowed_content_types, - attr_val_is_uri=attr_val_is_uri, - svg_attr_val_allows_ref=svg_attr_val_allows_ref, - svg_allow_local_href=svg_allow_local_href): - """Creates a Filter - - :arg allowed_elements: set of elements to allow--everything else will - be escaped - - :arg allowed_attributes: set of attributes to allow in - elements--everything else will be stripped - - :arg allowed_css_properties: set of CSS properties to allow--everything - else will be stripped - - :arg allowed_css_keywords: set of CSS keywords to allow--everything - else will be stripped - - :arg allowed_svg_properties: set of SVG properties to allow--everything - else will be removed - - :arg allowed_protocols: set of allowed protocols for URIs - - :arg allowed_content_types: set of allowed content types for ``data`` URIs. 
- - :arg attr_val_is_uri: set of attributes that have URI values--values - that have a scheme not listed in ``allowed_protocols`` are removed - - :arg svg_attr_val_allows_ref: set of SVG attributes that can have - references - - :arg svg_allow_local_href: set of SVG elements that can have local - hrefs--these are removed - - """ - super(Filter, self).__init__(source) - - warnings.warn(_deprecation_msg, DeprecationWarning) - - self.allowed_elements = allowed_elements - self.allowed_attributes = allowed_attributes - self.allowed_css_properties = allowed_css_properties - self.allowed_css_keywords = allowed_css_keywords - self.allowed_svg_properties = allowed_svg_properties - self.allowed_protocols = allowed_protocols - self.allowed_content_types = allowed_content_types - self.attr_val_is_uri = attr_val_is_uri - self.svg_attr_val_allows_ref = svg_attr_val_allows_ref - self.svg_allow_local_href = svg_allow_local_href - - def __iter__(self): - for token in base.Filter.__iter__(self): - token = self.sanitize_token(token) - if token: - yield token - - # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and - # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes - # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and - # ALLOWED_CSS_KEYWORDS, are allowed through. attributes in ATTR_VAL_IS_URI - # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are - # allowed. - # - # sanitize_html('') - # => <script> do_nasty_stuff() </script> - # sanitize_html('Click here for $100') - # => Click here for $100 - def sanitize_token(self, token): - - # accommodate filters which use token_type differently - token_type = token["type"] - if token_type in ("StartTag", "EndTag", "EmptyTag"): - name = token["name"] - namespace = token["namespace"] - if ((namespace, name) in self.allowed_elements or - (namespace is None and - (namespaces["html"], name) in self.allowed_elements)): - return self.allowed_token(token) - else: - return self.disallowed_token(token) - elif token_type == "Comment": - pass - else: - return token - - def allowed_token(self, token): - if "data" in token: - attrs = token["data"] - attr_names = set(attrs.keys()) - - # Remove forbidden attributes - for to_remove in (attr_names - self.allowed_attributes): - del token["data"][to_remove] - attr_names.remove(to_remove) - - # Remove attributes with disallowed URL values - for attr in (attr_names & self.attr_val_is_uri): - assert attr in attrs - # I don't have a clue where this regexp comes from or why it matches those - # characters, nor why we call unescape. I just know it's always been here. - # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all - # this will do is remove *more* than it otherwise would. 
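-                # Illustrative note (not part of the upstream html5lib file):
-                # browsers tolerate control characters inside a URL scheme, so an
-                # attribute value such as "java\tscript:alert(1)" would still run.
-                # After the re.sub() below strips the embedded tab it reads
-                # "javascript:alert(1)"; urlparse() then reports the scheme as
-                # "javascript", which is not in allowed_protocols, and the
-                # attribute is deleted.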
- val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', - unescape(attrs[attr])).lower() - # remove replacement characters from unescaped characters - val_unescaped = val_unescaped.replace("\ufffd", "") - try: - uri = urlparse.urlparse(val_unescaped) - except ValueError: - uri = None - del attrs[attr] - if uri and uri.scheme: - if uri.scheme not in self.allowed_protocols: - del attrs[attr] - if uri.scheme == 'data': - m = data_content_type.match(uri.path) - if not m: - del attrs[attr] - elif m.group('content_type') not in self.allowed_content_types: - del attrs[attr] - - for attr in self.svg_attr_val_allows_ref: - if attr in attrs: - attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', - ' ', - unescape(attrs[attr])) - if (token["name"] in self.svg_allow_local_href and - (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', - attrs[(namespaces['xlink'], 'href')])): - del attrs[(namespaces['xlink'], 'href')] - if (None, 'style') in attrs: - attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) - token["data"] = attrs - return token - - def disallowed_token(self, token): - token_type = token["type"] - if token_type == "EndTag": - token["data"] = "" % token["name"] - elif token["data"]: - assert token_type in ("StartTag", "EmptyTag") - attrs = [] - for (ns, name), v in token["data"].items(): - attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) - token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) - else: - token["data"] = "<%s>" % token["name"] - if token.get("selfClosing"): - token["data"] = token["data"][:-1] + "/>" - - token["type"] = "Characters" - - del token["name"] - return token - - def sanitize_css(self, style): - # disallow urls - style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) - - # gauntlet - if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): - return '' - if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): - return '' - - clean = [] - for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): - if not value: - continue - if prop.lower() in self.allowed_css_properties: - clean.append(prop + ': ' + value + ';') - elif prop.split('-')[0].lower() in ['background', 'border', 'margin', - 'padding']: - for keyword in value.split(): - if keyword not in self.allowed_css_keywords and \ - not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa - break - else: - clean.append(prop + ': ' + value + ';') - elif prop.lower() in self.allowed_svg_properties: - clean.append(prop + ': ' + value + ';') - - return ' '.join(clean) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/venv/Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py deleted file mode 100644 index 0d12584..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -import re - -from . 
import base -from ..constants import rcdataElements, spaceCharacters -spaceCharacters = "".join(spaceCharacters) - -SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) - - -class Filter(base.Filter): - """Collapses whitespace except in pre, textarea, and script elements""" - spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) - - def __iter__(self): - preserve = 0 - for token in base.Filter.__iter__(self): - type = token["type"] - if type == "StartTag" \ - and (preserve or token["name"] in self.spacePreserveElements): - preserve += 1 - - elif type == "EndTag" and preserve: - preserve -= 1 - - elif not preserve and type == "SpaceCharacters" and token["data"]: - # Test on token["data"] above to not introduce spaces where there were not - token["data"] = " " - - elif not preserve and type == "Characters": - token["data"] = collapse_spaces(token["data"]) - - yield token - - -def collapse_spaces(text): - return SPACES_REGEX.sub(' ', text) diff --git a/venv/Lib/site-packages/pip/_vendor/html5lib/html5parser.py b/venv/Lib/site-packages/pip/_vendor/html5lib/html5parser.py deleted file mode 100644 index d06784f..0000000 --- a/venv/Lib/site-packages/pip/_vendor/html5lib/html5parser.py +++ /dev/null @@ -1,2795 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import with_metaclass, viewkeys - -import types - -from . import _inputstream -from . import _tokenizer - -from . import treebuilders -from .treebuilders.base import Marker - -from . import _utils -from .constants import ( - spaceCharacters, asciiUpper2Lower, - specialElements, headingElements, cdataElements, rcdataElements, - tokenTypes, tagTokenTypes, - namespaces, - htmlIntegrationPointElements, mathmlTextIntegrationPointElements, - adjustForeignAttributes as adjustForeignAttributesMap, - adjustMathMLAttributes, adjustSVGAttributes, - E, - _ReparseException -) - - -def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): - """Parse an HTML document as a string or file-like object into a tree - - :arg doc: the document to parse as a string or file-like object - - :arg treebuilder: the treebuilder to use when parsing - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import parse - >>> parse('

<html><body><p>This is a doc</p></body></html>
') - - - """ - tb = treebuilders.getTreeBuilder(treebuilder) - p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) - return p.parse(doc, **kwargs) - - -def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): - """Parse an HTML fragment as a string or file-like object into a tree - - :arg doc: the fragment to parse as a string or file-like object - - :arg container: the container context to parse the fragment in - - :arg treebuilder: the treebuilder to use when parsing - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import parseFragment - >>> parseFragment('this is a fragment') - - - """ - tb = treebuilders.getTreeBuilder(treebuilder) - p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) - return p.parseFragment(doc, container=container, **kwargs) - - -def method_decorator_metaclass(function): - class Decorated(type): - def __new__(meta, classname, bases, classDict): - for attributeName, attribute in classDict.items(): - if isinstance(attribute, types.FunctionType): - attribute = function(attribute) - - classDict[attributeName] = attribute - return type.__new__(meta, classname, bases, classDict) - return Decorated - - -class HTMLParser(object): - """HTML parser - - Generates a tree structure from a stream of (possibly malformed) HTML. - - """ - - def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): - """ - :arg tree: a treebuilder class controlling the type of tree that will be - returned. Built in treebuilders can be accessed through - html5lib.treebuilders.getTreeBuilder(treeType) - - :arg strict: raise an exception when a parse error is encountered - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :arg debug: whether or not to enable debug mode which logs things - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() # generates parser with etree builder - >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict - - """ - - # Raise an exception on the first error encountered - self.strict = strict - - if tree is None: - tree = treebuilders.getTreeBuilder("etree") - self.tree = tree(namespaceHTMLElements) - self.errors = [] - - self.phases = {name: cls(self, self.tree) for name, cls in - getPhases(debug).items()} - - def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): - - self.innerHTMLMode = innerHTML - self.container = container - self.scripting = scripting - self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) - self.reset() - - try: - self.mainLoop() - except _ReparseException: - self.reset() - self.mainLoop() - - def reset(self): - self.tree.reset() - self.firstStartTag = False - self.errors = [] - self.log = [] # only used with debug mode - # "quirks" / "limited quirks" / "no quirks" - self.compatMode = "no quirks" - - if self.innerHTMLMode: - self.innerHTML = self.container.lower() - - if self.innerHTML in cdataElements: - self.tokenizer.state = self.tokenizer.rcdataState - elif self.innerHTML in rcdataElements: - self.tokenizer.state = self.tokenizer.rawtextState - elif self.innerHTML == 'plaintext': - self.tokenizer.state = self.tokenizer.plaintextState - else: - # state already is data state - # self.tokenizer.state = self.tokenizer.dataState - pass - self.phase = self.phases["beforeHtml"] - self.phase.insertHtmlElement() 
- self.resetInsertionMode() - else: - self.innerHTML = False # pylint:disable=redefined-variable-type - self.phase = self.phases["initial"] - - self.lastPhase = None - - self.beforeRCDataPhase = None - - self.framesetOK = True - - @property - def documentEncoding(self): - """Name of the character encoding that was used to decode the input stream, or - :obj:`None` if that is not determined yet - - """ - if not hasattr(self, 'tokenizer'): - return None - return self.tokenizer.stream.charEncoding[0].name - - def isHTMLIntegrationPoint(self, element): - if (element.name == "annotation-xml" and - element.namespace == namespaces["mathml"]): - return ("encoding" in element.attributes and - element.attributes["encoding"].translate( - asciiUpper2Lower) in - ("text/html", "application/xhtml+xml")) - else: - return (element.namespace, element.name) in htmlIntegrationPointElements - - def isMathMLTextIntegrationPoint(self, element): - return (element.namespace, element.name) in mathmlTextIntegrationPointElements - - def mainLoop(self): - CharactersToken = tokenTypes["Characters"] - SpaceCharactersToken = tokenTypes["SpaceCharacters"] - StartTagToken = tokenTypes["StartTag"] - EndTagToken = tokenTypes["EndTag"] - CommentToken = tokenTypes["Comment"] - DoctypeToken = tokenTypes["Doctype"] - ParseErrorToken = tokenTypes["ParseError"] - - for token in self.tokenizer: - prev_token = None - new_token = token - while new_token is not None: - prev_token = new_token - currentNode = self.tree.openElements[-1] if self.tree.openElements else None - currentNodeNamespace = currentNode.namespace if currentNode else None - currentNodeName = currentNode.name if currentNode else None - - type = new_token["type"] - - if type == ParseErrorToken: - self.parseError(new_token["data"], new_token.get("datavars", {})) - new_token = None - else: - if (len(self.tree.openElements) == 0 or - currentNodeNamespace == self.tree.defaultNamespace or - (self.isMathMLTextIntegrationPoint(currentNode) and - ((type == StartTagToken and - token["name"] not in frozenset(["mglyph", "malignmark"])) or - type in (CharactersToken, SpaceCharactersToken))) or - (currentNodeNamespace == namespaces["mathml"] and - currentNodeName == "annotation-xml" and - type == StartTagToken and - token["name"] == "svg") or - (self.isHTMLIntegrationPoint(currentNode) and - type in (StartTagToken, CharactersToken, SpaceCharactersToken))): - phase = self.phase - else: - phase = self.phases["inForeignContent"] - - if type == CharactersToken: - new_token = phase.processCharacters(new_token) - elif type == SpaceCharactersToken: - new_token = phase.processSpaceCharacters(new_token) - elif type == StartTagToken: - new_token = phase.processStartTag(new_token) - elif type == EndTagToken: - new_token = phase.processEndTag(new_token) - elif type == CommentToken: - new_token = phase.processComment(new_token) - elif type == DoctypeToken: - new_token = phase.processDoctype(new_token) - - if (type == StartTagToken and prev_token["selfClosing"] and - not prev_token["selfClosingAcknowledged"]): - self.parseError("non-void-element-with-trailing-solidus", - {"name": prev_token["name"]}) - - # When the loop finishes it's EOF - reprocess = True - phases = [] - while reprocess: - phases.append(self.phase) - reprocess = self.phase.processEOF() - if reprocess: - assert self.phase not in phases - - def parse(self, stream, *args, **kwargs): - """Parse a HTML document into a well-formed tree - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional 
encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element). - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parse('

<html><body><p>This is a doc</p></body></html>
') - - - """ - self._parse(stream, False, None, *args, **kwargs) - return self.tree.getDocument() - - def parseFragment(self, stream, *args, **kwargs): - """Parse a HTML fragment into a well-formed tree fragment - - :arg container: name of the element we're setting the innerHTML - property if set to None, default to 'div' - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element) - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parseFragment('this is a fragment') - - - """ - self._parse(stream, True, *args, **kwargs) - return self.tree.getFragment() - - def parseError(self, errorcode="XXX-undefined-error", datavars=None): - # XXX The idea is to make errorcode mandatory. - if datavars is None: - datavars = {} - self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) - if self.strict: - raise ParseError(E[errorcode] % datavars) - - def adjustMathMLAttributes(self, token): - adjust_attributes(token, adjustMathMLAttributes) - - def adjustSVGAttributes(self, token): - adjust_attributes(token, adjustSVGAttributes) - - def adjustForeignAttributes(self, token): - adjust_attributes(token, adjustForeignAttributesMap) - - def reparseTokenNormal(self, token): - # pylint:disable=unused-argument - self.parser.phase() - - def resetInsertionMode(self): - # The name of this method is mostly historical. (It's also used in the - # specification.) - last = False - newModes = { - "select": "inSelect", - "td": "inCell", - "th": "inCell", - "tr": "inRow", - "tbody": "inTableBody", - "thead": "inTableBody", - "tfoot": "inTableBody", - "caption": "inCaption", - "colgroup": "inColumnGroup", - "table": "inTable", - "head": "inBody", - "body": "inBody", - "frameset": "inFrameset", - "html": "beforeHead" - } - for node in self.tree.openElements[::-1]: - nodeName = node.name - new_phase = None - if node == self.tree.openElements[0]: - assert self.innerHTML - last = True - nodeName = self.innerHTML - # Check for conditions that should only happen in the innerHTML - # case - if nodeName in ("select", "colgroup", "head", "html"): - assert self.innerHTML - - if not last and node.namespace != self.tree.defaultNamespace: - continue - - if nodeName in newModes: - new_phase = self.phases[newModes[nodeName]] - break - elif last: - new_phase = self.phases["inBody"] - break - - self.phase = new_phase - - def parseRCDataRawtext(self, token, contentType): - # Generic RCDATA/RAWTEXT Parsing algorithm - assert contentType in ("RAWTEXT", "RCDATA") - - self.tree.insertElement(token) - - if contentType == "RAWTEXT": - self.tokenizer.state = self.tokenizer.rawtextState - else: - self.tokenizer.state = self.tokenizer.rcdataState - - self.originalPhase = self.phase - - self.phase = self.phases["text"] - - -@_utils.memoize -def getPhases(debug): - def log(function): - """Logger that records which phase processes each token""" - type_names = {value: key for key, value in tokenTypes.items()} - - def wrapped(self, *args, **kwargs): - if function.__name__.startswith("process") and len(args) > 0: - token = args[0] - info = {"type": type_names[token['type']]} - if token['type'] in tagTokenTypes: - info["name"] = token['name'] - - 
self.parser.log.append((self.parser.tokenizer.state.__name__, - self.parser.phase.__class__.__name__, - self.__class__.__name__, - function.__name__, - info)) - return function(self, *args, **kwargs) - else: - return function(self, *args, **kwargs) - return wrapped - - def getMetaclass(use_metaclass, metaclass_func): - if use_metaclass: - return method_decorator_metaclass(metaclass_func) - else: - return type - - # pylint:disable=unused-argument - class Phase(with_metaclass(getMetaclass(debug, log))): - """Base class for helper object that implements each phase of processing - """ - __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache") - - def __init__(self, parser, tree): - self.parser = parser - self.tree = tree - self.__startTagCache = {} - self.__endTagCache = {} - - def processEOF(self): - raise NotImplementedError - - def processComment(self, token): - # For most phases the following is correct. Where it's not it will be - # overridden. - self.tree.insertComment(token, self.tree.openElements[-1]) - - def processDoctype(self, token): - self.parser.parseError("unexpected-doctype") - - def processCharacters(self, token): - self.tree.insertText(token["data"]) - - def processSpaceCharacters(self, token): - self.tree.insertText(token["data"]) - - def processStartTag(self, token): - # Note the caching is done here rather than BoundMethodDispatcher as doing it there - # requires a circular reference to the Phase, and this ends up with a significant - # (CPython 2.7, 3.8) GC cost when parsing many short inputs - name = token["name"] - # In Py2, using `in` is quicker in general than try/except KeyError - # In Py3, `in` is quicker when there are few cache hits (typically short inputs) - if name in self.__startTagCache: - func = self.__startTagCache[name] - else: - func = self.__startTagCache[name] = self.startTagHandler[name] - # bound the cache size in case we get loads of unknown tags - while len(self.__startTagCache) > len(self.startTagHandler) * 1.1: - # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 - self.__startTagCache.pop(next(iter(self.__startTagCache))) - return func(token) - - def startTagHtml(self, token): - if not self.parser.firstStartTag and token["name"] == "html": - self.parser.parseError("non-html-root") - # XXX Need a check here to see if the first start tag token emitted is - # this token... If it's not, invoke self.parser.parseError(). 
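-            # Illustrative note (not part of the upstream html5lib file): a stray
-            # second start tag such as <html lang="en"> does not create another
-            # element; the loop below only copies attributes the existing root
-            # element lacks (here, lang="en") onto it.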
- for attr, value in token["data"].items(): - if attr not in self.tree.openElements[0].attributes: - self.tree.openElements[0].attributes[attr] = value - self.parser.firstStartTag = False - - def processEndTag(self, token): - # Note the caching is done here rather than BoundMethodDispatcher as doing it there - # requires a circular reference to the Phase, and this ends up with a significant - # (CPython 2.7, 3.8) GC cost when parsing many short inputs - name = token["name"] - # In Py2, using `in` is quicker in general than try/except KeyError - # In Py3, `in` is quicker when there are few cache hits (typically short inputs) - if name in self.__endTagCache: - func = self.__endTagCache[name] - else: - func = self.__endTagCache[name] = self.endTagHandler[name] - # bound the cache size in case we get loads of unknown tags - while len(self.__endTagCache) > len(self.endTagHandler) * 1.1: - # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 - self.__endTagCache.pop(next(iter(self.__endTagCache))) - return func(token) - - class InitialPhase(Phase): - __slots__ = tuple() - - def processSpaceCharacters(self, token): - pass - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processDoctype(self, token): - name = token["name"] - publicId = token["publicId"] - systemId = token["systemId"] - correct = token["correct"] - - if (name != "html" or publicId is not None or - systemId is not None and systemId != "about:legacy-compat"): - self.parser.parseError("unknown-doctype") - - if publicId is None: - publicId = "" - - self.tree.insertDoctype(token) - - if publicId != "": - publicId = publicId.translate(asciiUpper2Lower) - - if (not correct or token["name"] != "html" or - publicId.startswith( - ("+//silmaril//dtd html pro v0r11 19970101//", - "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", - "-//as//dtd html 3.0 aswedit + extensions//", - "-//ietf//dtd html 2.0 level 1//", - "-//ietf//dtd html 2.0 level 2//", - "-//ietf//dtd html 2.0 strict level 1//", - "-//ietf//dtd html 2.0 strict level 2//", - "-//ietf//dtd html 2.0 strict//", - "-//ietf//dtd html 2.0//", - "-//ietf//dtd html 2.1e//", - "-//ietf//dtd html 3.0//", - "-//ietf//dtd html 3.2 final//", - "-//ietf//dtd html 3.2//", - "-//ietf//dtd html 3//", - "-//ietf//dtd html level 0//", - "-//ietf//dtd html level 1//", - "-//ietf//dtd html level 2//", - "-//ietf//dtd html level 3//", - "-//ietf//dtd html strict level 0//", - "-//ietf//dtd html strict level 1//", - "-//ietf//dtd html strict level 2//", - "-//ietf//dtd html strict level 3//", - "-//ietf//dtd html strict//", - "-//ietf//dtd html//", - "-//metrius//dtd metrius presentational//", - "-//microsoft//dtd internet explorer 2.0 html strict//", - "-//microsoft//dtd internet explorer 2.0 html//", - "-//microsoft//dtd internet explorer 2.0 tables//", - "-//microsoft//dtd internet explorer 3.0 html strict//", - "-//microsoft//dtd internet explorer 3.0 html//", - "-//microsoft//dtd internet explorer 3.0 tables//", - "-//netscape comm. corp.//dtd html//", - "-//netscape comm. 
corp.//dtd strict html//", - "-//o'reilly and associates//dtd html 2.0//", - "-//o'reilly and associates//dtd html extended 1.0//", - "-//o'reilly and associates//dtd html extended relaxed 1.0//", - "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", - "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", - "-//spyglass//dtd html 2.0 extended//", - "-//sq//dtd html 2.0 hotmetal + extensions//", - "-//sun microsystems corp.//dtd hotjava html//", - "-//sun microsystems corp.//dtd hotjava strict html//", - "-//w3c//dtd html 3 1995-03-24//", - "-//w3c//dtd html 3.2 draft//", - "-//w3c//dtd html 3.2 final//", - "-//w3c//dtd html 3.2//", - "-//w3c//dtd html 3.2s draft//", - "-//w3c//dtd html 4.0 frameset//", - "-//w3c//dtd html 4.0 transitional//", - "-//w3c//dtd html experimental 19960712//", - "-//w3c//dtd html experimental 970421//", - "-//w3c//dtd w3 html//", - "-//w3o//dtd w3 html 3.0//", - "-//webtechs//dtd mozilla html 2.0//", - "-//webtechs//dtd mozilla html//")) or - publicId in ("-//w3o//dtd w3 html strict 3.0//en//", - "-/w3c/dtd html 4.0 transitional/en", - "html") or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is None or - systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): - self.parser.compatMode = "quirks" - elif (publicId.startswith( - ("-//w3c//dtd xhtml 1.0 frameset//", - "-//w3c//dtd xhtml 1.0 transitional//")) or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is not None): - self.parser.compatMode = "limited quirks" - - self.parser.phase = self.parser.phases["beforeHtml"] - - def anythingElse(self): - self.parser.compatMode = "quirks" - self.parser.phase = self.parser.phases["beforeHtml"] - - def processCharacters(self, token): - self.parser.parseError("expected-doctype-but-got-chars") - self.anythingElse() - return token - - def processStartTag(self, token): - self.parser.parseError("expected-doctype-but-got-start-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEndTag(self, token): - self.parser.parseError("expected-doctype-but-got-end-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEOF(self): - self.parser.parseError("expected-doctype-but-got-eof") - self.anythingElse() - return True - - class BeforeHtmlPhase(Phase): - __slots__ = tuple() - - # helper methods - def insertHtmlElement(self): - self.tree.insertRoot(impliedTagToken("html", "StartTag")) - self.parser.phase = self.parser.phases["beforeHead"] - - # other - def processEOF(self): - self.insertHtmlElement() - return True - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.insertHtmlElement() - return token - - def processStartTag(self, token): - if token["name"] == "html": - self.parser.firstStartTag = True - self.insertHtmlElement() - return token - - def processEndTag(self, token): - if token["name"] not in ("head", "body", "html", "br"): - self.parser.parseError("unexpected-end-tag-before-html", - {"name": token["name"]}) - else: - self.insertHtmlElement() - return token - - class BeforeHeadPhase(Phase): - __slots__ = tuple() - - def processEOF(self): - self.startTagHead(impliedTagToken("head", "StartTag")) - return True - - def processSpaceCharacters(self, token): - 
pass - - def processCharacters(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.tree.insertElement(token) - self.tree.headPointer = self.tree.openElements[-1] - self.parser.phase = self.parser.phases["inHead"] - - def startTagOther(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagImplyHead(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagOther(self, token): - self.parser.parseError("end-tag-after-implied-root", - {"name": token["name"]}) - - startTagHandler = _utils.MethodDispatcher([ - ("html", startTagHtml), - ("head", startTagHead) - ]) - startTagHandler.default = startTagOther - - endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), endTagImplyHead) - ]) - endTagHandler.default = endTagOther - - class InHeadPhase(Phase): - __slots__ = tuple() - - # the real thing - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.parser.parseError("two-heads-are-not-better-than-one") - - def startTagBaseLinkCommand(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - def startTagMeta(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - attributes = token["data"] - if self.parser.tokenizer.stream.charEncoding[1] == "tentative": - if "charset" in attributes: - self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) - elif ("content" in attributes and - "http-equiv" in attributes and - attributes["http-equiv"].lower() == "content-type"): - # Encoding it as UTF-8 here is a hack, as really we should pass - # the abstract Unicode string, and just use the - # ContentAttrParser on that, but using UTF-8 allows all chars - # to be encoded and as a ASCII-superset works. 
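-                # Illustrative note (not part of the upstream html5lib file): for a
-                # tag such as <meta http-equiv="Content-Type"
-                # content="text/html; charset=ISO-8859-1">, ContentAttrParser pulls
-                # "ISO-8859-1" out of the content value and changeEncoding() switches
-                # the input stream to that codec, which may trigger a reparse of the
-                # document from the beginning.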
- data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) - parser = _inputstream.ContentAttrParser(data) - codec = parser.parse() - self.parser.tokenizer.stream.changeEncoding(codec) - - def startTagTitle(self, token): - self.parser.parseRCDataRawtext(token, "RCDATA") - - def startTagNoFramesStyle(self, token): - # Need to decide whether to implement the scripting-disabled case - self.parser.parseRCDataRawtext(token, "RAWTEXT") - - def startTagNoscript(self, token): - if self.parser.scripting: - self.parser.parseRCDataRawtext(token, "RAWTEXT") - else: - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inHeadNoscript"] - - def startTagScript(self, token): - self.tree.insertElement(token) - self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState - self.parser.originalPhase = self.parser.phase - self.parser.phase = self.parser.phases["text"] - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHead(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "head", "Expected head got %s" % node.name - self.parser.phase = self.parser.phases["afterHead"] - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.endTagHead(impliedTagToken("head")) - - startTagHandler = _utils.MethodDispatcher([ - ("html", startTagHtml), - ("title", startTagTitle), - (("noframes", "style"), startTagNoFramesStyle), - ("noscript", startTagNoscript), - ("script", startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - startTagBaseLinkCommand), - ("meta", startTagMeta), - ("head", startTagHead) - ]) - startTagHandler.default = startTagOther - - endTagHandler = _utils.MethodDispatcher([ - ("head", endTagHead), - (("br", "html", "body"), endTagHtmlBodyBr) - ]) - endTagHandler.default = endTagOther - - class InHeadNoscriptPhase(Phase): - __slots__ = tuple() - - def processEOF(self): - self.parser.parseError("eof-in-head-noscript") - self.anythingElse() - return True - - def processComment(self, token): - return self.parser.phases["inHead"].processComment(token) - - def processCharacters(self, token): - self.parser.parseError("char-in-head-noscript") - self.anythingElse() - return token - - def processSpaceCharacters(self, token): - return self.parser.phases["inHead"].processSpaceCharacters(token) - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBaseLinkCommand(self, token): - return self.parser.phases["inHead"].processStartTag(token) - - def startTagHeadNoscript(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagNoscript(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "noscript", "Expected noscript got %s" % node.name - self.parser.phase = self.parser.phases["inHead"] - - def endTagBr(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - # Caller must raise parse error first! 
- self.endTagNoscript(impliedTagToken("noscript")) - - startTagHandler = _utils.MethodDispatcher([ - ("html", startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand), - (("head", "noscript"), startTagHeadNoscript), - ]) - startTagHandler.default = startTagOther - - endTagHandler = _utils.MethodDispatcher([ - ("noscript", endTagNoscript), - ("br", endTagBr), - ]) - endTagHandler.default = endTagOther - - class AfterHeadPhase(Phase): - __slots__ = tuple() - - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBody(self, token): - self.parser.framesetOK = False - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inBody"] - - def startTagFrameset(self, token): - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inFrameset"] - - def startTagFromHead(self, token): - self.parser.parseError("unexpected-start-tag-out-of-my-head", - {"name": token["name"]}) - self.tree.openElements.append(self.tree.headPointer) - self.parser.phases["inHead"].processStartTag(token) - for node in self.tree.openElements[::-1]: - if node.name == "head": - self.tree.openElements.remove(node) - break - - def startTagHead(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.tree.insertElement(impliedTagToken("body", "StartTag")) - self.parser.phase = self.parser.phases["inBody"] - self.parser.framesetOK = True - - startTagHandler = _utils.MethodDispatcher([ - ("html", startTagHtml), - ("body", startTagBody), - ("frameset", startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - startTagFromHead), - ("head", startTagHead) - ]) - startTagHandler.default = startTagOther - endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - endTagHtmlBodyBr)]) - endTagHandler.default = endTagOther - - class InBodyPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody - # the really-really-really-very crazy mode - __slots__ = ("processSpaceCharacters",) - - def __init__(self, *args, **kwargs): - super(InBodyPhase, self).__init__(*args, **kwargs) - # Set this to the default handler - self.processSpaceCharacters = self.processSpaceCharactersNonPre - - def isMatchingFormattingElement(self, node1, node2): - return (node1.name == node2.name and - node1.namespace == node2.namespace and - node1.attributes == node2.attributes) - - # helper - def addFormattingElement(self, token): - self.tree.insertElement(token) - element = self.tree.openElements[-1] - - matchingElements = [] - for node in self.tree.activeFormattingElements[::-1]: - if node is Marker: - break - elif self.isMatchingFormattingElement(node, element): - matchingElements.append(node) - - assert len(matchingElements) <= 3 - if len(matchingElements) == 3: - self.tree.activeFormattingElements.remove(matchingElements[-1]) - self.tree.activeFormattingElements.append(element) - - # the real deal - def processEOF(self): - allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", 
"td", - "tfoot", "th", "thead", "tr", "body", - "html")) - for node in self.tree.openElements[::-1]: - if node.name not in allowed_elements: - self.parser.parseError("expected-closing-tag-but-got-eof") - break - # Stop parsing - - def processSpaceCharactersDropNewline(self, token): - # Sometimes (start of
, , and ";
-	support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;
-
-	// Support: IE <=9 only
-	// IE <=9 replaces <option> elements with their contents when inserted
-	// outside of the select element.
-	div.innerHTML = "<option></option>";
-	support.option = !!div.lastChild;
-} )();
-
-
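-// Illustrative note (not part of the upstream jQuery source): assigning table
-// fragments straight to a <div>'s innerHTML silently drops them; for example
-// div.innerHTML = "<td>x</td>" leaves no <td> behind. wrapMap therefore stores,
-// per tag, the wrapper markup to add and how many child levels buildFragment
-// must descend afterwards; td: [ 3, "<table><tbody><tr>", ... ] means "wrap in
-// table/tbody/tr, then step down three times to reach the parsed td".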
-// We have to close these tags to support XHTML (#13200)
-var wrapMap = {
-
-	// XHTML parsers do not magically insert elements in the
-	// same way that tag soup parsers do. So we cannot shorten
-	// this by omitting <tbody> or other required elements.
-	thead: [ 1, "<table>", "</table>" ],
-	col: [ 2, "<table><colgroup>", "</colgroup></table>" ],
-	tr: [ 2, "<table><tbody>", "</tbody></table>" ],
-	td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>
" ], - - _default: [ 0, "", "" ] -}; - -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - -// Support: IE <=9 only -if ( !support.option ) { - wrapMap.optgroup = wrapMap.option = [ 1, "" ]; -} - - -function getAll( context, tag ) { - - // Support: IE <=9 - 11 only - // Use typeof to avoid zero-argument method invocation on host objects (#15151) - var ret; - - if ( typeof context.getElementsByTagName !== "undefined" ) { - ret = context.getElementsByTagName( tag || "*" ); - - } else if ( typeof context.querySelectorAll !== "undefined" ) { - ret = context.querySelectorAll( tag || "*" ); - - } else { - ret = []; - } - - if ( tag === undefined || tag && nodeName( context, tag ) ) { - return jQuery.merge( [ context ], ret ); - } - - return ret; -} - - -// Mark scripts as having already been evaluated -function setGlobalEval( elems, refElements ) { - var i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - dataPriv.set( - elems[ i ], - "globalEval", - !refElements || dataPriv.get( refElements[ i ], "globalEval" ) - ); - } -} - - -var rhtml = /<|&#?\w+;/; - -function buildFragment( elems, context, scripts, selection, ignored ) { - var elem, tmp, tag, wrap, attached, j, - fragment = context.createDocumentFragment(), - nodes = [], - i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - elem = elems[ i ]; - - if ( elem || elem === 0 ) { - - // Add nodes directly - if ( toType( elem ) === "object" ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); - - // Convert non-html into a text node - } else if ( !rhtml.test( elem ) ) { - nodes.push( context.createTextNode( elem ) ); - - // Convert html into DOM nodes - } else { - tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); - - // Deserialize a standard representation - tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); - wrap = wrapMap[ tag ] || wrapMap._default; - tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; - - // Descend through wrappers to the right content - j = wrap[ 0 ]; - while ( j-- ) { - tmp = tmp.lastChild; - } - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, tmp.childNodes ); - - // Remember the top-level container - tmp = fragment.firstChild; - - // Ensure the created nodes are orphaned (#12392) - tmp.textContent = ""; - } - } - } - - // Remove wrapper from fragment - fragment.textContent = ""; - - i = 0; - while ( ( elem = nodes[ i++ ] ) ) { - - // Skip elements already in the context collection (trac-4087) - if ( selection && jQuery.inArray( elem, selection ) > -1 ) { - if ( ignored ) { - ignored.push( elem ); - } - continue; - } - - attached = isAttached( elem ); - - // Append to fragment - tmp = getAll( fragment.appendChild( elem ), "script" ); - - // Preserve script evaluation history - if ( attached ) { - setGlobalEval( tmp ); - } - - // Capture executables - if ( scripts ) { - j = 0; - while ( ( elem = tmp[ j++ ] ) ) { - if ( rscriptType.test( elem.type || "" ) ) { - scripts.push( elem ); - } - } - } - } - - return fragment; -} - - -var - rkeyEvent = /^key/, - rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, - rtypenamespace = /^([^.]*)(?:\.(.+)|)/; - -function returnTrue() { - return true; -} - -function returnFalse() { - return false; -} - -// Support: IE <=9 - 11+ -// focus() and blur() are 
asynchronous, except when they are no-op. -// So expect focus to be synchronous when the element is already active, -// and blur to be synchronous when the element is not already active. -// (focus and blur are always synchronous in other supported browsers, -// this just defines when we can count on it). -function expectSync( elem, type ) { - return ( elem === safeActiveElement() ) === ( type === "focus" ); -} - -// Support: IE <=9 only -// Accessing document.activeElement can throw unexpectedly -// https://bugs.jquery.com/ticket/13393 -function safeActiveElement() { - try { - return document.activeElement; - } catch ( err ) { } -} - -function on( elem, types, selector, data, fn, one ) { - var origFn, type; - - // Types can be a map of types/handlers - if ( typeof types === "object" ) { - - // ( types-Object, selector, data ) - if ( typeof selector !== "string" ) { - - // ( types-Object, data ) - data = data || selector; - selector = undefined; - } - for ( type in types ) { - on( elem, type, selector, data, types[ type ], one ); - } - return elem; - } - - if ( data == null && fn == null ) { - - // ( types, fn ) - fn = selector; - data = selector = undefined; - } else if ( fn == null ) { - if ( typeof selector === "string" ) { - - // ( types, selector, fn ) - fn = data; - data = undefined; - } else { - - // ( types, data, fn ) - fn = data; - data = selector; - selector = undefined; - } - } - if ( fn === false ) { - fn = returnFalse; - } else if ( !fn ) { - return elem; - } - - if ( one === 1 ) { - origFn = fn; - fn = function( event ) { - - // Can use an empty set, since event contains the info - jQuery().off( event ); - return origFn.apply( this, arguments ); - }; - - // Use same guid so caller can remove using origFn - fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); - } - return elem.each( function() { - jQuery.event.add( this, types, fn, data, selector ); - } ); -} - -/* - * Helper functions for managing events -- not part of the public interface. - * Props to Dean Edwards' addEvent library for many of the ideas. - */ -jQuery.event = { - - global: {}, - - add: function( elem, types, handler, data, selector ) { - - var handleObjIn, eventHandle, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.get( elem ); - - // Only attach events to objects that accept data - if ( !acceptData( elem ) ) { - return; - } - - // Caller can pass in an object of custom data in lieu of the handler - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - selector = handleObjIn.selector; - } - - // Ensure that invalid selectors throw exceptions at attach time - // Evaluate against documentElement in case elem is a non-element node (e.g., document) - if ( selector ) { - jQuery.find.matchesSelector( documentElement, selector ); - } - - // Make sure that the handler has a unique ID, used to find/remove it later - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure and main handler, if this is the first - if ( !( events = elemData.events ) ) { - events = elemData.events = Object.create( null ); - } - if ( !( eventHandle = elemData.handle ) ) { - eventHandle = elemData.handle = function( e ) { - - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
- jQuery.event.dispatch.apply( elem, arguments ) : undefined; - }; - } - - // Handle multiple events separated by a space - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // There *must* be a type, no attaching namespace-only handlers - if ( !type ) { - continue; - } - - // If event changes its type, use the special event handlers for the changed type - special = jQuery.event.special[ type ] || {}; - - // If selector defined, determine special event api type, otherwise given type - type = ( selector ? special.delegateType : special.bindType ) || type; - - // Update special based on newly reset type - special = jQuery.event.special[ type ] || {}; - - // handleObj is passed to all event handlers - handleObj = jQuery.extend( { - type: type, - origType: origType, - data: data, - handler: handler, - guid: handler.guid, - selector: selector, - needsContext: selector && jQuery.expr.match.needsContext.test( selector ), - namespace: namespaces.join( "." ) - }, handleObjIn ); - - // Init the event handler queue if we're the first - if ( !( handlers = events[ type ] ) ) { - handlers = events[ type ] = []; - handlers.delegateCount = 0; - - // Only use addEventListener if the special events handler returns false - if ( !special.setup || - special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add to the element's handler list, delegates in front - if ( selector ) { - handlers.splice( handlers.delegateCount++, 0, handleObj ); - } else { - handlers.push( handleObj ); - } - - // Keep track of which events have ever been used, for event optimization - jQuery.event.global[ type ] = true; - } - - }, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, selector, mappedTypes ) { - - var j, origCount, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); - - if ( !elemData || !( events = elemData.events ) ) { - return; - } - - // Once for each type.namespace in types; type may be omitted - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // Unbind all events (on this namespace, if provided) for the element - if ( !type ) { - for ( type in events ) { - jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); - } - continue; - } - - special = jQuery.event.special[ type ] || {}; - type = ( selector ? 
special.delegateType : special.bindType ) || type; - handlers = events[ type ] || []; - tmp = tmp[ 2 ] && - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); - - // Remove matching events - origCount = j = handlers.length; - while ( j-- ) { - handleObj = handlers[ j ]; - - if ( ( mappedTypes || origType === handleObj.origType ) && - ( !handler || handler.guid === handleObj.guid ) && - ( !tmp || tmp.test( handleObj.namespace ) ) && - ( !selector || selector === handleObj.selector || - selector === "**" && handleObj.selector ) ) { - handlers.splice( j, 1 ); - - if ( handleObj.selector ) { - handlers.delegateCount--; - } - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - } - - // Remove generic event handler if we removed something and no more handlers exist - // (avoids potential for endless recursion during removal of special event handlers) - if ( origCount && !handlers.length ) { - if ( !special.teardown || - special.teardown.call( elem, namespaces, elemData.handle ) === false ) { - - jQuery.removeEvent( elem, type, elemData.handle ); - } - - delete events[ type ]; - } - } - - // Remove data and the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - dataPriv.remove( elem, "handle events" ); - } - }, - - dispatch: function( nativeEvent ) { - - var i, j, ret, matched, handleObj, handlerQueue, - args = new Array( arguments.length ), - - // Make a writable jQuery.Event from the native event object - event = jQuery.event.fix( nativeEvent ), - - handlers = ( - dataPriv.get( this, "events" ) || Object.create( null ) - )[ event.type ] || [], - special = jQuery.event.special[ event.type ] || {}; - - // Use the fix-ed jQuery.Event rather than the (read-only) native event - args[ 0 ] = event; - - for ( i = 1; i < arguments.length; i++ ) { - args[ i ] = arguments[ i ]; - } - - event.delegateTarget = this; - - // Call the preDispatch hook for the mapped type, and let it bail if desired - if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { - return; - } - - // Determine handlers - handlerQueue = jQuery.event.handlers.call( this, event, handlers ); - - // Run delegates first; they may want to stop propagation beneath us - i = 0; - while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { - event.currentTarget = matched.elem; - - j = 0; - while ( ( handleObj = matched.handlers[ j++ ] ) && - !event.isImmediatePropagationStopped() ) { - - // If the event is namespaced, then each handler is only invoked if it is - // specially universal or its namespaces are a superset of the event's. 
- if ( !event.rnamespace || handleObj.namespace === false || - event.rnamespace.test( handleObj.namespace ) ) { - - event.handleObj = handleObj; - event.data = handleObj.data; - - ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || - handleObj.handler ).apply( matched.elem, args ); - - if ( ret !== undefined ) { - if ( ( event.result = ret ) === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - } - } - } - - // Call the postDispatch hook for the mapped type - if ( special.postDispatch ) { - special.postDispatch.call( this, event ); - } - - return event.result; - }, - - handlers: function( event, handlers ) { - var i, handleObj, sel, matchedHandlers, matchedSelectors, - handlerQueue = [], - delegateCount = handlers.delegateCount, - cur = event.target; - - // Find delegate handlers - if ( delegateCount && - - // Support: IE <=9 - // Black-hole SVG instance trees (trac-13180) - cur.nodeType && - - // Support: Firefox <=42 - // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) - // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click - // Support: IE 11 only - // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) - !( event.type === "click" && event.button >= 1 ) ) { - - for ( ; cur !== this; cur = cur.parentNode || this ) { - - // Don't check non-elements (#13208) - // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) - if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { - matchedHandlers = []; - matchedSelectors = {}; - for ( i = 0; i < delegateCount; i++ ) { - handleObj = handlers[ i ]; - - // Don't conflict with Object.prototype properties (#13203) - sel = handleObj.selector + " "; - - if ( matchedSelectors[ sel ] === undefined ) { - matchedSelectors[ sel ] = handleObj.needsContext ? - jQuery( sel, this ).index( cur ) > -1 : - jQuery.find( sel, this, null, [ cur ] ).length; - } - if ( matchedSelectors[ sel ] ) { - matchedHandlers.push( handleObj ); - } - } - if ( matchedHandlers.length ) { - handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); - } - } - } - } - - // Add the remaining (directly-bound) handlers - cur = this; - if ( delegateCount < handlers.length ) { - handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); - } - - return handlerQueue; - }, - - addProp: function( name, hook ) { - Object.defineProperty( jQuery.Event.prototype, name, { - enumerable: true, - configurable: true, - - get: isFunction( hook ) ? - function() { - if ( this.originalEvent ) { - return hook( this.originalEvent ); - } - } : - function() { - if ( this.originalEvent ) { - return this.originalEvent[ name ]; - } - }, - - set: function( value ) { - Object.defineProperty( this, name, { - enumerable: true, - configurable: true, - writable: true, - value: value - } ); - } - } ); - }, - - fix: function( originalEvent ) { - return originalEvent[ jQuery.expando ] ? - originalEvent : - new jQuery.Event( originalEvent ); - }, - - special: { - load: { - - // Prevent triggered image.load events from bubbling to window.load - noBubble: true - }, - click: { - - // Utilize native event to ensure correct state for checkable inputs - setup: function( data ) { - - // For mutual compressibility with _default, replace `this` access with a local var. - // `|| data` is dead code meant only to preserve the variable through minification. 
- var el = this || data; - - // Claim the first handler - if ( rcheckableType.test( el.type ) && - el.click && nodeName( el, "input" ) ) { - - // dataPriv.set( el, "click", ... ) - leverageNative( el, "click", returnTrue ); - } - - // Return false to allow normal processing in the caller - return false; - }, - trigger: function( data ) { - - // For mutual compressibility with _default, replace `this` access with a local var. - // `|| data` is dead code meant only to preserve the variable through minification. - var el = this || data; - - // Force setup before triggering a click - if ( rcheckableType.test( el.type ) && - el.click && nodeName( el, "input" ) ) { - - leverageNative( el, "click" ); - } - - // Return non-false to allow normal event-path propagation - return true; - }, - - // For cross-browser consistency, suppress native .click() on links - // Also prevent it if we're currently inside a leveraged native-event stack - _default: function( event ) { - var target = event.target; - return rcheckableType.test( target.type ) && - target.click && nodeName( target, "input" ) && - dataPriv.get( target, "click" ) || - nodeName( target, "a" ); - } - }, - - beforeunload: { - postDispatch: function( event ) { - - // Support: Firefox 20+ - // Firefox doesn't alert if the returnValue field is not set. - if ( event.result !== undefined && event.originalEvent ) { - event.originalEvent.returnValue = event.result; - } - } - } - } -}; - -// Ensure the presence of an event listener that handles manually-triggered -// synthetic events by interrupting progress until reinvoked in response to -// *native* events that it fires directly, ensuring that state changes have -// already occurred before other listeners are invoked. -function leverageNative( el, type, expectSync ) { - - // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add - if ( !expectSync ) { - if ( dataPriv.get( el, type ) === undefined ) { - jQuery.event.add( el, type, returnTrue ); - } - return; - } - - // Register the controller as a special universal handler for all event namespaces - dataPriv.set( el, type, false ); - jQuery.event.add( el, type, { - namespace: false, - handler: function( event ) { - var notAsync, result, - saved = dataPriv.get( this, type ); - - if ( ( event.isTrigger & 1 ) && this[ type ] ) { - - // Interrupt processing of the outer synthetic .trigger()ed event - // Saved data should be false in such cases, but might be a leftover capture object - // from an async native handler (gh-4350) - if ( !saved.length ) { - - // Store arguments for use when handling the inner native event - // There will always be at least one argument (an event object), so this array - // will not be confused with a leftover capture object. 
- saved = slice.call( arguments ); - dataPriv.set( this, type, saved ); - - // Trigger the native event and capture its result - // Support: IE <=9 - 11+ - // focus() and blur() are asynchronous - notAsync = expectSync( this, type ); - this[ type ](); - result = dataPriv.get( this, type ); - if ( saved !== result || notAsync ) { - dataPriv.set( this, type, false ); - } else { - result = {}; - } - if ( saved !== result ) { - - // Cancel the outer synthetic event - event.stopImmediatePropagation(); - event.preventDefault(); - return result.value; - } - - // If this is an inner synthetic event for an event with a bubbling surrogate - // (focus or blur), assume that the surrogate already propagated from triggering the - // native event and prevent that from happening again here. - // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the - // bubbling surrogate propagates *after* the non-bubbling base), but that seems - // less bad than duplication. - } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { - event.stopPropagation(); - } - - // If this is a native event triggered above, everything is now in order - // Fire an inner synthetic event with the original arguments - } else if ( saved.length ) { - - // ...and capture the result - dataPriv.set( this, type, { - value: jQuery.event.trigger( - - // Support: IE <=9 - 11+ - // Extend with the prototype to reset the above stopImmediatePropagation() - jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), - saved.slice( 1 ), - this - ) - } ); - - // Abort handling of the native event - event.stopImmediatePropagation(); - } - } - } ); -} - -jQuery.removeEvent = function( elem, type, handle ) { - - // This "if" is needed for plain objects - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle ); - } -}; - -jQuery.Event = function( src, props ) { - - // Allow instantiation without the 'new' keyword - if ( !( this instanceof jQuery.Event ) ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = src.defaultPrevented || - src.defaultPrevented === undefined && - - // Support: Android <=2.3 only - src.returnValue === false ? - returnTrue : - returnFalse; - - // Create target properties - // Support: Safari <=6 - 7 only - // Target should not be a text node (#504, #13143) - this.target = ( src.target && src.target.nodeType === 3 ) ? 
- src.target.parentNode : - src.target; - - this.currentTarget = src.currentTarget; - this.relatedTarget = src.relatedTarget; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // Create a timestamp if incoming event doesn't have one - this.timeStamp = src && src.timeStamp || Date.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - constructor: jQuery.Event, - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse, - isSimulated: false, - - preventDefault: function() { - var e = this.originalEvent; - - this.isDefaultPrevented = returnTrue; - - if ( e && !this.isSimulated ) { - e.preventDefault(); - } - }, - stopPropagation: function() { - var e = this.originalEvent; - - this.isPropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopPropagation(); - } - }, - stopImmediatePropagation: function() { - var e = this.originalEvent; - - this.isImmediatePropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopImmediatePropagation(); - } - - this.stopPropagation(); - } -}; - -// Includes all common event props including KeyEvent and MouseEvent specific props -jQuery.each( { - altKey: true, - bubbles: true, - cancelable: true, - changedTouches: true, - ctrlKey: true, - detail: true, - eventPhase: true, - metaKey: true, - pageX: true, - pageY: true, - shiftKey: true, - view: true, - "char": true, - code: true, - charCode: true, - key: true, - keyCode: true, - button: true, - buttons: true, - clientX: true, - clientY: true, - offsetX: true, - offsetY: true, - pointerId: true, - pointerType: true, - screenX: true, - screenY: true, - targetTouches: true, - toElement: true, - touches: true, - - which: function( event ) { - var button = event.button; - - // Add which for key events - if ( event.which == null && rkeyEvent.test( event.type ) ) { - return event.charCode != null ? event.charCode : event.keyCode; - } - - // Add which for click: 1 === left; 2 === middle; 3 === right - if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) { - if ( button & 1 ) { - return 1; - } - - if ( button & 2 ) { - return 3; - } - - if ( button & 4 ) { - return 2; - } - - return 0; - } - - return event.which; - } -}, jQuery.event.addProp ); - -jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { - jQuery.event.special[ type ] = { - - // Utilize native event if possible so blur/focus sequence is correct - setup: function() { - - // Claim the first handler - // dataPriv.set( this, "focus", ... ) - // dataPriv.set( this, "blur", ... ) - leverageNative( this, type, expectSync ); - - // Return false to allow normal processing in the caller - return false; - }, - trigger: function() { - - // Force setup before trigger - leverageNative( this, type ); - - // Return non-false to allow normal event-path propagation - return true; - }, - - delegateType: delegateType - }; -} ); - -// Create mouseenter/leave events using mouseover/out and event-time checks -// so that event delegation works in jQuery. 
-// Do the same for pointerenter/pointerleave and pointerover/pointerout -// -// Support: Safari 7 only -// Safari sends mouseenter too often; see: -// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 -// for the description of the bug (it existed in older Chrome versions as well). -jQuery.each( { - mouseenter: "mouseover", - mouseleave: "mouseout", - pointerenter: "pointerover", - pointerleave: "pointerout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - delegateType: fix, - bindType: fix, - - handle: function( event ) { - var ret, - target = this, - related = event.relatedTarget, - handleObj = event.handleObj; - - // For mouseenter/leave call the handler if related is outside the target. - // NB: No relatedTarget if the mouse left/entered the browser window - if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { - event.type = handleObj.origType; - ret = handleObj.handler.apply( this, arguments ); - event.type = fix; - } - return ret; - } - }; -} ); - -jQuery.fn.extend( { - - on: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn ); - }, - one: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn, 1 ); - }, - off: function( types, selector, fn ) { - var handleObj, type; - if ( types && types.preventDefault && types.handleObj ) { - - // ( event ) dispatched jQuery.Event - handleObj = types.handleObj; - jQuery( types.delegateTarget ).off( - handleObj.namespace ? - handleObj.origType + "." + handleObj.namespace : - handleObj.origType, - handleObj.selector, - handleObj.handler - ); - return this; - } - if ( typeof types === "object" ) { - - // ( types-object [, selector] ) - for ( type in types ) { - this.off( type, selector, types[ type ] ); - } - return this; - } - if ( selector === false || typeof selector === "function" ) { - - // ( types [, fn] ) - fn = selector; - selector = undefined; - } - if ( fn === false ) { - fn = returnFalse; - } - return this.each( function() { - jQuery.event.remove( this, types, fn, selector ); - } ); - } -} ); - - -var - - // Support: IE <=10 - 11, Edge 12 - 13 only - // In IE/Edge using regex groups here causes severe slowdowns. - // See https://connect.microsoft.com/IE/feedback/details/1736512/ - rnoInnerhtml = /\s*$/g; - -// Prefer a tbody over its parent table for containing new rows -function manipulationTarget( elem, content ) { - if ( nodeName( elem, "table" ) && - nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) { - - return jQuery( elem ).children( "tbody" )[ 0 ] || elem; - } - - return elem; -} - -// Replace/restore the type attribute of script elements for safe DOM manipulation -function disableScript( elem ) { - elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; - return elem; -} -function restoreScript( elem ) { - if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) { - elem.type = elem.type.slice( 5 ); - } else { - elem.removeAttribute( "type" ); - } - - return elem; -} - -function cloneCopyEvent( src, dest ) { - var i, l, type, pdataOld, udataOld, udataCur, events; - - if ( dest.nodeType !== 1 ) { - return; - } - - // 1. Copy private data: events, handlers, etc. 
- if ( dataPriv.hasData( src ) ) { - pdataOld = dataPriv.get( src ); - events = pdataOld.events; - - if ( events ) { - dataPriv.remove( dest, "handle events" ); - - for ( type in events ) { - for ( i = 0, l = events[ type ].length; i < l; i++ ) { - jQuery.event.add( dest, type, events[ type ][ i ] ); - } - } - } - } - - // 2. Copy user data - if ( dataUser.hasData( src ) ) { - udataOld = dataUser.access( src ); - udataCur = jQuery.extend( {}, udataOld ); - - dataUser.set( dest, udataCur ); - } -} - -// Fix IE bugs, see support tests -function fixInput( src, dest ) { - var nodeName = dest.nodeName.toLowerCase(); - - // Fails to persist the checked state of a cloned checkbox or radio button. - if ( nodeName === "input" && rcheckableType.test( src.type ) ) { - dest.checked = src.checked; - - // Fails to return the selected option to the default selected state when cloning options - } else if ( nodeName === "input" || nodeName === "textarea" ) { - dest.defaultValue = src.defaultValue; - } -} - -function domManip( collection, args, callback, ignored ) { - - // Flatten any nested arrays - args = flat( args ); - - var fragment, first, scripts, hasScripts, node, doc, - i = 0, - l = collection.length, - iNoClone = l - 1, - value = args[ 0 ], - valueIsFunction = isFunction( value ); - - // We can't cloneNode fragments that contain checked, in WebKit - if ( valueIsFunction || - ( l > 1 && typeof value === "string" && - !support.checkClone && rchecked.test( value ) ) ) { - return collection.each( function( index ) { - var self = collection.eq( index ); - if ( valueIsFunction ) { - args[ 0 ] = value.call( this, index, self.html() ); - } - domManip( self, args, callback, ignored ); - } ); - } - - if ( l ) { - fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); - first = fragment.firstChild; - - if ( fragment.childNodes.length === 1 ) { - fragment = first; - } - - // Require either new content or an interest in ignored elements to invoke the callback - if ( first || ignored ) { - scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); - hasScripts = scripts.length; - - // Use the original fragment for the last item - // instead of the first because it can end up - // being emptied incorrectly in certain situations (#8070). 
- for ( ; i < l; i++ ) { - node = fragment; - - if ( i !== iNoClone ) { - node = jQuery.clone( node, true, true ); - - // Keep references to cloned scripts for later restoration - if ( hasScripts ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( scripts, getAll( node, "script" ) ); - } - } - - callback.call( collection[ i ], node, i ); - } - - if ( hasScripts ) { - doc = scripts[ scripts.length - 1 ].ownerDocument; - - // Reenable scripts - jQuery.map( scripts, restoreScript ); - - // Evaluate executable scripts on first document insertion - for ( i = 0; i < hasScripts; i++ ) { - node = scripts[ i ]; - if ( rscriptType.test( node.type || "" ) && - !dataPriv.access( node, "globalEval" ) && - jQuery.contains( doc, node ) ) { - - if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { - - // Optional AJAX dependency, but won't run scripts if not present - if ( jQuery._evalUrl && !node.noModule ) { - jQuery._evalUrl( node.src, { - nonce: node.nonce || node.getAttribute( "nonce" ) - }, doc ); - } - } else { - DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); - } - } - } - } - } - } - - return collection; -} - -function remove( elem, selector, keepData ) { - var node, - nodes = selector ? jQuery.filter( selector, elem ) : elem, - i = 0; - - for ( ; ( node = nodes[ i ] ) != null; i++ ) { - if ( !keepData && node.nodeType === 1 ) { - jQuery.cleanData( getAll( node ) ); - } - - if ( node.parentNode ) { - if ( keepData && isAttached( node ) ) { - setGlobalEval( getAll( node, "script" ) ); - } - node.parentNode.removeChild( node ); - } - } - - return elem; -} - -jQuery.extend( { - htmlPrefilter: function( html ) { - return html; - }, - - clone: function( elem, dataAndEvents, deepDataAndEvents ) { - var i, l, srcElements, destElements, - clone = elem.cloneNode( true ), - inPage = isAttached( elem ); - - // Fix IE cloning issues - if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && - !jQuery.isXMLDoc( elem ) ) { - - // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 - destElements = getAll( clone ); - srcElements = getAll( elem ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - fixInput( srcElements[ i ], destElements[ i ] ); - } - } - - // Copy the events from the original to the clone - if ( dataAndEvents ) { - if ( deepDataAndEvents ) { - srcElements = srcElements || getAll( elem ); - destElements = destElements || getAll( clone ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - cloneCopyEvent( srcElements[ i ], destElements[ i ] ); - } - } else { - cloneCopyEvent( elem, clone ); - } - } - - // Preserve script evaluation history - destElements = getAll( clone, "script" ); - if ( destElements.length > 0 ) { - setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); - } - - // Return the cloned set - return clone; - }, - - cleanData: function( elems ) { - var data, elem, type, - special = jQuery.event.special, - i = 0; - - for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { - if ( acceptData( elem ) ) { - if ( ( data = elem[ dataPriv.expando ] ) ) { - if ( data.events ) { - for ( type in data.events ) { - if ( special[ type ] ) { - jQuery.event.remove( elem, type ); - - // This is a shortcut to avoid jQuery.event.remove's overhead - } else { - jQuery.removeEvent( elem, type, data.handle ); - } - } - } - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove 
- elem[ dataPriv.expando ] = undefined; - } - if ( elem[ dataUser.expando ] ) { - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove - elem[ dataUser.expando ] = undefined; - } - } - } - } -} ); - -jQuery.fn.extend( { - detach: function( selector ) { - return remove( this, selector, true ); - }, - - remove: function( selector ) { - return remove( this, selector ); - }, - - text: function( value ) { - return access( this, function( value ) { - return value === undefined ? - jQuery.text( this ) : - this.empty().each( function() { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - this.textContent = value; - } - } ); - }, null, value, arguments.length ); - }, - - append: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.appendChild( elem ); - } - } ); - }, - - prepend: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.insertBefore( elem, target.firstChild ); - } - } ); - }, - - before: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this ); - } - } ); - }, - - after: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this.nextSibling ); - } - } ); - }, - - empty: function() { - var elem, - i = 0; - - for ( ; ( elem = this[ i ] ) != null; i++ ) { - if ( elem.nodeType === 1 ) { - - // Prevent memory leaks - jQuery.cleanData( getAll( elem, false ) ); - - // Remove any remaining nodes - elem.textContent = ""; - } - } - - return this; - }, - - clone: function( dataAndEvents, deepDataAndEvents ) { - dataAndEvents = dataAndEvents == null ? false : dataAndEvents; - deepDataAndEvents = deepDataAndEvents == null ? 
dataAndEvents : deepDataAndEvents; - - return this.map( function() { - return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); - } ); - }, - - html: function( value ) { - return access( this, function( value ) { - var elem = this[ 0 ] || {}, - i = 0, - l = this.length; - - if ( value === undefined && elem.nodeType === 1 ) { - return elem.innerHTML; - } - - // See if we can take a shortcut and just use innerHTML - if ( typeof value === "string" && !rnoInnerhtml.test( value ) && - !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { - - value = jQuery.htmlPrefilter( value ); - - try { - for ( ; i < l; i++ ) { - elem = this[ i ] || {}; - - // Remove element nodes and prevent memory leaks - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - elem.innerHTML = value; - } - } - - elem = 0; - - // If using innerHTML throws an exception, use the fallback method - } catch ( e ) {} - } - - if ( elem ) { - this.empty().append( value ); - } - }, null, value, arguments.length ); - }, - - replaceWith: function() { - var ignored = []; - - // Make the changes, replacing each non-ignored context element with the new content - return domManip( this, arguments, function( elem ) { - var parent = this.parentNode; - - if ( jQuery.inArray( this, ignored ) < 0 ) { - jQuery.cleanData( getAll( this ) ); - if ( parent ) { - parent.replaceChild( elem, this ); - } - } - - // Force callback invocation - }, ignored ); - } -} ); - -jQuery.each( { - appendTo: "append", - prependTo: "prepend", - insertBefore: "before", - insertAfter: "after", - replaceAll: "replaceWith" -}, function( name, original ) { - jQuery.fn[ name ] = function( selector ) { - var elems, - ret = [], - insert = jQuery( selector ), - last = insert.length - 1, - i = 0; - - for ( ; i <= last; i++ ) { - elems = i === last ? this : this.clone( true ); - jQuery( insert[ i ] )[ original ]( elems ); - - // Support: Android <=4.0 only, PhantomJS 1 only - // .get() because push.apply(_, arraylike) throws on ancient WebKit - push.apply( ret, elems.get() ); - } - - return this.pushStack( ret ); - }; -} ); -var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); - -var getStyles = function( elem ) { - - // Support: IE <=11 only, Firefox <=30 (#15098, #14150) - // IE throws on elements created in popups - // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" - var view = elem.ownerDocument.defaultView; - - if ( !view || !view.opener ) { - view = window; - } - - return view.getComputedStyle( elem ); - }; - -var swap = function( elem, options, callback ) { - var ret, name, - old = {}; - - // Remember the old values, and insert the new ones - for ( name in options ) { - old[ name ] = elem.style[ name ]; - elem.style[ name ] = options[ name ]; - } - - ret = callback.call( elem ); - - // Revert the old values - for ( name in options ) { - elem.style[ name ] = old[ name ]; - } - - return ret; -}; - - -var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); - - - -( function() { - - // Executing both pixelPosition & boxSizingReliable tests require only one layout - // so they're executed at the same time to save the second computation. 
- function computeStyleTests() { - - // This is a singleton, we need to execute it only once - if ( !div ) { - return; - } - - container.style.cssText = "position:absolute;left:-11111px;width:60px;" + - "margin-top:1px;padding:0;border:0"; - div.style.cssText = - "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + - "margin:auto;border:1px;padding:1px;" + - "width:60%;top:1%"; - documentElement.appendChild( container ).appendChild( div ); - - var divStyle = window.getComputedStyle( div ); - pixelPositionVal = divStyle.top !== "1%"; - - // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 - reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; - - // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 - // Some styles come back with percentage values, even though they shouldn't - div.style.right = "60%"; - pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; - - // Support: IE 9 - 11 only - // Detect misreporting of content dimensions for box-sizing:border-box elements - boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; - - // Support: IE 9 only - // Detect overflow:scroll screwiness (gh-3699) - // Support: Chrome <=64 - // Don't get tricked when zoom affects offsetWidth (gh-4029) - div.style.position = "absolute"; - scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; - - documentElement.removeChild( container ); - - // Nullify the div so it wouldn't be stored in the memory and - // it will also be a sign that checks already performed - div = null; - } - - function roundPixelMeasures( measure ) { - return Math.round( parseFloat( measure ) ); - } - - var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, - reliableTrDimensionsVal, reliableMarginLeftVal, - container = document.createElement( "div" ), - div = document.createElement( "div" ); - - // Finish early in limited (non-browser) environments - if ( !div.style ) { - return; - } - - // Support: IE <=9 - 11 only - // Style of cloned element affects source element cloned (#8908) - div.style.backgroundClip = "content-box"; - div.cloneNode( true ).style.backgroundClip = ""; - support.clearCloneStyle = div.style.backgroundClip === "content-box"; - - jQuery.extend( support, { - boxSizingReliable: function() { - computeStyleTests(); - return boxSizingReliableVal; - }, - pixelBoxStyles: function() { - computeStyleTests(); - return pixelBoxStylesVal; - }, - pixelPosition: function() { - computeStyleTests(); - return pixelPositionVal; - }, - reliableMarginLeft: function() { - computeStyleTests(); - return reliableMarginLeftVal; - }, - scrollboxSize: function() { - computeStyleTests(); - return scrollboxSizeVal; - }, - - // Support: IE 9 - 11+, Edge 15 - 18+ - // IE/Edge misreport `getComputedStyle` of table rows with width/height - // set in CSS while `offset*` properties report correct values. - // Behavior in IE 9 is more subtle than in newer versions & it passes - // some versions of this test; make sure not to make it pass there! 
- reliableTrDimensions: function() { - var table, tr, trChild, trStyle; - if ( reliableTrDimensionsVal == null ) { - table = document.createElement( "table" ); - tr = document.createElement( "tr" ); - trChild = document.createElement( "div" ); - - table.style.cssText = "position:absolute;left:-11111px"; - tr.style.height = "1px"; - trChild.style.height = "9px"; - - documentElement - .appendChild( table ) - .appendChild( tr ) - .appendChild( trChild ); - - trStyle = window.getComputedStyle( tr ); - reliableTrDimensionsVal = parseInt( trStyle.height ) > 3; - - documentElement.removeChild( table ); - } - return reliableTrDimensionsVal; - } - } ); -} )(); - - -function curCSS( elem, name, computed ) { - var width, minWidth, maxWidth, ret, - - // Support: Firefox 51+ - // Retrieving style before computed somehow - // fixes an issue with getting wrong values - // on detached elements - style = elem.style; - - computed = computed || getStyles( elem ); - - // getPropertyValue is needed for: - // .css('filter') (IE 9 only, #12537) - // .css('--customProperty) (#3144) - if ( computed ) { - ret = computed.getPropertyValue( name ) || computed[ name ]; - - if ( ret === "" && !isAttached( elem ) ) { - ret = jQuery.style( elem, name ); - } - - // A tribute to the "awesome hack by Dean Edwards" - // Android Browser returns percentage for some values, - // but width seems to be reliably pixels. - // This is against the CSSOM draft spec: - // https://drafts.csswg.org/cssom/#resolved-values - if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { - - // Remember the original values - width = style.width; - minWidth = style.minWidth; - maxWidth = style.maxWidth; - - // Put in the new values to get a computed value out - style.minWidth = style.maxWidth = style.width = ret; - ret = computed.width; - - // Revert the changed values - style.width = width; - style.minWidth = minWidth; - style.maxWidth = maxWidth; - } - } - - return ret !== undefined ? - - // Support: IE <=9 - 11 only - // IE returns zIndex value as an integer. - ret + "" : - ret; -} - - -function addGetHookIf( conditionFn, hookFn ) { - - // Define the hook, we'll check on the first run if it's really needed. - return { - get: function() { - if ( conditionFn() ) { - - // Hook not needed (or it's not possible to use it due - // to missing dependency), remove it. - delete this.get; - return; - } - - // Hook needed; redefine it so that the support test is not executed again. 
- return ( this.get = hookFn ).apply( this, arguments ); - } - }; -} - - -var cssPrefixes = [ "Webkit", "Moz", "ms" ], - emptyStyle = document.createElement( "div" ).style, - vendorProps = {}; - -// Return a vendor-prefixed property or undefined -function vendorPropName( name ) { - - // Check for vendor prefixed names - var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), - i = cssPrefixes.length; - - while ( i-- ) { - name = cssPrefixes[ i ] + capName; - if ( name in emptyStyle ) { - return name; - } - } -} - -// Return a potentially-mapped jQuery.cssProps or vendor prefixed property -function finalPropName( name ) { - var final = jQuery.cssProps[ name ] || vendorProps[ name ]; - - if ( final ) { - return final; - } - if ( name in emptyStyle ) { - return name; - } - return vendorProps[ name ] = vendorPropName( name ) || name; -} - - -var - - // Swappable if display is none or starts with table - // except "table", "table-cell", or "table-caption" - // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display - rdisplayswap = /^(none|table(?!-c[ea]).+)/, - rcustomProp = /^--/, - cssShow = { position: "absolute", visibility: "hidden", display: "block" }, - cssNormalTransform = { - letterSpacing: "0", - fontWeight: "400" - }; - -function setPositiveNumber( _elem, value, subtract ) { - - // Any relative (+/-) values have already been - // normalized at this point - var matches = rcssNum.exec( value ); - return matches ? - - // Guard against undefined "subtract", e.g., when used as in cssHooks - Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : - value; -} - -function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { - var i = dimension === "width" ? 1 : 0, - extra = 0, - delta = 0; - - // Adjustment may not be necessary - if ( box === ( isBorderBox ? 
"border" : "content" ) ) { - return 0; - } - - for ( ; i < 4; i += 2 ) { - - // Both box models exclude margin - if ( box === "margin" ) { - delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); - } - - // If we get here with a content-box, we're seeking "padding" or "border" or "margin" - if ( !isBorderBox ) { - - // Add padding - delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - - // For "border" or "margin", add border - if ( box !== "padding" ) { - delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - - // But still keep track of it otherwise - } else { - extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - - // If we get here with a border-box (content + padding + border), we're seeking "content" or - // "padding" or "margin" - } else { - - // For "content", subtract padding - if ( box === "content" ) { - delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - } - - // For "content" or "padding", subtract border - if ( box !== "margin" ) { - delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } - } - - // Account for positive content-box scroll gutter when requested by providing computedVal - if ( !isBorderBox && computedVal >= 0 ) { - - // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border - // Assuming integer scroll gutter, subtract the rest and round down - delta += Math.max( 0, Math.ceil( - elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - - computedVal - - delta - - extra - - 0.5 - - // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter - // Use an explicit zero to avoid NaN (gh-3964) - ) ) || 0; - } - - return delta; -} - -function getWidthOrHeight( elem, dimension, extra ) { - - // Start with computed style - var styles = getStyles( elem ), - - // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). - // Fake content-box until we know it's needed to know the true value. - boxSizingNeeded = !support.boxSizingReliable() || extra, - isBorderBox = boxSizingNeeded && - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - valueIsBorderBox = isBorderBox, - - val = curCSS( elem, dimension, styles ), - offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); - - // Support: Firefox <=54 - // Return a confounding non-pixel value or feign ignorance, as appropriate. - if ( rnumnonpx.test( val ) ) { - if ( !extra ) { - return val; - } - val = "auto"; - } - - - // Support: IE 9 - 11 only - // Use offsetWidth/offsetHeight for when box sizing is unreliable. - // In those cases, the computed value can be trusted to be border-box. - if ( ( !support.boxSizingReliable() && isBorderBox || - - // Support: IE 10 - 11+, Edge 15 - 18+ - // IE/Edge misreport `getComputedStyle` of table rows with width/height - // set in CSS while `offset*` properties report correct values. - // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
- !support.reliableTrDimensions() && nodeName( elem, "tr" ) || - - // Fall back to offsetWidth/offsetHeight when value is "auto" - // This happens for inline elements with no explicit setting (gh-3571) - val === "auto" || - - // Support: Android <=4.1 - 4.3 only - // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) - !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && - - // Make sure the element is visible & connected - elem.getClientRects().length ) { - - isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; - - // Where available, offsetWidth/offsetHeight approximate border box dimensions. - // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the - // retrieved value as a content box dimension. - valueIsBorderBox = offsetProp in elem; - if ( valueIsBorderBox ) { - val = elem[ offsetProp ]; - } - } - - // Normalize "" and auto - val = parseFloat( val ) || 0; - - // Adjust for the element's box model - return ( val + - boxModelAdjustment( - elem, - dimension, - extra || ( isBorderBox ? "border" : "content" ), - valueIsBorderBox, - styles, - - // Provide the current computed size to request scroll gutter calculation (gh-3589) - val - ) - ) + "px"; -} - -jQuery.extend( { - - // Add in style property hooks for overriding the default - // behavior of getting and setting a style property - cssHooks: { - opacity: { - get: function( elem, computed ) { - if ( computed ) { - - // We should always get a number back from opacity - var ret = curCSS( elem, "opacity" ); - return ret === "" ? "1" : ret; - } - } - } - }, - - // Don't automatically add "px" to these possibly-unitless properties - cssNumber: { - "animationIterationCount": true, - "columnCount": true, - "fillOpacity": true, - "flexGrow": true, - "flexShrink": true, - "fontWeight": true, - "gridArea": true, - "gridColumn": true, - "gridColumnEnd": true, - "gridColumnStart": true, - "gridRow": true, - "gridRowEnd": true, - "gridRowStart": true, - "lineHeight": true, - "opacity": true, - "order": true, - "orphans": true, - "widows": true, - "zIndex": true, - "zoom": true - }, - - // Add in properties whose names you wish to fix before - // setting or getting the value - cssProps: {}, - - // Get and set the style property on a DOM Node - style: function( elem, name, value, extra ) { - - // Don't set styles on text and comment nodes - if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { - return; - } - - // Make sure that we're working with the right name - var ret, type, hooks, - origName = camelCase( name ), - isCustomProp = rcustomProp.test( name ), - style = elem.style; - - // Make sure that we're working with the right name. We don't - // want to query the value if it is a CSS custom property - // since they are user-defined. 
- if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Gets hook for the prefixed version, then unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // Check if we're setting a value - if ( value !== undefined ) { - type = typeof value; - - // Convert "+=" or "-=" to relative numbers (#7345) - if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { - value = adjustCSS( elem, name, ret ); - - // Fixes bug #9237 - type = "number"; - } - - // Make sure that null and NaN values aren't set (#7116) - if ( value == null || value !== value ) { - return; - } - - // If a number was passed in, add the unit (except for certain CSS properties) - // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append - // "px" to a few hardcoded values. - if ( type === "number" && !isCustomProp ) { - value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); - } - - // background-* props affect original clone's values - if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { - style[ name ] = "inherit"; - } - - // If a hook was provided, use that value, otherwise just set the specified value - if ( !hooks || !( "set" in hooks ) || - ( value = hooks.set( elem, value, extra ) ) !== undefined ) { - - if ( isCustomProp ) { - style.setProperty( name, value ); - } else { - style[ name ] = value; - } - } - - } else { - - // If a hook was provided get the non-computed value from there - if ( hooks && "get" in hooks && - ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { - - return ret; - } - - // Otherwise just get the value from the style object - return style[ name ]; - } - }, - - css: function( elem, name, extra, styles ) { - var val, num, hooks, - origName = camelCase( name ), - isCustomProp = rcustomProp.test( name ); - - // Make sure that we're working with the right name. We don't - // want to modify the value if it is a CSS custom property - // since they are user-defined. - if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Try prefixed name followed by the unprefixed name - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // If a hook was provided get the computed value from there - if ( hooks && "get" in hooks ) { - val = hooks.get( elem, true, extra ); - } - - // Otherwise, if a way to get the computed value exists, use that - if ( val === undefined ) { - val = curCSS( elem, name, styles ); - } - - // Convert "normal" to computed value - if ( val === "normal" && name in cssNormalTransform ) { - val = cssNormalTransform[ name ]; - } - - // Make numeric if forced or a qualifier was provided and val looks numeric - if ( extra === "" || extra ) { - num = parseFloat( val ); - return extra === true || isFinite( num ) ? num || 0 : val; - } - - return val; - } -} ); - -jQuery.each( [ "height", "width" ], function( _i, dimension ) { - jQuery.cssHooks[ dimension ] = { - get: function( elem, computed, extra ) { - if ( computed ) { - - // Certain elements can have dimension info if we invisibly show them - // but it must have a current display style that would benefit - return rdisplayswap.test( jQuery.css( elem, "display" ) ) && - - // Support: Safari 8+ - // Table columns in Safari have non-zero offsetWidth & zero - // getBoundingClientRect().width unless display is changed. - // Support: IE <=11 only - // Running getBoundingClientRect on a disconnected node - // in IE throws an error. 
- ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? - swap( elem, cssShow, function() { - return getWidthOrHeight( elem, dimension, extra ); - } ) : - getWidthOrHeight( elem, dimension, extra ); - } - }, - - set: function( elem, value, extra ) { - var matches, - styles = getStyles( elem ), - - // Only read styles.position if the test has a chance to fail - // to avoid forcing a reflow. - scrollboxSizeBuggy = !support.scrollboxSize() && - styles.position === "absolute", - - // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) - boxSizingNeeded = scrollboxSizeBuggy || extra, - isBorderBox = boxSizingNeeded && - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - subtract = extra ? - boxModelAdjustment( - elem, - dimension, - extra, - isBorderBox, - styles - ) : - 0; - - // Account for unreliable border-box dimensions by comparing offset* to computed and - // faking a content-box to get border and padding (gh-3699) - if ( isBorderBox && scrollboxSizeBuggy ) { - subtract -= Math.ceil( - elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - - parseFloat( styles[ dimension ] ) - - boxModelAdjustment( elem, dimension, "border", false, styles ) - - 0.5 - ); - } - - // Convert to pixels if value adjustment is needed - if ( subtract && ( matches = rcssNum.exec( value ) ) && - ( matches[ 3 ] || "px" ) !== "px" ) { - - elem.style[ dimension ] = value; - value = jQuery.css( elem, dimension ); - } - - return setPositiveNumber( elem, value, subtract ); - } - }; -} ); - -jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, - function( elem, computed ) { - if ( computed ) { - return ( parseFloat( curCSS( elem, "marginLeft" ) ) || - elem.getBoundingClientRect().left - - swap( elem, { marginLeft: 0 }, function() { - return elem.getBoundingClientRect().left; - } ) - ) + "px"; - } - } -); - -// These hooks are used by animate to expand properties -jQuery.each( { - margin: "", - padding: "", - border: "Width" -}, function( prefix, suffix ) { - jQuery.cssHooks[ prefix + suffix ] = { - expand: function( value ) { - var i = 0, - expanded = {}, - - // Assumes a single number if not a string - parts = typeof value === "string" ? value.split( " " ) : [ value ]; - - for ( ; i < 4; i++ ) { - expanded[ prefix + cssExpand[ i ] + suffix ] = - parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; - } - - return expanded; - } - }; - - if ( prefix !== "margin" ) { - jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; - } -} ); - -jQuery.fn.extend( { - css: function( name, value ) { - return access( this, function( elem, name, value ) { - var styles, len, - map = {}, - i = 0; - - if ( Array.isArray( name ) ) { - styles = getStyles( elem ); - len = name.length; - - for ( ; i < len; i++ ) { - map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); - } - - return map; - } - - return value !== undefined ? - jQuery.style( elem, name, value ) : - jQuery.css( elem, name ); - }, name, value, arguments.length > 1 ); - } -} ); - - -function Tween( elem, options, prop, end, easing ) { - return new Tween.prototype.init( elem, options, prop, end, easing ); -} -jQuery.Tween = Tween; - -Tween.prototype = { - constructor: Tween, - init: function( elem, options, prop, end, easing, unit ) { - this.elem = elem; - this.prop = prop; - this.easing = easing || jQuery.easing._default; - this.options = options; - this.start = this.now = this.cur(); - this.end = end; - this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); - }, - cur: function() { - var hooks = Tween.propHooks[ this.prop ]; - - return hooks && hooks.get ? - hooks.get( this ) : - Tween.propHooks._default.get( this ); - }, - run: function( percent ) { - var eased, - hooks = Tween.propHooks[ this.prop ]; - - if ( this.options.duration ) { - this.pos = eased = jQuery.easing[ this.easing ]( - percent, this.options.duration * percent, 0, 1, this.options.duration - ); - } else { - this.pos = eased = percent; - } - this.now = ( this.end - this.start ) * eased + this.start; - - if ( this.options.step ) { - this.options.step.call( this.elem, this.now, this ); - } - - if ( hooks && hooks.set ) { - hooks.set( this ); - } else { - Tween.propHooks._default.set( this ); - } - return this; - } -}; - -Tween.prototype.init.prototype = Tween.prototype; - -Tween.propHooks = { - _default: { - get: function( tween ) { - var result; - - // Use a property on the element directly when it is not a DOM element, - // or when there is no matching style property that exists. - if ( tween.elem.nodeType !== 1 || - tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { - return tween.elem[ tween.prop ]; - } - - // Passing an empty string as a 3rd parameter to .css will automatically - // attempt a parseFloat and fallback to a string if the parse fails. - // Simple values such as "10px" are parsed to Float; - // complex values such as "rotate(1rad)" are returned as-is. - result = jQuery.css( tween.elem, tween.prop, "" ); - - // Empty strings, null, undefined and "auto" are converted to 0. - return !result || result === "auto" ? 0 : result; - }, - set: function( tween ) { - - // Use step hook for back compat. - // Use cssHook if its there. - // Use .style if available and use plain properties where available. - if ( jQuery.fx.step[ tween.prop ] ) { - jQuery.fx.step[ tween.prop ]( tween ); - } else if ( tween.elem.nodeType === 1 && ( - jQuery.cssHooks[ tween.prop ] || - tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { - jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); - } else { - tween.elem[ tween.prop ] = tween.now; - } - } - } -}; - -// Support: IE <=9 only -// Panic based approach to setting things on disconnected nodes -Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { - set: function( tween ) { - if ( tween.elem.nodeType && tween.elem.parentNode ) { - tween.elem[ tween.prop ] = tween.now; - } - } -}; - -jQuery.easing = { - linear: function( p ) { - return p; - }, - swing: function( p ) { - return 0.5 - Math.cos( p * Math.PI ) / 2; - }, - _default: "swing" -}; - -jQuery.fx = Tween.prototype.init; - -// Back compat <1.8 extension point -jQuery.fx.step = {}; - - - - -var - fxNow, inProgress, - rfxtypes = /^(?:toggle|show|hide)$/, - rrun = /queueHooks$/; - -function schedule() { - if ( inProgress ) { - if ( document.hidden === false && window.requestAnimationFrame ) { - window.requestAnimationFrame( schedule ); - } else { - window.setTimeout( schedule, jQuery.fx.interval ); - } - - jQuery.fx.tick(); - } -} - -// Animations created synchronously will run synchronously -function createFxNow() { - window.setTimeout( function() { - fxNow = undefined; - } ); - return ( fxNow = Date.now() ); -} - -// Generate parameters to create a standard animation -function genFx( type, includeWidth ) { - var which, - i = 0, - attrs = { height: type }; - - // If we include width, step value is 1 to do all cssExpand values, - // otherwise step value is 2 to skip over Left and Right - includeWidth = includeWidth ? 
1 : 0; - for ( ; i < 4; i += 2 - includeWidth ) { - which = cssExpand[ i ]; - attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; - } - - if ( includeWidth ) { - attrs.opacity = attrs.width = type; - } - - return attrs; -} - -function createTween( value, prop, animation ) { - var tween, - collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), - index = 0, - length = collection.length; - for ( ; index < length; index++ ) { - if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { - - // We're done with this property - return tween; - } - } -} - -function defaultPrefilter( elem, props, opts ) { - var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, - isBox = "width" in props || "height" in props, - anim = this, - orig = {}, - style = elem.style, - hidden = elem.nodeType && isHiddenWithinTree( elem ), - dataShow = dataPriv.get( elem, "fxshow" ); - - // Queue-skipping animations hijack the fx hooks - if ( !opts.queue ) { - hooks = jQuery._queueHooks( elem, "fx" ); - if ( hooks.unqueued == null ) { - hooks.unqueued = 0; - oldfire = hooks.empty.fire; - hooks.empty.fire = function() { - if ( !hooks.unqueued ) { - oldfire(); - } - }; - } - hooks.unqueued++; - - anim.always( function() { - - // Ensure the complete handler is called before this completes - anim.always( function() { - hooks.unqueued--; - if ( !jQuery.queue( elem, "fx" ).length ) { - hooks.empty.fire(); - } - } ); - } ); - } - - // Detect show/hide animations - for ( prop in props ) { - value = props[ prop ]; - if ( rfxtypes.test( value ) ) { - delete props[ prop ]; - toggle = toggle || value === "toggle"; - if ( value === ( hidden ? "hide" : "show" ) ) { - - // Pretend to be hidden if this is a "show" and - // there is still data from a stopped show/hide - if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { - hidden = true; - - // Ignore all other no-op show/hide data - } else { - continue; - } - } - orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); - } - } - - // Bail out if this is a no-op like .hide().hide() - propTween = !jQuery.isEmptyObject( props ); - if ( !propTween && jQuery.isEmptyObject( orig ) ) { - return; - } - - // Restrict "overflow" and "display" styles during box animations - if ( isBox && elem.nodeType === 1 ) { - - // Support: IE <=9 - 11, Edge 12 - 15 - // Record all 3 overflow attributes because IE does not infer the shorthand - // from identically-valued overflowX and overflowY and Edge just mirrors - // the overflowX value there. 
- opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; - - // Identify a display type, preferring old show/hide data over the CSS cascade - restoreDisplay = dataShow && dataShow.display; - if ( restoreDisplay == null ) { - restoreDisplay = dataPriv.get( elem, "display" ); - } - display = jQuery.css( elem, "display" ); - if ( display === "none" ) { - if ( restoreDisplay ) { - display = restoreDisplay; - } else { - - // Get nonempty value(s) by temporarily forcing visibility - showHide( [ elem ], true ); - restoreDisplay = elem.style.display || restoreDisplay; - display = jQuery.css( elem, "display" ); - showHide( [ elem ] ); - } - } - - // Animate inline elements as inline-block - if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { - if ( jQuery.css( elem, "float" ) === "none" ) { - - // Restore the original display value at the end of pure show/hide animations - if ( !propTween ) { - anim.done( function() { - style.display = restoreDisplay; - } ); - if ( restoreDisplay == null ) { - display = style.display; - restoreDisplay = display === "none" ? "" : display; - } - } - style.display = "inline-block"; - } - } - } - - if ( opts.overflow ) { - style.overflow = "hidden"; - anim.always( function() { - style.overflow = opts.overflow[ 0 ]; - style.overflowX = opts.overflow[ 1 ]; - style.overflowY = opts.overflow[ 2 ]; - } ); - } - - // Implement show/hide animations - propTween = false; - for ( prop in orig ) { - - // General show/hide setup for this element animation - if ( !propTween ) { - if ( dataShow ) { - if ( "hidden" in dataShow ) { - hidden = dataShow.hidden; - } - } else { - dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); - } - - // Store hidden/visible for toggle so `.stop().toggle()` "reverses" - if ( toggle ) { - dataShow.hidden = !hidden; - } - - // Show elements before animating them - if ( hidden ) { - showHide( [ elem ], true ); - } - - /* eslint-disable no-loop-func */ - - anim.done( function() { - - /* eslint-enable no-loop-func */ - - // The final step of a "hide" animation is actually hiding the element - if ( !hidden ) { - showHide( [ elem ] ); - } - dataPriv.remove( elem, "fxshow" ); - for ( prop in orig ) { - jQuery.style( elem, prop, orig[ prop ] ); - } - } ); - } - - // Per-property setup - propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); - if ( !( prop in dataShow ) ) { - dataShow[ prop ] = propTween.start; - if ( hidden ) { - propTween.end = propTween.start; - propTween.start = 0; - } - } - } -} - -function propFilter( props, specialEasing ) { - var index, name, easing, value, hooks; - - // camelCase, specialEasing and expand cssHook pass - for ( index in props ) { - name = camelCase( index ); - easing = specialEasing[ name ]; - value = props[ index ]; - if ( Array.isArray( value ) ) { - easing = value[ 1 ]; - value = props[ index ] = value[ 0 ]; - } - - if ( index !== name ) { - props[ name ] = value; - delete props[ index ]; - } - - hooks = jQuery.cssHooks[ name ]; - if ( hooks && "expand" in hooks ) { - value = hooks.expand( value ); - delete props[ name ]; - - // Not quite $.extend, this won't overwrite existing keys. 
- // Reusing 'index' because we have the correct "name" - for ( index in value ) { - if ( !( index in props ) ) { - props[ index ] = value[ index ]; - specialEasing[ index ] = easing; - } - } - } else { - specialEasing[ name ] = easing; - } - } -} - -function Animation( elem, properties, options ) { - var result, - stopped, - index = 0, - length = Animation.prefilters.length, - deferred = jQuery.Deferred().always( function() { - - // Don't match elem in the :animated selector - delete tick.elem; - } ), - tick = function() { - if ( stopped ) { - return false; - } - var currentTime = fxNow || createFxNow(), - remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), - - // Support: Android 2.3 only - // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) - temp = remaining / animation.duration || 0, - percent = 1 - temp, - index = 0, - length = animation.tweens.length; - - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( percent ); - } - - deferred.notifyWith( elem, [ animation, percent, remaining ] ); - - // If there's more to do, yield - if ( percent < 1 && length ) { - return remaining; - } - - // If this was an empty animation, synthesize a final progress notification - if ( !length ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - } - - // Resolve the animation and report its conclusion - deferred.resolveWith( elem, [ animation ] ); - return false; - }, - animation = deferred.promise( { - elem: elem, - props: jQuery.extend( {}, properties ), - opts: jQuery.extend( true, { - specialEasing: {}, - easing: jQuery.easing._default - }, options ), - originalProperties: properties, - originalOptions: options, - startTime: fxNow || createFxNow(), - duration: options.duration, - tweens: [], - createTween: function( prop, end ) { - var tween = jQuery.Tween( elem, animation.opts, prop, end, - animation.opts.specialEasing[ prop ] || animation.opts.easing ); - animation.tweens.push( tween ); - return tween; - }, - stop: function( gotoEnd ) { - var index = 0, - - // If we are going to the end, we want to run all the tweens - // otherwise we skip this part - length = gotoEnd ? 
animation.tweens.length : 0; - if ( stopped ) { - return this; - } - stopped = true; - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( 1 ); - } - - // Resolve when we played the last frame; otherwise, reject - if ( gotoEnd ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - deferred.resolveWith( elem, [ animation, gotoEnd ] ); - } else { - deferred.rejectWith( elem, [ animation, gotoEnd ] ); - } - return this; - } - } ), - props = animation.props; - - propFilter( props, animation.opts.specialEasing ); - - for ( ; index < length; index++ ) { - result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); - if ( result ) { - if ( isFunction( result.stop ) ) { - jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = - result.stop.bind( result ); - } - return result; - } - } - - jQuery.map( props, createTween, animation ); - - if ( isFunction( animation.opts.start ) ) { - animation.opts.start.call( elem, animation ); - } - - // Attach callbacks from options - animation - .progress( animation.opts.progress ) - .done( animation.opts.done, animation.opts.complete ) - .fail( animation.opts.fail ) - .always( animation.opts.always ); - - jQuery.fx.timer( - jQuery.extend( tick, { - elem: elem, - anim: animation, - queue: animation.opts.queue - } ) - ); - - return animation; -} - -jQuery.Animation = jQuery.extend( Animation, { - - tweeners: { - "*": [ function( prop, value ) { - var tween = this.createTween( prop, value ); - adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); - return tween; - } ] - }, - - tweener: function( props, callback ) { - if ( isFunction( props ) ) { - callback = props; - props = [ "*" ]; - } else { - props = props.match( rnothtmlwhite ); - } - - var prop, - index = 0, - length = props.length; - - for ( ; index < length; index++ ) { - prop = props[ index ]; - Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; - Animation.tweeners[ prop ].unshift( callback ); - } - }, - - prefilters: [ defaultPrefilter ], - - prefilter: function( callback, prepend ) { - if ( prepend ) { - Animation.prefilters.unshift( callback ); - } else { - Animation.prefilters.push( callback ); - } - } -} ); - -jQuery.speed = function( speed, easing, fn ) { - var opt = speed && typeof speed === "object" ? 
jQuery.extend( {}, speed ) : { - complete: fn || !fn && easing || - isFunction( speed ) && speed, - duration: speed, - easing: fn && easing || easing && !isFunction( easing ) && easing - }; - - // Go to the end state if fx are off - if ( jQuery.fx.off ) { - opt.duration = 0; - - } else { - if ( typeof opt.duration !== "number" ) { - if ( opt.duration in jQuery.fx.speeds ) { - opt.duration = jQuery.fx.speeds[ opt.duration ]; - - } else { - opt.duration = jQuery.fx.speeds._default; - } - } - } - - // Normalize opt.queue - true/undefined/null -> "fx" - if ( opt.queue == null || opt.queue === true ) { - opt.queue = "fx"; - } - - // Queueing - opt.old = opt.complete; - - opt.complete = function() { - if ( isFunction( opt.old ) ) { - opt.old.call( this ); - } - - if ( opt.queue ) { - jQuery.dequeue( this, opt.queue ); - } - }; - - return opt; -}; - -jQuery.fn.extend( { - fadeTo: function( speed, to, easing, callback ) { - - // Show any hidden elements after setting opacity to 0 - return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() - - // Animate to the value specified - .end().animate( { opacity: to }, speed, easing, callback ); - }, - animate: function( prop, speed, easing, callback ) { - var empty = jQuery.isEmptyObject( prop ), - optall = jQuery.speed( speed, easing, callback ), - doAnimation = function() { - - // Operate on a copy of prop so per-property easing won't be lost - var anim = Animation( this, jQuery.extend( {}, prop ), optall ); - - // Empty animations, or finishing resolves immediately - if ( empty || dataPriv.get( this, "finish" ) ) { - anim.stop( true ); - } - }; - doAnimation.finish = doAnimation; - - return empty || optall.queue === false ? - this.each( doAnimation ) : - this.queue( optall.queue, doAnimation ); - }, - stop: function( type, clearQueue, gotoEnd ) { - var stopQueue = function( hooks ) { - var stop = hooks.stop; - delete hooks.stop; - stop( gotoEnd ); - }; - - if ( typeof type !== "string" ) { - gotoEnd = clearQueue; - clearQueue = type; - type = undefined; - } - if ( clearQueue ) { - this.queue( type || "fx", [] ); - } - - return this.each( function() { - var dequeue = true, - index = type != null && type + "queueHooks", - timers = jQuery.timers, - data = dataPriv.get( this ); - - if ( index ) { - if ( data[ index ] && data[ index ].stop ) { - stopQueue( data[ index ] ); - } - } else { - for ( index in data ) { - if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { - stopQueue( data[ index ] ); - } - } - } - - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && - ( type == null || timers[ index ].queue === type ) ) { - - timers[ index ].anim.stop( gotoEnd ); - dequeue = false; - timers.splice( index, 1 ); - } - } - - // Start the next in the queue if the last step wasn't forced. - // Timers currently will call their complete callbacks, which - // will dequeue but only if they were gotoEnd. - if ( dequeue || !gotoEnd ) { - jQuery.dequeue( this, type ); - } - } ); - }, - finish: function( type ) { - if ( type !== false ) { - type = type || "fx"; - } - return this.each( function() { - var index, - data = dataPriv.get( this ), - queue = data[ type + "queue" ], - hooks = data[ type + "queueHooks" ], - timers = jQuery.timers, - length = queue ? 
queue.length : 0; - - // Enable finishing flag on private data - data.finish = true; - - // Empty the queue first - jQuery.queue( this, type, [] ); - - if ( hooks && hooks.stop ) { - hooks.stop.call( this, true ); - } - - // Look for any active animations, and finish them - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && timers[ index ].queue === type ) { - timers[ index ].anim.stop( true ); - timers.splice( index, 1 ); - } - } - - // Look for any animations in the old queue and finish them - for ( index = 0; index < length; index++ ) { - if ( queue[ index ] && queue[ index ].finish ) { - queue[ index ].finish.call( this ); - } - } - - // Turn off finishing flag - delete data.finish; - } ); - } -} ); - -jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { - var cssFn = jQuery.fn[ name ]; - jQuery.fn[ name ] = function( speed, easing, callback ) { - return speed == null || typeof speed === "boolean" ? - cssFn.apply( this, arguments ) : - this.animate( genFx( name, true ), speed, easing, callback ); - }; -} ); - -// Generate shortcuts for custom animations -jQuery.each( { - slideDown: genFx( "show" ), - slideUp: genFx( "hide" ), - slideToggle: genFx( "toggle" ), - fadeIn: { opacity: "show" }, - fadeOut: { opacity: "hide" }, - fadeToggle: { opacity: "toggle" } -}, function( name, props ) { - jQuery.fn[ name ] = function( speed, easing, callback ) { - return this.animate( props, speed, easing, callback ); - }; -} ); - -jQuery.timers = []; -jQuery.fx.tick = function() { - var timer, - i = 0, - timers = jQuery.timers; - - fxNow = Date.now(); - - for ( ; i < timers.length; i++ ) { - timer = timers[ i ]; - - // Run the timer and safely remove it when done (allowing for external removal) - if ( !timer() && timers[ i ] === timer ) { - timers.splice( i--, 1 ); - } - } - - if ( !timers.length ) { - jQuery.fx.stop(); - } - fxNow = undefined; -}; - -jQuery.fx.timer = function( timer ) { - jQuery.timers.push( timer ); - jQuery.fx.start(); -}; - -jQuery.fx.interval = 13; -jQuery.fx.start = function() { - if ( inProgress ) { - return; - } - - inProgress = true; - schedule(); -}; - -jQuery.fx.stop = function() { - inProgress = null; -}; - -jQuery.fx.speeds = { - slow: 600, - fast: 200, - - // Default speed - _default: 400 -}; - - -// Based off of the plugin by Clint Helfers, with permission. -// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ -jQuery.fn.delay = function( time, type ) { - time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; - type = type || "fx"; - - return this.queue( type, function( next, hooks ) { - var timeout = window.setTimeout( next, time ); - hooks.stop = function() { - window.clearTimeout( timeout ); - }; - } ); -}; - - -( function() { - var input = document.createElement( "input" ), - select = document.createElement( "select" ), - opt = select.appendChild( document.createElement( "option" ) ); - - input.type = "checkbox"; - - // Support: Android <=4.3 only - // Default value for a checkbox should be "on" - support.checkOn = input.value !== ""; - - // Support: IE <=11 only - // Must access selectedIndex to make default options select - support.optSelected = opt.selected; - - // Support: IE <=11 only - // An input loses its value after becoming a radio - input = document.createElement( "input" ); - input.value = "t"; - input.type = "radio"; - support.radioValue = input.value === "t"; -} )(); - - -var boolHook, - attrHandle = jQuery.expr.attrHandle; - -jQuery.fn.extend( { - attr: function( name, value ) { - return access( this, jQuery.attr, name, value, arguments.length > 1 ); - }, - - removeAttr: function( name ) { - return this.each( function() { - jQuery.removeAttr( this, name ); - } ); - } -} ); - -jQuery.extend( { - attr: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set attributes on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - // Fallback to prop when attributes are not supported - if ( typeof elem.getAttribute === "undefined" ) { - return jQuery.prop( elem, name, value ); - } - - // Attribute hooks are determined by the lowercase version - // Grab necessary hook if one is defined - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - hooks = jQuery.attrHooks[ name.toLowerCase() ] || - ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); - } - - if ( value !== undefined ) { - if ( value === null ) { - jQuery.removeAttr( elem, name ); - return; - } - - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - elem.setAttribute( name, value + "" ); - return value; - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - ret = jQuery.find.attr( elem, name ); - - // Non-existent attributes return null, we normalize to undefined - return ret == null ? 
undefined : ret; - }, - - attrHooks: { - type: { - set: function( elem, value ) { - if ( !support.radioValue && value === "radio" && - nodeName( elem, "input" ) ) { - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - } - }, - - removeAttr: function( elem, value ) { - var name, - i = 0, - - // Attribute names can contain non-HTML whitespace characters - // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 - attrNames = value && value.match( rnothtmlwhite ); - - if ( attrNames && elem.nodeType === 1 ) { - while ( ( name = attrNames[ i++ ] ) ) { - elem.removeAttribute( name ); - } - } - } -} ); - -// Hooks for boolean attributes -boolHook = { - set: function( elem, value, name ) { - if ( value === false ) { - - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else { - elem.setAttribute( name, name ); - } - return name; - } -}; - -jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { - var getter = attrHandle[ name ] || jQuery.find.attr; - - attrHandle[ name ] = function( elem, name, isXML ) { - var ret, handle, - lowercaseName = name.toLowerCase(); - - if ( !isXML ) { - - // Avoid an infinite loop by temporarily removing this function from the getter - handle = attrHandle[ lowercaseName ]; - attrHandle[ lowercaseName ] = ret; - ret = getter( elem, name, isXML ) != null ? - lowercaseName : - null; - attrHandle[ lowercaseName ] = handle; - } - return ret; - }; -} ); - - - - -var rfocusable = /^(?:input|select|textarea|button)$/i, - rclickable = /^(?:a|area)$/i; - -jQuery.fn.extend( { - prop: function( name, value ) { - return access( this, jQuery.prop, name, value, arguments.length > 1 ); - }, - - removeProp: function( name ) { - return this.each( function() { - delete this[ jQuery.propFix[ name ] || name ]; - } ); - } -} ); - -jQuery.extend( { - prop: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set properties on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - - // Fix name and attach hooks - name = jQuery.propFix[ name ] || name; - hooks = jQuery.propHooks[ name ]; - } - - if ( value !== undefined ) { - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - return ( elem[ name ] = value ); - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - return elem[ name ]; - }, - - propHooks: { - tabIndex: { - get: function( elem ) { - - // Support: IE <=9 - 11 only - // elem.tabIndex doesn't always return the - // correct value when it hasn't been explicitly set - // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - // Use proper attribute retrieval(#12072) - var tabindex = jQuery.find.attr( elem, "tabindex" ); - - if ( tabindex ) { - return parseInt( tabindex, 10 ); - } - - if ( - rfocusable.test( elem.nodeName ) || - rclickable.test( elem.nodeName ) && - elem.href - ) { - return 0; - } - - return -1; - } - } - }, - - propFix: { - "for": "htmlFor", - "class": "className" - } -} ); - -// Support: IE <=11 only -// Accessing the selectedIndex property -// forces the browser to respect setting selected -// on the option -// The getter ensures a default option is selected -// when in an 
optgroup -// eslint rule "no-unused-expressions" is disabled for this code -// since it considers such accessions noop -if ( !support.optSelected ) { - jQuery.propHooks.selected = { - get: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent && parent.parentNode ) { - parent.parentNode.selectedIndex; - } - return null; - }, - set: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent ) { - parent.selectedIndex; - - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - } - }; -} - -jQuery.each( [ - "tabIndex", - "readOnly", - "maxLength", - "cellSpacing", - "cellPadding", - "rowSpan", - "colSpan", - "useMap", - "frameBorder", - "contentEditable" -], function() { - jQuery.propFix[ this.toLowerCase() ] = this; -} ); - - - - - // Strip and collapse whitespace according to HTML spec - // https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace - function stripAndCollapse( value ) { - var tokens = value.match( rnothtmlwhite ) || []; - return tokens.join( " " ); - } - - -function getClass( elem ) { - return elem.getAttribute && elem.getAttribute( "class" ) || ""; -} - -function classesToArray( value ) { - if ( Array.isArray( value ) ) { - return value; - } - if ( typeof value === "string" ) { - return value.match( rnothtmlwhite ) || []; - } - return []; -} - -jQuery.fn.extend( { - addClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - classes = classesToArray( value ); - - if ( classes.length ) { - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - if ( cur.indexOf( " " + clazz + " " ) < 0 ) { - cur += clazz + " "; - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - removeClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - if ( !arguments.length ) { - return this.attr( "class", "" ); - } - - classes = classesToArray( value ); - - if ( classes.length ) { - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - - // This expression is here for better compressibility (see addClass) - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - - // Remove *all* instances - while ( cur.indexOf( " " + clazz + " " ) > -1 ) { - cur = cur.replace( " " + clazz + " ", " " ); - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value, - isValidValue = type === "string" || Array.isArray( value ); - - if ( typeof stateVal === "boolean" && isValidValue ) { - return stateVal ? 
this.addClass( value ) : this.removeClass( value ); - } - - if ( isFunction( value ) ) { - return this.each( function( i ) { - jQuery( this ).toggleClass( - value.call( this, i, getClass( this ), stateVal ), - stateVal - ); - } ); - } - - return this.each( function() { - var className, i, self, classNames; - - if ( isValidValue ) { - - // Toggle individual class names - i = 0; - self = jQuery( this ); - classNames = classesToArray( value ); - - while ( ( className = classNames[ i++ ] ) ) { - - // Check each className given, space separated list - if ( self.hasClass( className ) ) { - self.removeClass( className ); - } else { - self.addClass( className ); - } - } - - // Toggle whole class name - } else if ( value === undefined || type === "boolean" ) { - className = getClass( this ); - if ( className ) { - - // Store className if set - dataPriv.set( this, "__className__", className ); - } - - // If the element has a class name or if we're passed `false`, - // then remove the whole classname (if there was one, the above saved it). - // Otherwise bring back whatever was previously saved (if anything), - // falling back to the empty string if nothing was stored. - if ( this.setAttribute ) { - this.setAttribute( "class", - className || value === false ? - "" : - dataPriv.get( this, "__className__" ) || "" - ); - } - } - } ); - }, - - hasClass: function( selector ) { - var className, elem, - i = 0; - - className = " " + selector + " "; - while ( ( elem = this[ i++ ] ) ) { - if ( elem.nodeType === 1 && - ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { - return true; - } - } - - return false; - } -} ); - - - - -var rreturn = /\r/g; - -jQuery.fn.extend( { - val: function( value ) { - var hooks, ret, valueIsFunction, - elem = this[ 0 ]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.type ] || - jQuery.valHooks[ elem.nodeName.toLowerCase() ]; - - if ( hooks && - "get" in hooks && - ( ret = hooks.get( elem, "value" ) ) !== undefined - ) { - return ret; - } - - ret = elem.value; - - // Handle most common string cases - if ( typeof ret === "string" ) { - return ret.replace( rreturn, "" ); - } - - // Handle cases where value is null/undef or number - return ret == null ? "" : ret; - } - - return; - } - - valueIsFunction = isFunction( value ); - - return this.each( function( i ) { - var val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( valueIsFunction ) { - val = value.call( this, i, jQuery( this ).val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - - } else if ( typeof val === "number" ) { - val += ""; - - } else if ( Array.isArray( val ) ) { - val = jQuery.map( val, function( value ) { - return value == null ? "" : value + ""; - } ); - } - - hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - } ); - } -} ); - -jQuery.extend( { - valHooks: { - option: { - get: function( elem ) { - - var val = jQuery.find.attr( elem, "value" ); - return val != null ? 
- val : - - // Support: IE <=10 - 11 only - // option.text throws exceptions (#14686, #14858) - // Strip and collapse whitespace - // https://html.spec.whatwg.org/#strip-and-collapse-whitespace - stripAndCollapse( jQuery.text( elem ) ); - } - }, - select: { - get: function( elem ) { - var value, option, i, - options = elem.options, - index = elem.selectedIndex, - one = elem.type === "select-one", - values = one ? null : [], - max = one ? index + 1 : options.length; - - if ( index < 0 ) { - i = max; - - } else { - i = one ? index : 0; - } - - // Loop through all the selected options - for ( ; i < max; i++ ) { - option = options[ i ]; - - // Support: IE <=9 only - // IE8-9 doesn't update selected after form reset (#2551) - if ( ( option.selected || i === index ) && - - // Don't return options that are disabled or in a disabled optgroup - !option.disabled && - ( !option.parentNode.disabled || - !nodeName( option.parentNode, "optgroup" ) ) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - return values; - }, - - set: function( elem, value ) { - var optionSet, option, - options = elem.options, - values = jQuery.makeArray( value ), - i = options.length; - - while ( i-- ) { - option = options[ i ]; - - /* eslint-disable no-cond-assign */ - - if ( option.selected = - jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 - ) { - optionSet = true; - } - - /* eslint-enable no-cond-assign */ - } - - // Force browsers to behave consistently when non-matching value is set - if ( !optionSet ) { - elem.selectedIndex = -1; - } - return values; - } - } - } -} ); - -// Radios and checkboxes getter/setter -jQuery.each( [ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - set: function( elem, value ) { - if ( Array.isArray( value ) ) { - return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); - } - } - }; - if ( !support.checkOn ) { - jQuery.valHooks[ this ].get = function( elem ) { - return elem.getAttribute( "value" ) === null ? "on" : elem.value; - }; - } -} ); - - - - -// Return jQuery for attributes-only inclusion - - -support.focusin = "onfocusin" in window; - - -var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, - stopPropagationCallback = function( e ) { - e.stopPropagation(); - }; - -jQuery.extend( jQuery.event, { - - trigger: function( event, data, elem, onlyHandlers ) { - - var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, - eventPath = [ elem || document ], - type = hasOwn.call( event, "type" ) ? event.type : event, - namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; - - cur = lastElement = tmp = elem = elem || document; - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // focus/blur morphs to focusin/out; ensure we're not firing them right now - if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { - return; - } - - if ( type.indexOf( "." ) > -1 ) { - - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split( "." ); - type = namespaces.shift(); - namespaces.sort(); - } - ontype = type.indexOf( ":" ) < 0 && "on" + type; - - // Caller can pass in a jQuery.Event object, Object, or just an event type string - event = event[ jQuery.expando ] ? 
- event : - new jQuery.Event( type, typeof event === "object" && event ); - - // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) - event.isTrigger = onlyHandlers ? 2 : 3; - event.namespace = namespaces.join( "." ); - event.rnamespace = event.namespace ? - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : - null; - - // Clean up the event in case it is being reused - event.result = undefined; - if ( !event.target ) { - event.target = elem; - } - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data == null ? - [ event ] : - jQuery.makeArray( data, [ event ] ); - - // Allow special events to draw outside the lines - special = jQuery.event.special[ type ] || {}; - if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { - return; - } - - // Determine event propagation path in advance, per W3C events spec (#9951) - // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) - if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { - - bubbleType = special.delegateType || type; - if ( !rfocusMorph.test( bubbleType + type ) ) { - cur = cur.parentNode; - } - for ( ; cur; cur = cur.parentNode ) { - eventPath.push( cur ); - tmp = cur; - } - - // Only add window if we got to document (e.g., not plain obj or detached DOM) - if ( tmp === ( elem.ownerDocument || document ) ) { - eventPath.push( tmp.defaultView || tmp.parentWindow || window ); - } - } - - // Fire handlers on the event path - i = 0; - while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { - lastElement = cur; - event.type = i > 1 ? - bubbleType : - special.bindType || type; - - // jQuery handler - handle = ( - dataPriv.get( cur, "events" ) || Object.create( null ) - )[ event.type ] && - dataPriv.get( cur, "handle" ); - if ( handle ) { - handle.apply( cur, data ); - } - - // Native handler - handle = ontype && cur[ ontype ]; - if ( handle && handle.apply && acceptData( cur ) ) { - event.result = handle.apply( cur, data ); - if ( event.result === false ) { - event.preventDefault(); - } - } - } - event.type = type; - - // If nobody prevented the default action, do it now - if ( !onlyHandlers && !event.isDefaultPrevented() ) { - - if ( ( !special._default || - special._default.apply( eventPath.pop(), data ) === false ) && - acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name as the event. 
- // Don't do default actions on window, that's where global variables be (#6170) - if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { - - // Don't re-trigger an onFOO event when we call its FOO() method - tmp = elem[ ontype ]; - - if ( tmp ) { - elem[ ontype ] = null; - } - - // Prevent re-triggering of the same event, since we already bubbled it above - jQuery.event.triggered = type; - - if ( event.isPropagationStopped() ) { - lastElement.addEventListener( type, stopPropagationCallback ); - } - - elem[ type ](); - - if ( event.isPropagationStopped() ) { - lastElement.removeEventListener( type, stopPropagationCallback ); - } - - jQuery.event.triggered = undefined; - - if ( tmp ) { - elem[ ontype ] = tmp; - } - } - } - } - - return event.result; - }, - - // Piggyback on a donor event to simulate a different one - // Used only for `focus(in | out)` events - simulate: function( type, elem, event ) { - var e = jQuery.extend( - new jQuery.Event(), - event, - { - type: type, - isSimulated: true - } - ); - - jQuery.event.trigger( e, null, elem ); - } - -} ); - -jQuery.fn.extend( { - - trigger: function( type, data ) { - return this.each( function() { - jQuery.event.trigger( type, data, this ); - } ); - }, - triggerHandler: function( type, data ) { - var elem = this[ 0 ]; - if ( elem ) { - return jQuery.event.trigger( type, data, elem, true ); - } - } -} ); - - -// Support: Firefox <=44 -// Firefox doesn't have focus(in | out) events -// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 -// -// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 -// focus(in | out) events fire after focus & blur events, -// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order -// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 -if ( !support.focusin ) { - jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler on the document while someone wants focusin/focusout - var handler = function( event ) { - jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); - }; - - jQuery.event.special[ fix ] = { - setup: function() { - - // Handle: regular nodes (via `this.ownerDocument`), window - // (via `this.document`) & document (via `this`). - var doc = this.ownerDocument || this.document || this, - attaches = dataPriv.access( doc, fix ); - - if ( !attaches ) { - doc.addEventListener( orig, handler, true ); - } - dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); - }, - teardown: function() { - var doc = this.ownerDocument || this.document || this, - attaches = dataPriv.access( doc, fix ) - 1; - - if ( !attaches ) { - doc.removeEventListener( orig, handler, true ); - dataPriv.remove( doc, fix ); - - } else { - dataPriv.access( doc, fix, attaches ); - } - } - }; - } ); -} -var location = window.location; - -var nonce = { guid: Date.now() }; - -var rquery = ( /\?/ ); - - - -// Cross-browser xml parsing -jQuery.parseXML = function( data ) { - var xml; - if ( !data || typeof data !== "string" ) { - return null; - } - - // Support: IE 9 - 11 only - // IE throws on parseFromString with invalid input. 
- try { - xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); - } catch ( e ) { - xml = undefined; - } - - if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) { - jQuery.error( "Invalid XML: " + data ); - } - return xml; -}; - - -var - rbracket = /\[\]$/, - rCRLF = /\r?\n/g, - rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, - rsubmittable = /^(?:input|select|textarea|keygen)/i; - -function buildParams( prefix, obj, traditional, add ) { - var name; - - if ( Array.isArray( obj ) ) { - - // Serialize array item. - jQuery.each( obj, function( i, v ) { - if ( traditional || rbracket.test( prefix ) ) { - - // Treat each array item as a scalar. - add( prefix, v ); - - } else { - - // Item is non-scalar (array or object), encode its numeric index. - buildParams( - prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", - v, - traditional, - add - ); - } - } ); - - } else if ( !traditional && toType( obj ) === "object" ) { - - // Serialize object item. - for ( name in obj ) { - buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); - } - - } else { - - // Serialize scalar item. - add( prefix, obj ); - } -} - -// Serialize an array of form elements or a set of -// key/values into a query string -jQuery.param = function( a, traditional ) { - var prefix, - s = [], - add = function( key, valueOrFunction ) { - - // If value is a function, invoke it and use its return value - var value = isFunction( valueOrFunction ) ? - valueOrFunction() : - valueOrFunction; - - s[ s.length ] = encodeURIComponent( key ) + "=" + - encodeURIComponent( value == null ? "" : value ); - }; - - if ( a == null ) { - return ""; - } - - // If an array was passed in, assume that it is an array of form elements. - if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { - - // Serialize the form elements - jQuery.each( a, function() { - add( this.name, this.value ); - } ); - - } else { - - // If traditional, encode the "old" way (the way 1.3.2 or older - // did it), otherwise encode params recursively. - for ( prefix in a ) { - buildParams( prefix, a[ prefix ], traditional, add ); - } - } - - // Return the resulting serialization - return s.join( "&" ); -}; - -jQuery.fn.extend( { - serialize: function() { - return jQuery.param( this.serializeArray() ); - }, - serializeArray: function() { - return this.map( function() { - - // Can add propHook for "elements" to filter or add form elements - var elements = jQuery.prop( this, "elements" ); - return elements ? 
jQuery.makeArray( elements ) : this; - } ) - .filter( function() { - var type = this.type; - - // Use .is( ":disabled" ) so that fieldset[disabled] works - return this.name && !jQuery( this ).is( ":disabled" ) && - rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && - ( this.checked || !rcheckableType.test( type ) ); - } ) - .map( function( _i, elem ) { - var val = jQuery( this ).val(); - - if ( val == null ) { - return null; - } - - if ( Array.isArray( val ) ) { - return jQuery.map( val, function( val ) { - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ); - } - - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ).get(); - } -} ); - - -var - r20 = /%20/g, - rhash = /#.*$/, - rantiCache = /([?&])_=[^&]*/, - rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, - - // #7653, #8125, #8152: local protocol detection - rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, - rnoContent = /^(?:GET|HEAD)$/, - rprotocol = /^\/\//, - - /* Prefilters - * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) - * 2) These are called: - * - BEFORE asking for a transport - * - AFTER param serialization (s.data is a string if s.processData is true) - * 3) key is the dataType - * 4) the catchall symbol "*" can be used - * 5) execution will start with transport dataType and THEN continue down to "*" if needed - */ - prefilters = {}, - - /* Transports bindings - * 1) key is the dataType - * 2) the catchall symbol "*" can be used - * 3) selection will start with transport dataType and THEN go to "*" if needed - */ - transports = {}, - - // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression - allTypes = "*/".concat( "*" ), - - // Anchor tag for parsing the document origin - originAnchor = document.createElement( "a" ); - originAnchor.href = location.href; - -// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport -function addToPrefiltersOrTransports( structure ) { - - // dataTypeExpression is optional and defaults to "*" - return function( dataTypeExpression, func ) { - - if ( typeof dataTypeExpression !== "string" ) { - func = dataTypeExpression; - dataTypeExpression = "*"; - } - - var dataType, - i = 0, - dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; - - if ( isFunction( func ) ) { - - // For each dataType in the dataTypeExpression - while ( ( dataType = dataTypes[ i++ ] ) ) { - - // Prepend if requested - if ( dataType[ 0 ] === "+" ) { - dataType = dataType.slice( 1 ) || "*"; - ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); - - // Otherwise append - } else { - ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); - } - } - } - }; -} - -// Base inspection function for prefilters and transports -function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { - - var inspected = {}, - seekingTransport = ( structure === transports ); - - function inspect( dataType ) { - var selected; - inspected[ dataType ] = true; - jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { - var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); - if ( typeof dataTypeOrTransport === "string" && - !seekingTransport && !inspected[ dataTypeOrTransport ] ) { - - options.dataTypes.unshift( dataTypeOrTransport ); - inspect( dataTypeOrTransport ); - return false; - } else if ( seekingTransport ) { - return !( selected = dataTypeOrTransport ); - } 
- } ); - return selected; - } - - return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); -} - -// A special extend for ajax options -// that takes "flat" options (not to be deep extended) -// Fixes #9887 -function ajaxExtend( target, src ) { - var key, deep, - flatOptions = jQuery.ajaxSettings.flatOptions || {}; - - for ( key in src ) { - if ( src[ key ] !== undefined ) { - ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; - } - } - if ( deep ) { - jQuery.extend( true, target, deep ); - } - - return target; -} - -/* Handles responses to an ajax request: - * - finds the right dataType (mediates between content-type and expected dataType) - * - returns the corresponding response - */ -function ajaxHandleResponses( s, jqXHR, responses ) { - - var ct, type, finalDataType, firstDataType, - contents = s.contents, - dataTypes = s.dataTypes; - - // Remove auto dataType and get content-type in the process - while ( dataTypes[ 0 ] === "*" ) { - dataTypes.shift(); - if ( ct === undefined ) { - ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); - } - } - - // Check if we're dealing with a known content-type - if ( ct ) { - for ( type in contents ) { - if ( contents[ type ] && contents[ type ].test( ct ) ) { - dataTypes.unshift( type ); - break; - } - } - } - - // Check to see if we have a response for the expected dataType - if ( dataTypes[ 0 ] in responses ) { - finalDataType = dataTypes[ 0 ]; - } else { - - // Try convertible dataTypes - for ( type in responses ) { - if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { - finalDataType = type; - break; - } - if ( !firstDataType ) { - firstDataType = type; - } - } - - // Or just use first one - finalDataType = finalDataType || firstDataType; - } - - // If we found a dataType - // We add the dataType to the list if needed - // and return the corresponding response - if ( finalDataType ) { - if ( finalDataType !== dataTypes[ 0 ] ) { - dataTypes.unshift( finalDataType ); - } - return responses[ finalDataType ]; - } -} - -/* Chain conversions given the request and the original response - * Also sets the responseXXX fields on the jqXHR instance - */ -function ajaxConvert( s, response, jqXHR, isSuccess ) { - var conv2, current, conv, tmp, prev, - converters = {}, - - // Work with a copy of dataTypes in case we need to modify it for conversion - dataTypes = s.dataTypes.slice(); - - // Create converters map with lowercased keys - if ( dataTypes[ 1 ] ) { - for ( conv in s.converters ) { - converters[ conv.toLowerCase() ] = s.converters[ conv ]; - } - } - - current = dataTypes.shift(); - - // Convert to each sequential dataType - while ( current ) { - - if ( s.responseFields[ current ] ) { - jqXHR[ s.responseFields[ current ] ] = response; - } - - // Apply the dataFilter if provided - if ( !prev && isSuccess && s.dataFilter ) { - response = s.dataFilter( response, s.dataType ); - } - - prev = current; - current = dataTypes.shift(); - - if ( current ) { - - // There's only work to do if current dataType is non-auto - if ( current === "*" ) { - - current = prev; - - // Convert response if prev dataType is non-auto and differs from current - } else if ( prev !== "*" && prev !== current ) { - - // Seek a direct converter - conv = converters[ prev + " " + current ] || converters[ "* " + current ]; - - // If none found, seek a pair - if ( !conv ) { - for ( conv2 in converters ) { - - // If conv2 outputs current - tmp = conv2.split( " " ); - if ( tmp[ 1 ] === current ) { - - // If 
prev can be converted to accepted input - conv = converters[ prev + " " + tmp[ 0 ] ] || - converters[ "* " + tmp[ 0 ] ]; - if ( conv ) { - - // Condense equivalence converters - if ( conv === true ) { - conv = converters[ conv2 ]; - - // Otherwise, insert the intermediate dataType - } else if ( converters[ conv2 ] !== true ) { - current = tmp[ 0 ]; - dataTypes.unshift( tmp[ 1 ] ); - } - break; - } - } - } - } - - // Apply converter (if not an equivalence) - if ( conv !== true ) { - - // Unless errors are allowed to bubble, catch and return them - if ( conv && s.throws ) { - response = conv( response ); - } else { - try { - response = conv( response ); - } catch ( e ) { - return { - state: "parsererror", - error: conv ? e : "No conversion from " + prev + " to " + current - }; - } - } - } - } - } - } - - return { state: "success", data: response }; -} - -jQuery.extend( { - - // Counter for holding the number of active queries - active: 0, - - // Last-Modified header cache for next request - lastModified: {}, - etag: {}, - - ajaxSettings: { - url: location.href, - type: "GET", - isLocal: rlocalProtocol.test( location.protocol ), - global: true, - processData: true, - async: true, - contentType: "application/x-www-form-urlencoded; charset=UTF-8", - - /* - timeout: 0, - data: null, - dataType: null, - username: null, - password: null, - cache: null, - throws: false, - traditional: false, - headers: {}, - */ - - accepts: { - "*": allTypes, - text: "text/plain", - html: "text/html", - xml: "application/xml, text/xml", - json: "application/json, text/javascript" - }, - - contents: { - xml: /\bxml\b/, - html: /\bhtml/, - json: /\bjson\b/ - }, - - responseFields: { - xml: "responseXML", - text: "responseText", - json: "responseJSON" - }, - - // Data converters - // Keys separate source (or catchall "*") and destination types with a single space - converters: { - - // Convert anything to text - "* text": String, - - // Text to html (true = no transformation) - "text html": true, - - // Evaluate text as a json expression - "text json": JSON.parse, - - // Parse text as xml - "text xml": jQuery.parseXML - }, - - // For options that shouldn't be deep extended: - // you can add your own custom options here if - // and when you create one that shouldn't be - // deep extended (see ajaxExtend) - flatOptions: { - url: true, - context: true - } - }, - - // Creates a full fledged settings object into target - // with both ajaxSettings and settings fields. - // If target is omitted, writes into ajaxSettings. - ajaxSetup: function( target, settings ) { - return settings ? 
- - // Building a settings object - ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : - - // Extending ajaxSettings - ajaxExtend( jQuery.ajaxSettings, target ); - }, - - ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), - ajaxTransport: addToPrefiltersOrTransports( transports ), - - // Main method - ajax: function( url, options ) { - - // If url is an object, simulate pre-1.5 signature - if ( typeof url === "object" ) { - options = url; - url = undefined; - } - - // Force options to be an object - options = options || {}; - - var transport, - - // URL without anti-cache param - cacheURL, - - // Response headers - responseHeadersString, - responseHeaders, - - // timeout handle - timeoutTimer, - - // Url cleanup var - urlAnchor, - - // Request state (becomes false upon send and true upon completion) - completed, - - // To know if global events are to be dispatched - fireGlobals, - - // Loop variable - i, - - // uncached part of the url - uncached, - - // Create the final options object - s = jQuery.ajaxSetup( {}, options ), - - // Callbacks context - callbackContext = s.context || s, - - // Context for global events is callbackContext if it is a DOM node or jQuery collection - globalEventContext = s.context && - ( callbackContext.nodeType || callbackContext.jquery ) ? - jQuery( callbackContext ) : - jQuery.event, - - // Deferreds - deferred = jQuery.Deferred(), - completeDeferred = jQuery.Callbacks( "once memory" ), - - // Status-dependent callbacks - statusCode = s.statusCode || {}, - - // Headers (they are sent all at once) - requestHeaders = {}, - requestHeadersNames = {}, - - // Default abort message - strAbort = "canceled", - - // Fake xhr - jqXHR = { - readyState: 0, - - // Builds headers hashtable if needed - getResponseHeader: function( key ) { - var match; - if ( completed ) { - if ( !responseHeaders ) { - responseHeaders = {}; - while ( ( match = rheaders.exec( responseHeadersString ) ) ) { - responseHeaders[ match[ 1 ].toLowerCase() + " " ] = - ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) - .concat( match[ 2 ] ); - } - } - match = responseHeaders[ key.toLowerCase() + " " ]; - } - return match == null ? null : match.join( ", " ); - }, - - // Raw string - getAllResponseHeaders: function() { - return completed ? 
responseHeadersString : null; - }, - - // Caches the header - setRequestHeader: function( name, value ) { - if ( completed == null ) { - name = requestHeadersNames[ name.toLowerCase() ] = - requestHeadersNames[ name.toLowerCase() ] || name; - requestHeaders[ name ] = value; - } - return this; - }, - - // Overrides response content-type header - overrideMimeType: function( type ) { - if ( completed == null ) { - s.mimeType = type; - } - return this; - }, - - // Status-dependent callbacks - statusCode: function( map ) { - var code; - if ( map ) { - if ( completed ) { - - // Execute the appropriate callbacks - jqXHR.always( map[ jqXHR.status ] ); - } else { - - // Lazy-add the new callbacks in a way that preserves old ones - for ( code in map ) { - statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; - } - } - } - return this; - }, - - // Cancel the request - abort: function( statusText ) { - var finalText = statusText || strAbort; - if ( transport ) { - transport.abort( finalText ); - } - done( 0, finalText ); - return this; - } - }; - - // Attach deferreds - deferred.promise( jqXHR ); - - // Add protocol if not provided (prefilters might expect it) - // Handle falsy url in the settings object (#10093: consistency with old signature) - // We also use the url parameter if available - s.url = ( ( url || s.url || location.href ) + "" ) - .replace( rprotocol, location.protocol + "//" ); - - // Alias method option to type as per ticket #12004 - s.type = options.method || options.type || s.method || s.type; - - // Extract dataTypes list - s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; - - // A cross-domain request is in order when the origin doesn't match the current origin. - if ( s.crossDomain == null ) { - urlAnchor = document.createElement( "a" ); - - // Support: IE <=8 - 11, Edge 12 - 15 - // IE throws exception on accessing the href property if url is malformed, - // e.g. 
http://example.com:80x/ - try { - urlAnchor.href = s.url; - - // Support: IE <=8 - 11 only - // Anchor's host property isn't correctly set when s.url is relative - urlAnchor.href = urlAnchor.href; - s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== - urlAnchor.protocol + "//" + urlAnchor.host; - } catch ( e ) { - - // If there is an error parsing the URL, assume it is crossDomain, - // it can be rejected by the transport if it is invalid - s.crossDomain = true; - } - } - - // Convert data if not already a string - if ( s.data && s.processData && typeof s.data !== "string" ) { - s.data = jQuery.param( s.data, s.traditional ); - } - - // Apply prefilters - inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); - - // If request was aborted inside a prefilter, stop there - if ( completed ) { - return jqXHR; - } - - // We can fire global events as of now if asked to - // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) - fireGlobals = jQuery.event && s.global; - - // Watch for a new set of requests - if ( fireGlobals && jQuery.active++ === 0 ) { - jQuery.event.trigger( "ajaxStart" ); - } - - // Uppercase the type - s.type = s.type.toUpperCase(); - - // Determine if request has content - s.hasContent = !rnoContent.test( s.type ); - - // Save the URL in case we're toying with the If-Modified-Since - // and/or If-None-Match header later on - // Remove hash to simplify url manipulation - cacheURL = s.url.replace( rhash, "" ); - - // More options handling for requests with no content - if ( !s.hasContent ) { - - // Remember the hash so we can put it back - uncached = s.url.slice( cacheURL.length ); - - // If data is available and should be processed, append data to url - if ( s.data && ( s.processData || typeof s.data === "string" ) ) { - cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; - - // #9682: remove data so that it's not used in an eventual retry - delete s.data; - } - - // Add or update anti-cache param if needed - if ( s.cache === false ) { - cacheURL = cacheURL.replace( rantiCache, "$1" ); - uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + - uncached; - } - - // Put hash and anti-cache on the URL that will be requested (gh-1732) - s.url = cacheURL + uncached; - - // Change '%20' to '+' if this is encoded form body content (gh-2658) - } else if ( s.data && s.processData && - ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { - s.data = s.data.replace( r20, "+" ); - } - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. - if ( s.ifModified ) { - if ( jQuery.lastModified[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); - } - if ( jQuery.etag[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); - } - } - - // Set the correct header, if data is being sent - if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { - jqXHR.setRequestHeader( "Content-Type", s.contentType ); - } - - // Set the Accepts header for the server, depending on the dataType - jqXHR.setRequestHeader( - "Accept", - s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? - s.accepts[ s.dataTypes[ 0 ] ] + - ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : - s.accepts[ "*" ] - ); - - // Check for headers option - for ( i in s.headers ) { - jqXHR.setRequestHeader( i, s.headers[ i ] ); - } - - // Allow custom headers/mimetypes and early abort - if ( s.beforeSend && - ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { - - // Abort if not done already and return - return jqXHR.abort(); - } - - // Aborting is no longer a cancellation - strAbort = "abort"; - - // Install callbacks on deferreds - completeDeferred.add( s.complete ); - jqXHR.done( s.success ); - jqXHR.fail( s.error ); - - // Get transport - transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); - - // If no transport, we auto-abort - if ( !transport ) { - done( -1, "No Transport" ); - } else { - jqXHR.readyState = 1; - - // Send global event - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); - } - - // If request was aborted inside ajaxSend, stop there - if ( completed ) { - return jqXHR; - } - - // Timeout - if ( s.async && s.timeout > 0 ) { - timeoutTimer = window.setTimeout( function() { - jqXHR.abort( "timeout" ); - }, s.timeout ); - } - - try { - completed = false; - transport.send( requestHeaders, done ); - } catch ( e ) { - - // Rethrow post-completion exceptions - if ( completed ) { - throw e; - } - - // Propagate others as results - done( -1, e ); - } - } - - // Callback for when everything is done - function done( status, nativeStatusText, responses, headers ) { - var isSuccess, success, error, response, modified, - statusText = nativeStatusText; - - // Ignore repeat invocations - if ( completed ) { - return; - } - - completed = true; - - // Clear timeout if it exists - if ( timeoutTimer ) { - window.clearTimeout( timeoutTimer ); - } - - // Dereference transport for early garbage collection - // (no matter how long the jqXHR object will be used) - transport = undefined; - - // Cache response headers - responseHeadersString = headers || ""; - - // Set readyState - jqXHR.readyState = status > 0 ? 4 : 0; - - // Determine if successful - isSuccess = status >= 200 && status < 300 || status === 304; - - // Get response data - if ( responses ) { - response = ajaxHandleResponses( s, jqXHR, responses ); - } - - // Use a noop converter for missing script - if ( !isSuccess && jQuery.inArray( "script", s.dataTypes ) > -1 ) { - s.converters[ "text script" ] = function() {}; - } - - // Convert no matter what (that way responseXXX fields are always set) - response = ajaxConvert( s, response, jqXHR, isSuccess ); - - // If successful, handle type chaining - if ( isSuccess ) { - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
- if ( s.ifModified ) { - modified = jqXHR.getResponseHeader( "Last-Modified" ); - if ( modified ) { - jQuery.lastModified[ cacheURL ] = modified; - } - modified = jqXHR.getResponseHeader( "etag" ); - if ( modified ) { - jQuery.etag[ cacheURL ] = modified; - } - } - - // if no content - if ( status === 204 || s.type === "HEAD" ) { - statusText = "nocontent"; - - // if not modified - } else if ( status === 304 ) { - statusText = "notmodified"; - - // If we have data, let's convert it - } else { - statusText = response.state; - success = response.data; - error = response.error; - isSuccess = !error; - } - } else { - - // Extract error from statusText and normalize for non-aborts - error = statusText; - if ( status || !statusText ) { - statusText = "error"; - if ( status < 0 ) { - status = 0; - } - } - } - - // Set data for the fake xhr object - jqXHR.status = status; - jqXHR.statusText = ( nativeStatusText || statusText ) + ""; - - // Success/Error - if ( isSuccess ) { - deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); - } else { - deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); - } - - // Status-dependent callbacks - jqXHR.statusCode( statusCode ); - statusCode = undefined; - - if ( fireGlobals ) { - globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", - [ jqXHR, s, isSuccess ? success : error ] ); - } - - // Complete - completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); - - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); - - // Handle the global AJAX counter - if ( !( --jQuery.active ) ) { - jQuery.event.trigger( "ajaxStop" ); - } - } - } - - return jqXHR; - }, - - getJSON: function( url, data, callback ) { - return jQuery.get( url, data, callback, "json" ); - }, - - getScript: function( url, callback ) { - return jQuery.get( url, undefined, callback, "script" ); - } -} ); - -jQuery.each( [ "get", "post" ], function( _i, method ) { - jQuery[ method ] = function( url, data, callback, type ) { - - // Shift arguments if data argument was omitted - if ( isFunction( data ) ) { - type = type || callback; - callback = data; - data = undefined; - } - - // The url can be an options object (which then must have .url) - return jQuery.ajax( jQuery.extend( { - url: url, - type: method, - dataType: type, - data: data, - success: callback - }, jQuery.isPlainObject( url ) && url ) ); - }; -} ); - -jQuery.ajaxPrefilter( function( s ) { - var i; - for ( i in s.headers ) { - if ( i.toLowerCase() === "content-type" ) { - s.contentType = s.headers[ i ] || ""; - } - } -} ); - - -jQuery._evalUrl = function( url, options, doc ) { - return jQuery.ajax( { - url: url, - - // Make this explicit, since user can override this through ajaxSetup (#11264) - type: "GET", - dataType: "script", - cache: true, - async: false, - global: false, - - // Only evaluate the response if it is successful (gh-4126) - // dataFilter is not invoked for failure responses, so using it instead - // of the default converter is kludgy but it works. 
- converters: { - "text script": function() {} - }, - dataFilter: function( response ) { - jQuery.globalEval( response, options, doc ); - } - } ); -}; - - -jQuery.fn.extend( { - wrapAll: function( html ) { - var wrap; - - if ( this[ 0 ] ) { - if ( isFunction( html ) ) { - html = html.call( this[ 0 ] ); - } - - // The elements to wrap the target around - wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); - - if ( this[ 0 ].parentNode ) { - wrap.insertBefore( this[ 0 ] ); - } - - wrap.map( function() { - var elem = this; - - while ( elem.firstElementChild ) { - elem = elem.firstElementChild; - } - - return elem; - } ).append( this ); - } - - return this; - }, - - wrapInner: function( html ) { - if ( isFunction( html ) ) { - return this.each( function( i ) { - jQuery( this ).wrapInner( html.call( this, i ) ); - } ); - } - - return this.each( function() { - var self = jQuery( this ), - contents = self.contents(); - - if ( contents.length ) { - contents.wrapAll( html ); - - } else { - self.append( html ); - } - } ); - }, - - wrap: function( html ) { - var htmlIsFunction = isFunction( html ); - - return this.each( function( i ) { - jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html ); - } ); - }, - - unwrap: function( selector ) { - this.parent( selector ).not( "body" ).each( function() { - jQuery( this ).replaceWith( this.childNodes ); - } ); - return this; - } -} ); - - -jQuery.expr.pseudos.hidden = function( elem ) { - return !jQuery.expr.pseudos.visible( elem ); -}; -jQuery.expr.pseudos.visible = function( elem ) { - return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); -}; - - - - -jQuery.ajaxSettings.xhr = function() { - try { - return new window.XMLHttpRequest(); - } catch ( e ) {} -}; - -var xhrSuccessStatus = { - - // File protocol always yields status code 0, assume 200 - 0: 200, - - // Support: IE <=9 only - // #1450: sometimes IE returns 1223 when it should be 204 - 1223: 204 - }, - xhrSupported = jQuery.ajaxSettings.xhr(); - -support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); -support.ajax = xhrSupported = !!xhrSupported; - -jQuery.ajaxTransport( function( options ) { - var callback, errorCallback; - - // Cross domain only allowed if supported through XMLHttpRequest - if ( support.cors || xhrSupported && !options.crossDomain ) { - return { - send: function( headers, complete ) { - var i, - xhr = options.xhr(); - - xhr.open( - options.type, - options.url, - options.async, - options.username, - options.password - ); - - // Apply custom fields if provided - if ( options.xhrFields ) { - for ( i in options.xhrFields ) { - xhr[ i ] = options.xhrFields[ i ]; - } - } - - // Override mime type if needed - if ( options.mimeType && xhr.overrideMimeType ) { - xhr.overrideMimeType( options.mimeType ); - } - - // X-Requested-With header - // For cross-domain requests, seeing as conditions for a preflight are - // akin to a jigsaw puzzle, we simply never set it to be sure. - // (it can always be set on a per-request basis or even using ajaxSetup) - // For same-domain requests, won't change header if already provided. 
- if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { - headers[ "X-Requested-With" ] = "XMLHttpRequest"; - } - - // Set headers - for ( i in headers ) { - xhr.setRequestHeader( i, headers[ i ] ); - } - - // Callback - callback = function( type ) { - return function() { - if ( callback ) { - callback = errorCallback = xhr.onload = - xhr.onerror = xhr.onabort = xhr.ontimeout = - xhr.onreadystatechange = null; - - if ( type === "abort" ) { - xhr.abort(); - } else if ( type === "error" ) { - - // Support: IE <=9 only - // On a manual native abort, IE9 throws - // errors on any property access that is not readyState - if ( typeof xhr.status !== "number" ) { - complete( 0, "error" ); - } else { - complete( - - // File: protocol always yields status 0; see #8605, #14207 - xhr.status, - xhr.statusText - ); - } - } else { - complete( - xhrSuccessStatus[ xhr.status ] || xhr.status, - xhr.statusText, - - // Support: IE <=9 only - // IE9 has no XHR2 but throws on binary (trac-11426) - // For XHR2 non-text, let the caller handle it (gh-2498) - ( xhr.responseType || "text" ) !== "text" || - typeof xhr.responseText !== "string" ? - { binary: xhr.response } : - { text: xhr.responseText }, - xhr.getAllResponseHeaders() - ); - } - } - }; - }; - - // Listen to events - xhr.onload = callback(); - errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); - - // Support: IE 9 only - // Use onreadystatechange to replace onabort - // to handle uncaught aborts - if ( xhr.onabort !== undefined ) { - xhr.onabort = errorCallback; - } else { - xhr.onreadystatechange = function() { - - // Check readyState before timeout as it changes - if ( xhr.readyState === 4 ) { - - // Allow onerror to be called first, - // but that will not handle a native abort - // Also, save errorCallback to a variable - // as xhr.onerror cannot be accessed - window.setTimeout( function() { - if ( callback ) { - errorCallback(); - } - } ); - } - }; - } - - // Create the abort callback - callback = callback( "abort" ); - - try { - - // Do send the request (this may raise an exception) - xhr.send( options.hasContent && options.data || null ); - } catch ( e ) { - - // #14683: Only rethrow if this hasn't been notified as an error yet - if ( callback ) { - throw e; - } - } - }, - - abort: function() { - if ( callback ) { - callback(); - } - } - }; - } -} ); - - - - -// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) -jQuery.ajaxPrefilter( function( s ) { - if ( s.crossDomain ) { - s.contents.script = false; - } -} ); - -// Install script dataType -jQuery.ajaxSetup( { - accepts: { - script: "text/javascript, application/javascript, " + - "application/ecmascript, application/x-ecmascript" - }, - contents: { - script: /\b(?:java|ecma)script\b/ - }, - converters: { - "text script": function( text ) { - jQuery.globalEval( text ); - return text; - } - } -} ); - -// Handle cache's special case and crossDomain -jQuery.ajaxPrefilter( "script", function( s ) { - if ( s.cache === undefined ) { - s.cache = false; - } - if ( s.crossDomain ) { - s.type = "GET"; - } -} ); - -// Bind script tag hack transport -jQuery.ajaxTransport( "script", function( s ) { - - // This transport only deals with cross domain or forced-by-attrs requests - if ( s.crossDomain || s.scriptAttrs ) { - var script, callback; - return { - send: function( _, complete ) { - script = jQuery( " - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/base.html 
b/venv/Lib/site-packages/pygame/docs/generated/c_api/base.html deleted file mode 100644 index 6486af9..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/base.html +++ /dev/null @@ -1,365 +0,0 @@ - - - - - - - - - High level API exported by pygame.base — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/base.c¶

-

This extension module defines general purpose routines for starting and stopping SDL, as well as various conversion routines used elsewhere in pygame.

-

C header: src_c/include/pygame.h

-
-
-PyObject *pgExc_SDLError¶
-

This is pygame.error, the exception type used to raise SDL errors.

-
- -
-
-int pg_mod_autoinit(const char *modname)¶
-

Initializes the pygame module named modname. Return 1 on success, 0 on error, with a Python error set.

-
- -
-
-void pg_mod_autoquit(const char *modname)¶
-

Quits the pygame module named modname.

-
- -
-
-void pg_RegisterQuit(void (*f)(void))¶
-

Register function f as a callback on Pygame termination. -Multiple functions can be registered. -Functions are called in the reverse order they were registered.

-
- -
-
-int pg_IntFromObj(PyObject *obj, int *val)¶
-

Convert number like object obj to C int and place in argument val. -Return 1 on success, else 0. -No Python exceptions are raised.

-
- -
-
-int pg_IntFromObjIndex(PyObject *obj, int index, int *val)¶
-

Convert number like object at position index in sequence obj to C int and place in argument val. Return 1 on success, 0 on failure. No Python exceptions are raised.

-
- -
-
-int pg_TwoIntsFromObj(PyObject *obj, int *val1, int *val2)¶
-

Convert the two number like objects in length 2 sequence obj -to C int and place in arguments val1 and val2 respectively. -Return 1 on success, 0 on failure. -No Python exceptions are raised.

-
- -
-
-int pg_FloatFromObj(PyObject *obj, float *val)¶
-

Convert number like object obj to C float and place in argument val. -Returns 1 on success, 0 on failure. -No Python exceptions are raised.

-
- -
-
-int pg_FloatFromObjIndex(PyObject *obj, int index, float *val)¶
-

Convert number like object at position index in sequence obj to C float and place in argument val. Return 1 on success, else 0. No Python exceptions are raised.

-
- -
-
-int pg_TwoFloatsFromObj(PyObject *obj, float *val1, float *val2)¶
-

Convert the two number like objects in length 2 sequence obj -to C float and place in arguments val1 and val2 respectively. -Return 1 on success, else 0. -No Python exceptions are raised.

-
- -
-
-int pg_UintFromObj(PyObject *obj, Uint32 *val)¶
-

Convert number like object obj to unsigned 32 bit integer and place -in argument val. -Return 1 on success, else 0. -No Python exceptions are raised.

-
- -
-
-int pg_UintFromObjIndex(PyObject *obj, int _index, Uint32 *val)¶
-

Convert number like object at position _index in sequence obj to unsigned 32 bit integer and place in argument val. Return 1 on success, else 0. No Python exceptions are raised.

-
- -
-
-int pg_RGBAFromObj(PyObject *obj, Uint8 *RGBA)¶
-

Convert the color represented by object obj into a red, green, blue, alpha -length 4 C array RGBA. -The object must be a length 3 or 4 sequence of numbers having values -between 0 and 255 inclusive. -For a length 3 sequence an alpha value of 255 is assumed. -Return 1 on success, 0 otherwise. -No Python exceptions are raised.

-
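A minimal sketch of how these conversion helpers are typically used when parsing arguments in an extension function. The function name my_set_pixel and the error messages are illustrative and not part of the pygame API; the pg_* calls follow the signatures documented above and assume "pygame.h" is included and import_pygame_base() has run.

    static PyObject *
    my_set_pixel(PyObject *self, PyObject *args)
    {
        PyObject *pos_obj, *color_obj;
        int x, y;
        Uint8 rgba[4];

        if (!PyArg_ParseTuple(args, "OO", &pos_obj, &color_obj))
            return NULL;

        /* pg_TwoIntsFromObj accepts any length 2 sequence of numbers. */
        if (!pg_TwoIntsFromObj(pos_obj, &x, &y)) {
            PyErr_SetString(PyExc_TypeError, "expected a pair of numbers");
            return NULL;
        }

        /* pg_RGBAFromObj fills a 4-element Uint8 array; alpha defaults to 255. */
        if (!pg_RGBAFromObj(color_obj, rgba)) {
            PyErr_SetString(PyExc_TypeError, "expected a color");
            return NULL;
        }

        /* ... use x, y and rgba here ... */
        Py_RETURN_NONE;
    }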
- -
-
-type pg_buffer¶
-
-
-Py_buffer view¶
-

A standard buffer description

-
- -
-
-PyObject *consumer¶
-

The object holding the buffer

-
- -
-
-pybuffer_releaseproc release_buffer¶
-

A buffer release callback.

-
- -
- -
-
-PyObject *pgExc_BufferError¶
-

Python exception type raised for any pg_buffer related errors.

-
- -
-
-PyObject *pgBuffer_AsArrayInterface(Py_buffer *view_p)¶
-

Return a Python array interface object representation of buffer view_p. -On failure raise a Python exception and return NULL.

-
- -
-
-PyObject *pgBuffer_AsArrayStruct(Py_buffer *view_p)¶
-

Return a Python array struct object representation of buffer view_p. -On failure raise a Python exception and return NULL.

-
- -
-
-int pgObject_GetBuffer(PyObject *obj, pg_buffer *pg_view_p, int flags)¶
-

Request a buffer for object obj. -Argument flags are PyBUF options. -Return the buffer description in pg_view_p. -An object may support the Python buffer interface, the NumPy array interface, -or the NumPy array struct interface. -Return 0 on success, raise a Python exception and return -1 on failure.

-
- -
-
-void pgBuffer_Release(Pg_buffer *pg_view_p)¶
-

Release the Pygame pg_view_p buffer.

-
- -
-
-int pgDict_AsBuffer(Pg_buffer *pg_view_p, PyObject *dict, int flags)¶
-

Write the array interface dictionary buffer description dict into a Pygame -buffer description struct pg_view_p. -The flags PyBUF options describe the view type requested. -Return 0 on success, or raise a Python exception and return -1 on failure.

-
- -
-
-void import_pygame_base()¶
-

Import the pygame.base module C API into an extension module. -On failure raise a Python exception.

-
- -
-
-SDL_Window *pg_GetDefaultWindow(void)¶
-

Return the Pygame default SDL window created by a -pygame.display.set_mode() call, or NULL.

-

Availability: SDL 2.

-
- -
-
-void pg_SetDefaultWindow(SDL_Window *win)¶
-

Replace the Pygame default window with win. -The previous window, if any, is destroyed. -Argument win may be NULL. -This function is called by pygame.display.set_mode().

-

Availability: SDL 2.

-
- -
-
-pgSurfaceObject *pg_GetDefaultWindowSurface(void)¶
-

Return a borrowed reference to the Pygame default window display surface, -or NULL if no default window is open.

-

Availability: SDL 2.

-
- -
-
-void pg_SetDefaultWindowSurface(pgSurfaceObject *screen)¶
-

Replace the Pygame default display surface with object screen. -The previous surface object, if any, is invalidated. -Argument screen may be NULL. -This functions is called by pygame.display.set_mode().

-

Availability: SDL 2.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/bufferproxy.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/bufferproxy.html deleted file mode 100644 index 2b2b352..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/bufferproxy.html +++ /dev/null @@ -1,183 +0,0 @@ - - - - - - - - - Class BufferProxy API exported by pgyame.bufferproxy — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/bufferproxy.c¶

-

This extension module defines the Python type pygame.BufferProxy (pygame object to export a surface buffer through an array protocol).

-

Header file: src_c/include/pygame_bufferproxy.h

-
-
-PyTypeObject *pgBufproxy_Type¶
-

The pygame buffer proxy object type pygame.BufferProxy.

-
- -
-
-int pgBufproxy_Check(PyObject *x)¶
-

Return true if Python object x is a pygame.BufferProxy instance, -false otherwise. -This will return false on pygame.BufferProxy subclass instances as well.

-
- -
-
-PyObject *pgBufproxy_New(PyObject *obj, getbufferproc get_buffer)¶
-

Return a new pygame.BufferProxy instance. -Argument obj is the Python object that has its data exposed. -It may be NULL. -Argument get_buffer is the pg_buffer get callback. -It must not be NULL. -On failure raise a Python error and return NULL.

-
- -
-
-PyObject *pgBufproxy_GetParent(PyObject *obj)¶
-

Return the Python object wrapped by buffer proxy obj. -Argument obj must not be NULL. -On failure, raise a Python error and return NULL.

-
- -
-
-int pgBufproxy_Trip(PyObject *obj)¶
-

Cause the buffer proxy object obj to create a pg_buffer view of its parent. -Argument obj must not be NULL. -Return 0 on success, otherwise raise a Python error and return -1.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/cdrom.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/cdrom.html deleted file mode 100644 index 07f524a..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/cdrom.html +++ /dev/null @@ -1,178 +0,0 @@ - - - - - - - - - API exported by pygame.cdrom — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/cdrom.c¶

-

The pygame.cdrom (pygame module for audio cdrom control) extension module. Only available for SDL 1.

-

Header file: src_c/include/pygame.h

-
-
-type pgCDObject¶
-

The pygame.cdrom.CD instance C struct.

-
- -
-
-PyTypeObject pgCD_Type¶
-

The pygame.cdrom.CD Python type.

-
- -
-
-PyObject *pgCD_New(int id)¶
-

Return a new pygame.cdrom.CD instance for CD drive id. -On error raise a Python exception and return NULL.

-
- -
-
-int pgCD_Check(PyObject *x)¶
-

Return true if x is a pygame.cdrom.CD instance. -Will return false for a subclass of CD. -This is a macro. No check is made that x is not NULL.

-
- -
-
-int pgCD_AsID(PyObject *x)¶
-

Return the CD identifier associated with the pygame.cdrom.CD -instance x. -This is a macro. No check is made that x is a pygame.cdrom.CD -instance or is not NULL.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/color.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/color.html deleted file mode 100644 index 7fef935..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/color.html +++ /dev/null @@ -1,172 +0,0 @@ - - - - - - - - - Class Color API exported by pygame.color — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/color.c¶

-

This extension module defines the Python type pygame.Color (pygame object for color representations).

-

Header file: src_c/include/pygame.h

-
-
-PyTypeObject *pgColor_Type¶
-

The Pygame color object type pygame.Color.

-
- -
-
-int pgColor_Check(PyObject *obj)¶
-

Return true if obj is an instance of type pgColor_Type, -but not a pgColor_Type subclass instance. -This macro does not check if obj is not NULL or indeed a Python type.

-
- -
-
-PyObject *pgColor_New(Uint8 rgba[])¶
-

Return a new pygame.Color instance for the four element array rgba. On failure, raise a Python exception and return NULL.

-
- -
-
-PyObject *pgColor_NewLength(Uint8 rgba[], Uint8 length)¶
-

Return a new pygame.Color instance having length elements, -with element values taken from the first length elements of array rgba. -Argument length must be between 1 and 4 inclusive. -On failure, raise a Python exception and return NULL.

-
- -
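A short sketch, assuming it runs inside an extension function with "pygame.h" included, of building Color objects from a C byte array with the two constructors above:

    Uint8 rgba[4] = {255, 0, 0, 255};

    PyObject *red = pgColor_New(rgba);           /* uses all four elements */
    PyObject *rgb = pgColor_NewLength(rgba, 3);  /* uses only the first three */
    if (red == NULL || rgb == NULL) {
        Py_XDECREF(red);
        Py_XDECREF(rgb);
        return NULL;                             /* exception already raised */
    }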
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/display.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/display.html deleted file mode 100644 index c83184f..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/display.html +++ /dev/null @@ -1,177 +0,0 @@ - - - - - - - - - API exported by pygame.display — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/display.c¶

-

This is the pygame.display (pygame module to control the display window and screen) extension module.

-

Header file: src_c/include/pygame.h

-
-
-type pgVidInfoObject¶
-

A pygame object that wraps an SDL_VideoInfo struct. This is the object returned by pygame.display.Info().

-
- -
-
-PyTypeObject *pgVidInfo_Type¶
-

The pgVidInfoObject object Python type.

-
- -
-
-SDL_VideoInfo pgVidInfo_AsVidInfo(PyObject *obj)¶
-

Return the SDL_VideoInfo field of obj, a pgVidInfo_Type instance. -This macro does not check that obj is not NULL or an actual pgVidInfoObject object.

-
- -
-
-PyObject *pgVidInfo_New(SDL_VideoInfo *i)¶
-

Return a new pgVidInfoObject object for the SDL_VideoInfo i. -On failure, raise a Python exception and return NULL.

-
- -
-
-int pgVidInfo_Check(PyObject *x)¶
-

Return true if x is a pgVidInfo_Type instance

-

Will return false if x is a subclass of pgVidInfo_Type. -This macro does not check that x is not NULL.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/event.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/event.html deleted file mode 100644 index be5bf1d..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/event.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - - - - API exported by pygame.event — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/event.c¶

-

The extension module pygame.event (pygame module for interacting with events and queues).

-

Header file: src_c/include/pygame.h

-
-
-type pgEventObject¶
-

The pygame.event.EventType object C struct.

-
-
-int type¶
-

The event type code.

-
- -
- -
-
-type pgEvent_Type¶
-

The pygame event object type pygame.event.EventType.

-
- -
-
-int pgEvent_Check(PyObject *x)¶
-

Return true if x is a pygame event instance

-

Will return false if x is a subclass of event. -This is a macro. No check is made that x is not NULL.

-
- -
-
-PyObject *pgEvent_New(SDL_Event *event)¶
-

Return a new pygame event instance for the SDL event. -If event is NULL then create an empty event object. -On failure raise a Python exception and return NULL.

-
- -
-
-PyObject *pgEvent_New2(int type, PyObject *dict)¶
-

Return a new pygame event instance of SDL type and with -attribute dictionary dict. -If dict is NULL an empty attribute dictionary is created. -On failure raise a Python exception and return NULL.

-
- -
-
-int pgEvent_FillUserEvent(pgEventObject *e, SDL_Event *event)¶
-

Fill SDL event event with information from pygame user event instance e. -Return 0 on success, -1 otherwise.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/freetype.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/freetype.html deleted file mode 100644 index db0f46a..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/freetype.html +++ /dev/null @@ -1,180 +0,0 @@ - - - - - - - - - API exported by pygame._freetype — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/_freetype.c¶

-

This extension module defines the Python type pygame.freetype.Font (create a new Font instance from a supported font file).

-

Header file: src_c/include/pygame_freetype.h

-
-
-type pgFontObject¶
-

The pygame.freetype.Font instance C struct.

-
- -
-
-type pgFont_Type¶
-

The pygame.freetype.Font Python type.

-
- -
-
-PyObject *pgFont_New(const char *filename, long font_index)¶
-

Open the font file with path filename and return a new pygame.freetype.Font instance for that font. Set font_index to 0 unless the file contains multiple, indexed fonts. On error raise a Python exception and return NULL.

-
- -
-
-int pgFont_Check(PyObject *x)¶
-

Return true if x is a pygame.freetype.Font instance. -Will return false for a subclass of Font. -This is a macro. No check is made that x is not NULL.

-
- -
-
-int pgFont_IS_ALIVE(PyObject *o)¶
-

Return true if pygame.freetype.Font object o -is an open font file. -This is a macro. No check is made that o is not NULL -or not a Font instance.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/mixer.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/mixer.html deleted file mode 100644 index 2128b79..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/mixer.html +++ /dev/null @@ -1,213 +0,0 @@ - - - - - - - - - API exported by pygame.mixer — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/mixer.c¶

-

Python types and module startup/shutdown functions defined in the pygame.mixer (pygame module for loading and playing sounds) extension module.

-

Header file: src_c/include/pygame_mixer.h

-
-
-type pgSoundObject¶
-

The pygame.mixer.Sound instance C structure.

-
- -
-
-PyTypeObject *pgSound_Type¶
-

The pygame.mixer.Sound Python type.

-
- -
-
-PyObject *pgSound_New(Mix_Chunk *chunk)¶
-

Return a new pygame.mixer.Sound instance for the SDL mixer chunk chunk. -On failure, raise a Python exception and return NULL.

-
- -
-
-int pgSound_Check(PyObject *obj)¶
-

Return true if obj is an instance of type pgSound_Type, -but not a pgSound_Type subclass instance. -A macro.

-
- -
-
-Mix_Chunk *pgSound_AsChunk(PyObject *x)¶
-

Return the SDL Mix_Chunk struct associated with the -pgSound_Type instance x. -A macro that does no NULL or Python type check on x.

-
- -
-
-type pgChannelObject¶
-

The pygame.mixer.Channel instance C structure.

-
- -
-
-PyTypeObject *pgChannel_Type¶
-

The pygame.mixer.Channel Python type.

-
- -
-
-PyObject *pgChannel_New(int channelnum)¶
-

Return a new pygame.mixer.Channel instance for the SDL mixer -channel channelnum. -On failure, raise a Python exception and return NULL.

-
- -
-
-int pgChannel_Check(PyObject *obj)¶
-

Return true if obj is an instance of type pgChannel_Type, -but not a pgChannel_Type subclass instance. -A macro.

-
- -
-
-int pgChannel_AsInt(PyObject *x)¶
-

Return the SDL mixer music channel number associated with pgChannel_Type instance x. -A macro that does no NULL or Python type check on x.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/rect.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/rect.html deleted file mode 100644 index 21708e3..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/rect.html +++ /dev/null @@ -1,202 +0,0 @@ - - - - - - - - - Class Rect API exported by pygame.rect — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/rect.c¶

-

This extension module defines the Python type pygame.Rect (pygame object for storing rectangular coordinates).

-

Header file: src_c/include/pygame.h

-
-
-type pgRectObject¶
-
-
-SDL_Rect r¶
-
- -

The Pygame rectangle type instance.

-
- -
-
-PyTypeObject *pgRect_Type¶
-

The Pygame rectangle object type pygame.Rect.

-
- -
-
-SDL_Rect pgRect_AsRect(PyObject *obj)¶
-

A macro to access the SDL_Rect field of a pygame.Rect instance.

-
- -
-
-PyObject *pgRect_New(SDL_Rect *r)¶
-

Return a new pygame.Rect instance from the SDL_Rect r. -On failure, raise a Python exception and return NULL.

-
- -
-
-PyObject *pgRect_New4(int x, int y, int w, int h)¶
-

Return a new pygame.Rect instance with position (x, y) and -size (w, h). -On failure raise a Python exception and return NULL.

-
- -
-
-SDL_Rect *pgRect_FromObject(PyObject *obj, SDL_Rect *temp)¶
-

Translate a Python rectangle representation to a Pygame SDL_Rect. A rectangle can be a length 4 sequence of integers (x, y, w, h), or a length 2 sequence of position (x, y) and size (w, h), or a length 1 tuple containing a rectangle representation, or have a rect method that returns a rectangle. Pass a pointer to a locally declared SDL_Rect as temp. Do not rely on this being filled in; use the function's return value instead. On success, return a pointer to an SDL_Rect representation of the rectangle, else return NULL. No Python exceptions are raised.

-
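A minimal sketch of the calling pattern described above; my_area is a hypothetical extension function and the TypeError message is illustrative:

    static PyObject *
    my_area(PyObject *self, PyObject *arg)
    {
        SDL_Rect temp;
        SDL_Rect *r = pgRect_FromObject(arg, &temp);

        if (r == NULL) {
            /* pgRect_FromObject raises no exception itself, so set one here. */
            PyErr_SetString(PyExc_TypeError, "expected a rect-style object");
            return NULL;
        }

        /* Use the returned pointer rather than temp, as advised above. */
        return PyLong_FromLong((long)r->w * (long)r->h);
    }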
- -
-
-void pgRect_Normalize(SDL_Rect *rect)¶
-

Normalize the given rect. A rect with a negative size (negative width and/or -height) will be adjusted to have a positive size.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/rwobject.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/rwobject.html deleted file mode 100644 index f767e41..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/rwobject.html +++ /dev/null @@ -1,207 +0,0 @@ - - - - - - - - - API exported by pygame.rwobject — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/rwobject.c¶

-

This extension module implements functions for wrapping a Python file like -object in a SDL_RWops struct for SDL file access.

-

Header file: src_c/include/pygame.h

-
-
-SDL_RWops *pgRWops_FromObject(PyObject *obj)¶
-

Return a SDL_RWops struct filled to access obj. -If obj is a string then let SDL open the file it names. -Otherwise, if obj is a Python file-like object then use its read, write, -seek, tell, and close methods. If threads are available, -the Python GIL is acquired before calling any of the obj methods. -On error raise a Python exception and return NULL.

-
- -
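A sketch of the usual lifetime of an SDL_RWops obtained this way, paired with pgRWops_ReleaseObject() documented further down; obj stands for any path string or file-like object the caller already holds:

    SDL_RWops *rw = pgRWops_FromObject(obj);
    if (rw == NULL) {
        return NULL;                 /* Python exception already raised */
    }

    /* ... hand rw to an SDL loader here ... */

    if (pgRWops_ReleaseObject(rw) < 0) {
        return NULL;                 /* exception raised by the release */
    }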
-
-SDL_RWops *pgRWops_FromFileObject(PyObject *obj)¶
-

Return a SDL_RWops struct filled to access the Python file-like object obj. -Uses its read, write, seek, tell, and close methods. -If threads are available, the Python GIL is acquired before calling any of the obj methods. -On error raise a Python exception and return NULL.

-
- -
-
-int pgRWops_IsFileObject(SDL_RWops *rw)¶
-

Return true if rw is a Python file-like object wrapper returned by pgRWops_FromObject() -or pgRWops_FromFileObject().

-
- -
-
-char *pgRWops_GetFileExtension(SDL_RWops *rw)¶
-

Return a string that contains the file extension of the original file -loaded into the SDL_RWops object, or NULL if the SDL_RWops object comes -from a file object.

-
- -
-
-int pgRWops_ReleaseObject(SDL_RWops *context)¶
-

Free a SDL_RWops struct. If it is attached to a Python file-like object, decrement its -refcount. Otherwise, close the file handle. -Return 0 on success. On error, raise a Python exception and return a negative value.

-
- -
-
-PyObject *pg_EncodeFilePath(PyObject *obj, PyObject *eclass)¶
-

Return the file path obj as a byte string properly encoded for the OS. -Null bytes are forbidden in the encoded file path. -On error raise a Python exception and return NULL, -using eclass as the exception type if it is not NULL. -If obj is NULL assume an exception was already raised and pass it on.

-
- -
-
-PyObject *pg_EncodeString(PyObject *obj, const char *encoding, const char *errors, PyObject *eclass)¶
-

Return string obj as an encoded byte string. -The C string arguments encoding and errors are the same as for -PyUnicode_AsEncodedString(). -On error raise a Python exception and return NULL, -using eclass as the exception type if it is not NULL. -If obj is NULL assume an exception was already raised and pass it on.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/slots.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/slots.html deleted file mode 100644 index 852765d..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/slots.html +++ /dev/null @@ -1,157 +0,0 @@ - - - - - - - - - Slots and c_api - Making functions and data available from other modules — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-

One example is pg_RGBAFromObj where the implementation is defined in base.c, and also exported in base.c (and _pygame.h).

-

base.c has this exposing the pg_RGBAFromObj function to the c_api structure:

-
-

c_api[12] = pg_RGBAFromObj;

-
-

Then in src_c/include/_pygame.h there is a corresponding

-
-

#define pg_RGBAFromObj.

-
-

Also in _pygame.h, it needs to define the number of slots the base module uses. This is PYGAMEAPI_BASE_NUMSLOTS. So if you were adding another function, you need to increment this PYGAMEAPI_BASE_NUMSLOTS number.

-

Then to use the pg_RGBAFromObj in other files,

-
    -
  1. include the "pygame.h" file,

  2. make sure base is imported with:

     import_pygame_base();
    -
  4. -
-

Examples that use pg_RGBAFromObj are: _freetype.c, color.c, gfxdraw.c, and surface.c.

-
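Putting the pieces above together, a sketch of the full slot round trip for pg_RGBAFromObj. The slot index 12 comes from the example above; the exact macro used to read a slot back out differs between pygame versions, so the #define line is only an approximation of what _pygame.h does, not the literal source:

    /* base.c: publish the function pointer in the exported slot array */
    c_api[12] = pg_RGBAFromObj;

    /* _pygame.h (sketch): hand the same name back to other modules as a
       macro that reads the imported slot */
    #define pg_RGBAFromObj \
        (*(int (*)(PyObject *, Uint8 *))PYGAMEAPI_GET_SLOT(base, 12))

    /* consuming module's init code: import base's C API first */
    import_pygame_base();
    if (PyErr_Occurred()) {
        return NULL;
    }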
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/surface.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/surface.html deleted file mode 100644 index 4013702..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/surface.html +++ /dev/null @@ -1,194 +0,0 @@ - - - - - - - - - Class Surface API exported by pygame.surface — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/surface.c¶

-

This extension module defines the Python type pygame.Surface (pygame object for representing images).

-

Header file: src_c/include/pygame.h

-
-
-type pgSurfaceObject¶
-

A pygame.Surface instance.

-
- -
-
-PyTypeObject *pgSurface_Type¶
-

The pygame.Surface Python type.

-
- -
-
-int pgSurface_Check(PyObject *x)¶
-

Return true if x is a pygame.Surface instance

-

Will return false if x is a subclass of Surface. -This is a macro. No check is made that x is not NULL.

-
- -
-
-pgSurfaceObject *pgSurface_New(SDL_Surface *s)¶
-

Return a new pygame surface instance for SDL surface s. Return NULL on error.

-
- -
-
-SDL_Surface *pgSurface_AsSurface(PyObject *x)¶
-

Return a pointer to the SDL surface represented by the pygame Surface instance x.

-

This is a macro. Argument x is assumed to be a Surface, or subclass of -Surface, instance.

-
- -
-
-int pgSurface_Blit(PyObject *dstobj, PyObject *srcobj, SDL_Rect *dstrect, SDL_Rect *srcrect, int the_args)¶
-

Blit the srcrect portion of Surface srcobj onto Surface dstobj at dstrect.

-

Argument the_args indicates the type of blit to perform: normal blit (0), PYGAME_BLEND_ADD, PYGAME_BLEND_SUB, PYGAME_BLEND_MULT, PYGAME_BLEND_MIN, PYGAME_BLEND_MAX, PYGAME_BLEND_RGBA_ADD, PYGAME_BLEND_RGBA_SUB, PYGAME_BLEND_RGBA_MULT, PYGAME_BLEND_RGBA_MIN, PYGAME_BLEND_RGBA_MAX, PYGAME_BLEND_ALPHA_SDL2 and PYGAME_BLEND_PREMULTIPLIED. Argument dstrect is updated to the actual area on dstobj affected by the blit.

-

The C version of the pygame.Surface.blit() method. -Return 1 on success, 0 on an exception.

-
- -
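A sketch of a plain (the_args == 0) blit driven from C with the signature above; copy_region is a hypothetical helper and both objects are assumed to already be pygame Surfaces:

    static int
    copy_region(PyObject *dstobj, PyObject *srcobj, SDL_Rect *area, int x, int y)
    {
        SDL_Rect dstrect = {x, y, area->w, area->h};

        /* returns 1 on success, 0 if a Python exception was raised */
        return pgSurface_Blit(dstobj, srcobj, &dstrect, area, 0);
    }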
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/surflock.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/surflock.html deleted file mode 100644 index f25e3b4..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/surflock.html +++ /dev/null @@ -1,231 +0,0 @@ - - - - - - - - - API exported by pygame.surflock — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_c/surflock.c¶

-

This extension module implements SDL surface locking for the pygame.Surface (pygame object for representing images) type.

-

Header file: src_c/include/pygame.h

-
-
-type pgLifetimeLockObject¶
-
-
-PyObject *surface¶
-

An SDL locked pygame surface.

-
- -
-
-PyObject *lockobj¶
-

The Python object which owns the lock on the surface. -This field does not own a reference to the object.

-
- -

The lifetime lock type instance. -A lifetime lock pairs a locked pygame surface with -the Python object that locked the surface for modification. -The lock is removed automatically when the lifetime lock instance -is garbage collected.

-
- -
-
-PyTypeObject *pgLifetimeLock_Type¶
-

The pygame internal surflock lifetime lock object type.

-
- -
-
-int pgLifetimeLock_Check(PyObject *x)¶
-

Return true if Python object x is a pgLifetimeLock_Type instance, -false otherwise. -This will return false on pgLifetimeLock_Type subclass instances as well.

-
- -
-
-void pgSurface_Prep(pgSurfaceObject *surfobj)¶
-

If surfobj is a subsurface, then lock the parent surface, with surfobj as the owner of the lock.

-
- -
-
-void pgSurface_Unprep(pgSurfaceObject *surfobj)¶
-

If surfobj is a subsurface, then release its lock on the parent surface.

-
- -
-
-int pgSurface_Lock(pgSurfaceObject *surfobj)¶
-

Lock pygame surface surfobj, with surfobj owning its own lock.

-
- -
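A sketch of pairing the lock and unlock calls around raw pixel access; surf is assumed to be a pgSurfaceObject * held by the caller, and treating a non-zero return as success is an assumption, since the return convention is not stated above:

    if (pgSurface_Lock(surf)) {
        SDL_Surface *sdl_surf = pgSurface_AsSurface((PyObject *)surf);

        /* ... read or write sdl_surf->pixels here ... */

        pgSurface_UnLock(surf);
    }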
-
-int pgSurface_LockBy(pgSurfaceObject *surfobj, PyObject *lockobj)¶
-

Lock pygame surface surfobj, with Python object lockobj owning the lock.

-

The surface will keep a weak reference to object lockobj, and eventually remove the lock on itself if lockobj is garbage collected. However, it is best if lockobj also keeps a reference to the locked surface and calls pgSurface_UnLockBy() when finished with the surface.

-
- -
-
-int pgSurface_UnLock(pgSurfaceObject *surfobj)¶
-

Remove the pygame surface surfobj object's lock on itself.

-
- -
-
-int pgSurface_UnLockBy(pgSurfaceObject *surfobj, PyObject *lockobj)¶
-

Remove the lock on pygame surface surfobj owned by Python object lockobj.

-
- -
-
-PyObject *pgSurface_LockLifetime(PyObject *surfobj, PyObject *lockobj)¶
-

Lock pygame surface surfobj for Python object lockobj and return a -new pgLifetimeLock_Type instance for the lock.

-

This function is not called anywhere within pygame. -It and pgLifetimeLock_Type are candidates for removal.

-
- -
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/c_api/version.html b/venv/Lib/site-packages/pygame/docs/generated/c_api/version.html deleted file mode 100644 index e05ad33..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/c_api/version.html +++ /dev/null @@ -1,173 +0,0 @@ - - - - - - - - - API exported by pygame.version — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

src_py/version.py¶

-

Header file: src_c/include/pygame.h

-

Version information can be retrieved at compile-time using these macros.

-
-

New in pygame 1.9.5.

-
-
-
-PG_MAJOR_VERSION¶
-
- -
-
-PG_MINOR_VERSION¶
-
- -
-
-PG_PATCH_VERSION¶
-
- -
-
-PG_VERSIONNUM(MAJOR, MINOR, PATCH)¶
-

Returns an integer representing the given version.

-
- -
-
-PG_VERSION_ATLEAST(MAJOR, MINOR, PATCH)¶
-

Returns true if the current version is at least equal -to the specified version.

-
- -
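A sketch of compile-time feature gating with these macros, for example to guard code that relies on SDL2-only helpers:

    #include "pygame.h"

    #if PG_VERSION_ATLEAST(2, 0, 0)
        /* safe to use SDL2-only helpers such as pg_GetDefaultWindow() */
    #else
        /* fall back to pygame 1.9.x behaviour */
    #endif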
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/filepaths.html b/venv/Lib/site-packages/pygame/docs/generated/filepaths.html deleted file mode 100644 index be6419c..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/filepaths.html +++ /dev/null @@ -1,145 +0,0 @@ - - - - - - - - - File Path Function Arguments — pygame v2.1.2 documentation - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-

A pygame function or method which takes a file path argument will accept either a Unicode or a byte (8-bit or ASCII character) string. Unicode strings are translated to Python's default filesystem encoding, as returned by sys.getfilesystemencoding(). A Unicode code point above U+FFFF (\uFFFF) can be coded directly with a 32-bit escape sequence (\Uxxxxxxxx), even for Python interpreters built with a UCS-2 (16-bit character) Unicode type. Byte strings are passed to the operating system unchanged.

-

Null characters (\x00) are not permitted in the path and raise an exception. An exception is also raised if a Unicode file path cannot be encoded. How UTF-16 surrogate codes are handled is Python-interpreter-dependent. Use UTF-32 code points and 32-bit escape sequences instead. The exception types are function-dependent.

-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/genindex.html b/venv/Lib/site-packages/pygame/docs/generated/genindex.html deleted file mode 100644 index 45d036c..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/genindex.html +++ /dev/null @@ -1,2604 +0,0 @@ - - - - - - - - Index — pygame v2.1.2 documentation - - - - - - - - - - - - -
-
-
- - -

Index

- -
- _ - | A - | B - | C - | D - | E - | F - | G - | H - | I - | J - | K - | L - | M - | N - | O - | P - | Q - | R - | S - | T - | U - | V - | W - -
-

_

- - - -
- -

A

- - - -
- -

B

- - - -
- -

C

- - - -
- -

D

- - - -
- -

E

- - - -
- -

F

- - - -
- -

G

- - - -
- -

H

- - - -
- -

I

- - - -
- -

J

- - - -
- -

K

- - - -
- -

L

- - - -
- -

M

- - - -
- -

N

- - - -
- -

O

- - - -
- -

P

- - - -
- -

Q

- - -
- -

R

- - - -
- -

S

- - - -
- -

T

- - - -
- -

U

- - - -
- -

V

- - - -
- -

W

- - - -
- - - -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/index.html b/venv/Lib/site-packages/pygame/docs/generated/index.html deleted file mode 100644 index 6cc1417..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/index.html +++ /dev/null @@ -1,258 +0,0 @@ - - - - - - - - - Pygame Front Page — pygame v2.1.2 documentation - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-

Pygame Front Page¶

-
-
-
-

Documents¶

-
-
Readme

Basic information about pygame: what it is, who is involved, and where to find it.

-
-
Install

Steps needed to compile pygame on several platforms. -Also help on finding and installing prebuilt binaries for your system.

-
-
File Path Function Arguments

How pygame handles file system paths.

-
-
LGPL License

This is the license pygame is distributed under. -It provides for pygame to be distributed with open source and commercial software. -Generally, if pygame is not changed, it can be used with any type of program.

-
-
-
-
-

Tutorials¶

-
-
Introduction to Pygame

An introduction to the basics of pygame. -This is written for users of Python and appeared in volume two of the Py magazine.

-
-
Import and Initialize

The beginning steps on importing and initializing pygame. -The pygame package is made of several modules. -Some modules are not included on all platforms.

-
-
How do I move an Image?

A basic tutorial that covers the concepts behind 2D computer animation. -Information about drawing and clearing objects to make them appear animated.

-
-
Chimp Tutorial, Line by Line

The pygame examples include a simple program with an interactive fist and a chimpanzee. -This was inspired by the annoying flash banner of the early 2000s. -This tutorial examines every line of code used in the example.

-
-
Sprite Module Introduction

Pygame includes a higher level sprite module to help organize games. The sprite module includes several classes that help manage details found in almost all game types. The Sprite classes are a bit more advanced than the regular pygame modules, and need more understanding to be used properly.

-
-
Surfarray Introduction

Pygame uses the NumPy python module to allow efficient per-pixel effects on images. Using the surface arrays is an advanced feature that allows custom effects and filters. This also examines some of the simple effects from the pygame example, arraydemo.py.

-
-
Camera Module Introduction

Pygame, as of 1.9, has a camera module that allows you to capture images, -watch live streams, and do some basic computer vision. -This tutorial covers those use cases.

-
-
Newbie Guide

A list of thirteen helpful tips for people to get comfortable using pygame.

-
-
Making Games Tutorial

A large tutorial that covers the bigger topics needed to create an entire game.

-
-
Display Modes

Getting a display surface for the screen.

-
-
한국어 튜토리얼 (Korean Tutorial)

빨간블록 검은블록 (Red Block, Black Block)

-
-
-
-
-

Reference¶

-
-
Index

A list of all functions, classes, and methods in the pygame package.

-
-
pygame.BufferProxy

An array protocol view of surface pixels

-
-
pygame.Color

Color representation.

-
-
pygame.cursors

Loading and compiling cursor images.

-
-
pygame.display

Configure the display surface.

-
-
pygame.draw

Drawing simple shapes like lines and ellipses to surfaces.

-
-
pygame.event

Manage the incoming events from various input devices and the windowing platform.

-
-
pygame.examples

Various programs demonstrating the use of individual pygame modules.

-
-
pygame.font

Loading and rendering TrueType fonts.

-
-
pygame.freetype

Enhanced pygame module for loading and rendering font faces.

-
-
pygame.gfxdraw

Anti-aliasing draw functions.

-
-
pygame.image

Loading, saving, and transferring of surfaces.

-
-
pygame.joystick

Manage the joystick devices.

-
-
pygame.key

Manage the keyboard device.

-
-
pygame.locals

Pygame constants.

-
-
pygame.mixer

Load and play sounds

-
-
pygame.mouse

Manage the mouse device and display.

-
-
pygame.mixer.music

Play streaming music tracks.

-
-
pygame

Top level functions to manage pygame.

-
-
pygame.PixelArray

Manipulate image pixel data.

-
-
pygame.Rect

Flexible container for a rectangle.

-
-
pygame.scrap

Native clipboard access.

-
-
pygame.sndarray

Manipulate sound sample data.

-
-
pygame.sprite

Higher level objects to represent game images.

-
-
pygame.Surface

Objects for images and the screen.

-
-
pygame.surfarray

Manipulate image pixel data.

-
-
pygame.tests

Test pygame.

-
-
pygame.time

Manage timing and framerate.

-
-
pygame.transform

Resize and move images.

-
-
pygame C API

The C api shared amongst pygame extension modules.

-
-
Search Page

Search pygame documents by keyword.

-
-
-
-
- - -

-
-Edit on GitHub -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/py-modindex.html b/venv/Lib/site-packages/pygame/docs/generated/py-modindex.html deleted file mode 100644 index 45a5aa1..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/py-modindex.html +++ /dev/null @@ -1,251 +0,0 @@ - - - - - - - - Python Module Index — pygame v2.1.2 documentation - - - - - - - - - - - - - - - -
-
-
- - -

Python Module Index

- -
- . | - p -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 
- .
    - pygame._sdl2.controller - pygame module to work with controllers
    - pygame._sdl2.touch - pygame module to work with touch input
    - pygame._sdl2.video - Experimental pygame module for porting new SDL video systems
    - pygame.camera - pygame module for camera use
    - pygame.cdrom - pygame module for audio cdrom control
    - pygame.cursors - pygame module for cursor resources
    - pygame.display - pygame module to control the display window and screen
    - pygame.draw - pygame module for drawing shapes
    - pygame.event - pygame module for interacting with events and queues
    - pygame.examples - module of example programs
    - pygame.fastevent - pygame module for interacting with events and queues from multiple -threads.
    - pygame.font - pygame module for loading and rendering fonts
    - pygame.freetype - Enhanced pygame module for loading and rendering computer fonts
    - pygame.gfxdraw - pygame module for drawing shapes
    - pygame.image - pygame module for image transfer
    - pygame.joystick - Pygame module for interacting with joysticks, gamepads, and trackballs.
    - pygame.key - pygame module to work with the keyboard
    - pygame.locals - pygame constants
    - pygame.mask - pygame module for image masks.
    - pygame.math - pygame module for vector classes
    - pygame.midi - pygame module for interacting with midi input and output.
    - pygame.mixer - pygame module for loading and playing sounds
    - pygame.mixer.music - pygame module for controlling streamed audio
    - pygame.mouse - pygame module to work with the mouse
    - pygame.pixelcopy - pygame module for general pixel array copying
    - pygame.scrap - pygame module for clipboard support.
    - pygame.sndarray - pygame module for accessing sound sample data
    - pygame.sprite - pygame module with basic game object classes
    - pygame.surfarray - pygame module for accessing surface pixel data using array interfaces
    - pygame.tests - Pygame unit test suite package
    - pygame.time - pygame module for monitoring time
    - pygame.transform - pygame module to transform surfaces
    - pygame.version - small module containing version information
 
- p
- pygame - the top level pygame package
- - -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/bufferproxy.html b/venv/Lib/site-packages/pygame/docs/generated/ref/bufferproxy.html deleted file mode 100644 index d41ad94..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/bufferproxy.html +++ /dev/null @@ -1,283 +0,0 @@ - - - - - - - - - pygame.BufferProxy — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.BufferProxy¶
-
-
pygame object to export a surface buffer through an array protocol
-
BufferProxy(<parent>) -> BufferProxy
-
- ----- - - - - - - - - - - - - - - - - - - -
-—Return wrapped exporting object.
-—The size, in bytes, of the exported buffer.
-—A copy of the exported buffer as a single block of bytes.
-—Write raw bytes to object buffer.
-

BufferProxy is a pygame support type, designed as the return value of the Surface.get_buffer() and Surface.get_view() methods. For all Python versions a BufferProxy object exports a C struct and Python level array interface on behalf of its parent object's buffer. A new buffer interface is also exported. In pygame, BufferProxy is key to implementing the pygame.surfarray (pygame module for accessing surface pixel data using array interfaces) module.

-

BufferProxy instances can be created directly from Python code, -either for a parent that exports an interface, or from a Python dict -describing an object's buffer layout. The dict entries are based on the -Python level array interface mapping. The following keys are recognized:

-
-
-
"shape"tuple

The length of each array dimension as a tuple of integers. The -length of the tuple is the number of dimensions in the array.

-
-
"typestr"string

The array element type as a length 3 string. The first character gives byteorder, '<' for little-endian, '>' for big-endian, and '|' for not applicable. The second character is the element type, 'i' for signed integer, 'u' for unsigned integer, 'f' for floating point, and 'V' for a chunk of bytes. The third character gives the byte size of the element, from '1' to '9' bytes. So, for example, "<u4" is an unsigned 4-byte little-endian integer, such as a 32-bit pixel on a PC, while "|V3" would represent a 24-bit pixel, which has no integer equivalent.

-
-
"data"tuple

The physical buffer start address and a read-only flag as a length -2 tuple. The address is an integer value, while the read-only flag -is a bool—False for writable, True for read-only.

-
-
"strides"tuple(optional)

Array stride information as a tuple of integers. It is required -only of non C-contiguous arrays. The tuple length must match -that of "shape".

-
-
"parent"object(optional)

The exporting object. It can be used to keep the parent object -alive while its buffer is visible.

-
-
"before"callable(optional)

Callback invoked when the BufferProxy instance -exports the buffer. The callback is given one argument, the -"parent" object if given, otherwise None. -The callback is useful for setting a lock on the parent.

-
-
"after"callable(optional)

Callback invoked when an exported buffer is released. The callback is passed one argument, the "parent" object if given, otherwise None. The callback is useful for releasing a lock on the parent.

-
-
-
-

The BufferProxy class supports subclassing, instance variables, and weak -references.

-
-

New in pygame 1.8.0.

-
-
-

Extended in pygame 1.9.2.

-
-
-
-parent¶
-
-
Return wrapped exporting object.
-
parent -> Surface
-
parent -> <parent>
-
-

The Surface which returned the BufferProxy object or -the object passed to a BufferProxy call.

-
- -
-
-length¶
-
-
The size, in bytes, of the exported buffer.
-
length -> int
-
-

The number of valid bytes of data exported. For discontinuous data, -that is data which is not a single block of memory, the bytes within -the gaps are excluded from the count. This property is equivalent to -the Py_buffer C struct len field.

-
- -
-
-raw¶
-
-
A copy of the exported buffer as a single block of bytes.
-
raw -> bytes
-
-

The buffer data as a str/bytes object. -Any gaps in the exported data are removed.

-
- -
-
-write()¶
-
-
Write raw bytes to object buffer.
-
write(buffer, offset=0)
-
-

Overwrite bytes in the parent object's data. The data must be C or F contiguous, otherwise a ValueError is raised. Argument buffer is a str/bytes object. An optional offset gives a start position, in bytes, within the buffer where overwriting begins. If the offset is negative or greater than or equal to the buffer proxy's length value, an IndexException is raised. If len(buffer) > proxy.length + offset, a ValueError is raised.

-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/camera.html b/venv/Lib/site-packages/pygame/docs/generated/ref/camera.html deleted file mode 100644 index 1ee0b29..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/camera.html +++ /dev/null @@ -1,472 +0,0 @@ - - - - - - - - - pygame.camera — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.camera
-
-
pygame module for camera use
-
- ----- - - - - - - - - - - - - - - - - - - - - - - -
-—Module init
-—Get the backends supported on this system
-—Surface colorspace conversion
-—returns a list of available cameras
-—load a camera
-

Pygame currently supports Linux (V4L2) and Windows (MSMF) cameras natively, -with wider platform support available via an integrated OpenCV backend.

-
-

New in pygame 2.0.2: Windows native camera support

-
-
-

New in pygame 2.0.3: New OpenCV backends

-
-

EXPERIMENTAL!: This API may change or disappear in later pygame releases. If -you use this, your code will very likely break with the next pygame release.

-

The Bayer to RGB function is based on:

-
Sonix SN9C101 based webcam basic I/F routines
-Copyright (C) 2004 Takafumi Mizuno <taka-qce@ls-a.jp>
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-1. Redistributions of source code must retain the above copyright
-   notice, this list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGE.
-
-
-

New in pygame 1.9.0.

-
-
-pygame.camera.init()¶
-
-
Module init
-
init(backend = None) -> None
-
-

This function starts up the camera module, choosing the best webcam backend it can find for your system. This is not guaranteed to succeed, and may even attempt to import third party modules, like OpenCV. If you want to override its backend choice, you can pass the name of the backend you want into this function. More about backends in get_backends().

-
-

Changed in pygame 2.0.3: Option to explicitly select backend

-
-
- -
-
-pygame.camera.get_backends()¶
-
-
Get the backends supported on this system
-
get_backends() -> [str]
-
-

This function returns every backend it thinks has a possibility of working -on your system, in order of priority.

-

pygame.camera Backends:

-
Backend           OS        Description
----------------------------------------------------------------------------------
-_camera (MSMF)    Windows   Builtin, works on Windows 8+ Python3
-_camera (V4L2)    Linux     Builtin
-OpenCV            Any       Uses `opencv-python` module, can't enumerate cameras
-OpenCV-Mac        Mac       Same as OpenCV, but has camera enumeration
-VideoCapture      Windows   Uses abandoned `VideoCapture` module, can't enumerate
-                            cameras, may be removed in the future
-
-
-

There are two main differences among backends.

-

The _camera backends are built in to pygame itself, and require no third -party imports. All the other backends do. For the OpenCV and VideoCapture -backends, those modules need to be installed on your system.

-

The other big difference is "camera enumeration." Some backends don't have a way to list out camera names, or even the number of cameras on the system. In these cases, list_cameras() will return something like [0]. If you know you have multiple cameras on the system, these backends will pass a "camera index number" through if you use it as the device parameter.

-
-

New in pygame 2.0.3.

-
-
- -
-
-pygame.camera.colorspace()¶
-
-
Surface colorspace conversion
-
colorspace(Surface, format, DestSurface = None) -> Surface
-
-

Allows for conversion from "RGB" to a destination colorspace of "HSV" or -"YUV". The source and destination surfaces must be the same size and pixel -depth. This is useful for computer vision on devices with limited processing -power. Capture as small of an image as possible, transform.scale() it -even smaller, and then convert the colorspace to YUV or HSV before -doing any processing on it.

-
- -
-
-pygame.camera.list_cameras()¶
-
-
returns a list of available cameras
-
list_cameras() -> [cameras]
-
-

Checks the computer for available cameras and returns a list of strings of camera names, ready to be fed into pygame.camera.Camera.

-

If the camera backend doesn't support webcam enumeration, this will return -something like [0]. See get_backends() for much more -information.

-
- -
-
-pygame.camera.Camera¶
-
-
load a camera
-
Camera(device, (width, height), format) -> Camera
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-—opens, initializes, and starts capturing
-—stops, uninitializes, and closes the camera
-—gets current values of user controls
-—changes camera settings if supported by the camera
-—returns the dimensions of the images being recorded
-—checks if a frame is ready
-—captures an image as a Surface
-—returns an unmodified image as bytes
-

Loads a camera. On Linux, the device is typically something like -"/dev/video0". Default width and height are 640 by 480. -Format is the desired colorspace of the output. -This is useful for computer vision purposes. The default is -RGB. The following are supported:

-
-
    -
  • RGB - Red, Green, Blue

  • -
  • YUV - Luma, Blue Chrominance, Red Chrominance

  • -
  • HSV - Hue, Saturation, Value

  • -
-
-
-
-start()¶
-
-
opens, initializes, and starts capturing
-
start() -> None
-
-

Opens the camera device, attempts to initialize it, and begins recording -images to a buffer. The camera must be started before any of the below -functions can be used.

-
- -
-
-stop()¶
-
-
stops, uninitializes, and closes the camera
-
stop() -> None
-
-

Stops recording, uninitializes the camera, and closes it. Once a camera -is stopped, the below functions cannot be used until it is started again.

-
- -
-
-get_controls()¶
-
-
gets current values of user controls
-
get_controls() -> (hflip = bool, vflip = bool, brightness)
-
-

If the camera supports it, get_controls will return the current settings -for horizontal and vertical image flip as bools and brightness as an int. -If unsupported, it will return the default values of (0, 0, 0). Note that -the return values here may be different than those returned by -set_controls, though these are more likely to be correct.

-
- -
-
-set_controls()¶
-
-
changes camera settings if supported by the camera
-
set_controls(hflip = bool, vflip = bool, brightness) -> (hflip = bool, vflip = bool, brightness)
-
-

Allows you to change camera settings if the camera supports it. The -return values will be the input values if the camera claims it succeeded -or the values previously in use if not. Each argument is optional, and -the desired one can be chosen by supplying the keyword, like hflip. Note -that the actual settings being used by the camera may not be the same as -those returned by set_controls. On Windows, hflip and vflip are -implemented by pygame, not by the Camera, so they should always work, but -brightness is unsupported.

-
- -
-
-get_size()¶
-
-
returns the dimensions of the images being recorded
-
get_size() -> (width, height)
-
-

Returns the current dimensions of the images being captured by the -camera. This will return the actual size, which may be different than the -one specified during initialization if the camera did not support that -size.

-
- -
-
-query_image()¶
-
-
checks if a frame is ready
-
query_image() -> bool
-
-

If an image is ready to get, it returns true. Otherwise it returns false. -Note that some webcams will always return False and will only queue a -frame when called with a blocking function like get_image(). -On Windows (MSMF), and the OpenCV backends, query_image() -should be reliable, though. This is useful to separate the framerate of -the game from that of the camera without having to use threading.

-
- -
-
-get_image()¶
-
-
captures an image as a Surface
-
get_image(Surface = None) -> Surface
-
-

Pulls an image off of the buffer as an RGB Surface. It can optionally -reuse an existing Surface to save time. The bit-depth of the surface is -24 bits on Linux, 32 bits on Windows, or the same as the optionally -supplied Surface.

-
- -
-
-get_raw()¶
-
-
returns an unmodified image as bytes
-
get_raw() -> bytes
-
-

Gets an image from a camera as a string in the native pixelformat of the camera. Useful for integration with other libraries. This returns a bytes object.

-
- -
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/cdrom.html b/venv/Lib/site-packages/pygame/docs/generated/ref/cdrom.html deleted file mode 100644 index 8cdfa31..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/cdrom.html +++ /dev/null @@ -1,592 +0,0 @@ - - - - - - - - - pygame.cdrom — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.cdrom
-
-
pygame module for audio cdrom control
-
- ----- - - - - - - - - - - - - - - - - - - - - - - -
-—initialize the cdrom module
-—uninitialize the cdrom module
-—true if the cdrom module is initialized
-—number of cd drives on the system
-—class to manage a cdrom drive
-
-

Warning

-

This module is non-functional in pygame 2.0 and above, unless you have manually compiled pygame with SDL1. This module will not be supported in the future. One alternative for Python cdrom functionality is pycdio.

-
-

The cdrom module manages the CD and DVD drives on a computer. It can -also control the playback of audio CDs. This module needs to be initialized -before it can do anything. Each CD object you create represents a cdrom -drive and must also be initialized individually before it can do most things.

-
-
-pygame.cdrom.init()¶
-
-
initialize the cdrom module
-
init() -> None
-
-

Initialize the cdrom module. This will scan the system for all CD -devices. The module must be initialized before any other functions will -work. This automatically happens when you call pygame.init().

-

It is safe to call this function more than once.

-
- -
-
-pygame.cdrom.quit()¶
-
-
uninitialize the cdrom module
-
quit() -> None
-
-

Uninitialize the cdrom module. After you call this any existing CD -objects will no longer work.

-

It is safe to call this function more than once.

-
- -
-
-pygame.cdrom.get_init()¶
-
-
true if the cdrom module is initialized
-
get_init() -> bool
-
-

Test if the cdrom module is initialized or not. This is different than the -CD.init() since each drive must also be initialized individually.

-
- -
-
-pygame.cdrom.get_count()¶
-
-
number of cd drives on the system
-
get_count() -> count
-
-

Return the number of cd drives on the system. When you create CD objects -you need to pass an integer id that must be lower than this count. The count -will be 0 if there are no drives on the system.

-
- -
-
-pygame.cdrom.CD¶
-
-
class to manage a cdrom drive
-
CD(id) -> CD
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-—initialize a cdrom drive for use
-—uninitialize a cdrom drive for use
-—true if this cd device initialized
-—start playing audio
-—stop audio playback
-—temporarily stop audio playback
-—unpause audio playback
-—eject or open the cdrom drive
-—the index of the cdrom drive
-—the system name of the cdrom drive
-—true if the drive is playing audio
-—true if the drive is paused
-—the current audio playback position
-—False if a cdrom is in the drive
-—the number of tracks on the cdrom
-—true if the cdrom track has audio data
-—get all track information
-—start time of a cdrom track
-—length of a cdrom track
-

You can create a CD object for each cdrom on the system. Use -pygame.cdrom.get_count() to determine how many drives actually exist. -The id argument is an integer of the drive, starting at zero.

-

The CD object is not initialized; you can only call CD.get_id() and CD.get_name() on an uninitialized drive.

-

It is safe to create multiple CD objects for the same drive; they will all cooperate normally.

-
-
-init()¶
-
-
initialize a cdrom drive for use
-
init() -> None
-
-

Initialize the cdrom drive for use. The drive must be initialized for -most CD methods to work. Even if the rest of pygame has been -initialized.

-

There may be a brief pause while the drive is initialized. Avoid -CD.init() if the program should not stop for a second or two.

-
- -
-
-quit()¶
-
-
uninitialize a cdrom drive for use
-
quit() -> None
-
-

Uninitialize a drive for use. Call this when your program will not be -accessing the drive for awhile.

-
- -
-
-get_init()¶
-
-
true if this cd device initialized
-
get_init() -> bool
-
-

Test if this CDROM device is initialized. This is different than the -pygame.cdrom.init() since each drive must also be initialized -individually.

-
- -
-
-play()¶
-
-
start playing audio
-
play(track, start=None, end=None) -> None
-
-

Playback audio from an audio cdrom in the drive. Besides the track number -argument, you can also pass a starting and ending time for playback. The -start and end time are in seconds, and can limit the section of an audio -track played.

-

If you pass a start time but no end, the audio will play to the end of -the track. If you pass a start time and 'None' for the end time, the -audio will play to the end of the entire disc.

-

See the CD.get_numtracks() and CD.get_track_audio() to find -tracks to playback.

-

Note, track 0 is the first track on the CD. Track numbers start at -zero.

-
- -
-
-stop()¶
-
-
stop audio playback
-
stop() -> None
-
-

Stops playback of audio from the cdrom. This will also lose the current -playback position. This method does nothing if the drive isn't already -playing audio.

-
- -
-
-pause()¶
-
-
temporarily stop audio playback
-
pause() -> None
-
-

Temporarily stop audio playback on the CD. The playback can be -resumed at the same point with the CD.resume() method. If the CD -is not playing this method does nothing.

-

Note, track 0 is the first track on the CD. Track numbers start at -zero.

-
- -
-
-resume()¶
-
-
unpause audio playback
-
resume() -> None
-
-

Unpause a paused CD. If the CD is not paused or already playing, -this method does nothing.

-
- -
-
-eject()¶
-
-
eject or open the cdrom drive
-
eject() -> None
-
-

This will open the cdrom drive and eject the cdrom. If the drive is -playing or paused it will be stopped.

-
- -
-
-get_id()¶
-
-
the index of the cdrom drive
-
get_id() -> id
-
-

Returns the integer id that was used to create the CD instance. This -method can work on an uninitialized CD.

-
- -
-
-get_name()¶
-
-
the system name of the cdrom drive
-
get_name() -> name
-
-

Return the string name of the drive. This is the system name used to -represent the drive. It is often the drive letter or device name. This -method can work on an uninitialized CD.

-
- -
-
-get_busy()¶
-
-
true if the drive is playing audio
-
get_busy() -> bool
-
-

Returns True if the drive is busy playing back audio.

-
- -
-
-get_paused()¶
-
-
true if the drive is paused
-
get_paused() -> bool
-
-

Returns True if the drive is currently paused.

-
- -
-
-get_current()¶
-
-
the current audio playback position
-
get_current() -> track, seconds
-
-

Returns both the current track and time of that track. This method works -when the drive is either playing or paused.

-

Note, track 0 is the first track on the CD. Track numbers start at -zero.

-
- -
-
-get_empty()¶
-
-
False if a cdrom is in the drive
-
get_empty() -> bool
-
-

Return False if there is a cdrom currently in the drive. If the drive is -empty this will return True.

-
- -
-
-get_numtracks()¶
-
-
the number of tracks on the cdrom
-
get_numtracks() -> count
-
-

Return the number of tracks on the cdrom in the drive. This will return zero if the drive is empty or has no tracks.

-
- -
-
-get_track_audio()¶
-
-
true if the cdrom track has audio data
-
get_track_audio(track) -> bool
-
-

Determine if a track on a cdrom contains audio data. You can also call -CD.num_tracks() and CD.get_all() to determine more information -about the cdrom.

-

Note, track 0 is the first track on the CD. Track numbers start at -zero.

-
- -
-
-get_all()¶
-
-
get all track information
-
get_all() -> [(audio, start, end, length), ...]
-
-

Return a list with information for every track on the cdrom. The -information consists of a tuple with four values. The audio value is True -if the track contains audio data. The start, end, and length values are -floating point numbers in seconds. Start and end represent absolute times -on the entire disc.

-
- -
-
-get_track_start()¶
-
-
start time of a cdrom track
-
get_track_start(track) -> seconds
-
-

Return the absolute time in seconds at which the cdrom track starts.

-

Note, track 0 is the first track on the CD. Track numbers start at -zero.

-
- -
-
-get_track_length()¶
-
-
length of a cdrom track
-
get_track_length(track) -> seconds
-
-

Return a floating point value in seconds of the length of the cdrom -track.

-

Note, track 0 is the first track on the CD. Track numbers start at -zero.

-
- -
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/color.html b/venv/Lib/site-packages/pygame/docs/generated/ref/color.html deleted file mode 100644 index 1de2df9..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/color.html +++ /dev/null @@ -1,508 +0,0 @@ - - - - - - - - - pygame.Color — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.Color¶
-
-
pygame object for color representations
-
Color(r, g, b) -> Color
-
Color(r, g, b, a=255) -> Color
-
Color(color_value) -> Color
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-—Gets or sets the red value of the Color.
-—Gets or sets the green value of the Color.
-—Gets or sets the blue value of the Color.
-—Gets or sets the alpha value of the Color.
-—Gets or sets the CMY representation of the Color.
-—Gets or sets the HSVA representation of the Color.
-—Gets or sets the HSLA representation of the Color.
-—Gets or sets the I1I2I3 representation of the Color.
-—Returns the normalized RGBA values of the Color.
-—Applies a certain gamma value to the Color.
-—Set the number of elements in the Color to 1,2,3, or 4.
-—returns a linear interpolation to the given Color.
-—returns a Color where the r,g,b components have been multiplied by the alpha.
-—Sets the elements of the color
-

The Color class represents RGBA color values using a value range of -0 to 255 inclusive. It allows basic arithmetic operations — binary -operations +, -, *, //, %, and unary operation ~ — to -create new colors, supports conversions to other color spaces such as HSV -or HSL and lets you adjust single color channels. -Alpha defaults to 255 (fully opaque) when not given. -The arithmetic operations and correct_gamma() method preserve subclasses. -For the binary operators, the class of the returned color is that of the -left hand color object of the operator.

-

Color objects support equality comparison with other color objects and 3 or -4 element tuples of integers. There was a bug in pygame 1.8.1 -where the default alpha was 0, not 255 like previously.

-

Color objects export the C level array interface. The interface exports a -read-only one dimensional unsigned byte array of the same assigned length -as the color. The new buffer interface is also exported, with the same -characteristics as the array interface.

-

The floor division, //, and modulus, %, operators do not raise -an exception for division by zero. Instead, if a color, or alpha, channel -in the right hand color is 0, then the result is 0. For example:

-
# These expressions are True
-Color(255, 255, 255, 255) // Color(0, 64, 64, 64) == Color(0, 3, 3, 3)
-Color(255, 255, 255, 255) % Color(64, 64, 64, 0) == Color(63, 63, 63, 0)
-
-
-

Use int(color) to return the immutable integer value of the color, usable as a dict key. This integer value differs from the mapped pixel values of pygame.Surface.get_at_mapped(), pygame.Surface.map_rgb() and pygame.Surface.unmap_rgb(). It can be passed as a color_value argument to Color (useful with sets).

-

See Named Colors for samples of the available named colors.

-
-
Parameters
-
    -
  • r (int) -- red value in the range of 0 to 255 inclusive

  • -
  • g (int) -- green value in the range of 0 to 255 inclusive

  • -
  • b (int) -- blue value in the range of 0 to 255 inclusive

  • -
  • a (int) -- (optional) alpha value in the range of 0 to 255 inclusive, -default is 255

  • -
  • color_value (Color or str or int or tuple(int, int, int, [int]) or -list(int, int, int, [int])) --

    color value (see note below for the supported formats)

    -
    -

    Note

    -
    -
    Supported color_value formats:
    -
    - Color object: clones the given Color object
    -
    - Color name: str: name of the color to use, e.g. 'red' -(all the supported name strings can be found in the - Named Colors, with sample swatches)
    -
    - HTML color format str: '#rrggbbaa' or '#rrggbb', -where rr, gg, bb, and aa are 2-digit hex numbers in the range -of 0 to 0xFF inclusive, the aa (alpha) value defaults to 0xFF -if not provided
    -
    - hex number str: '0xrrggbbaa' or '0xrrggbb', where -rr, gg, bb, and aa are 2-digit hex numbers in the range of 0x00 -to 0xFF inclusive, the aa (alpha) value defaults to 0xFF if not -provided
    -
    - int: int value of the color to use, using hex numbers can -make this parameter more readable, e.g. 0xrrggbbaa, where rr, -gg, bb, and aa are 2-digit hex numbers in the range of 0x00 to -0xFF inclusive, note that the aa (alpha) value is not optional for -the int format and must be provided
    -
    - tuple/list of int color values: (R, G, B, A) or -(R, G, B), where R, G, B, and A are int values in the range of -0 to 255 inclusive, the A (alpha) value defaults to 255 if not -provided
    -
    -
    -
    -
    -

  • -
-
-
Returns
-

a newly created Color object

-
-
Return type
-

Color

-
-
-
-

Changed in pygame 2.0.0: Support for tuples, lists, and Color objects when creating -Color objects.

-
-
-

Changed in pygame 1.9.2: Color objects export the C level array interface.

-
-
-

Changed in pygame 1.9.0: Color objects support 4-element tuples of integers.

-
-
-

Changed in pygame 1.8.1: New implementation of the class.

-
-
-
-r¶
-
-
Gets or sets the red value of the Color.
-
r -> int
-
-

The red value of the Color.

-
- -
-
-g¶
-
-
Gets or sets the green value of the Color.
-
g -> int
-
-

The green value of the Color.

-
- -
-
-b¶
-
-
Gets or sets the blue value of the Color.
-
b -> int
-
-

The blue value of the Color.

-
- -
-
-a¶
-
-
Gets or sets the alpha value of the Color.
-
a -> int
-
-

The alpha value of the Color.

-
- -
-
-cmy¶
-
-
Gets or sets the CMY representation of the Color.
-
cmy -> tuple
-
-

The CMY representation of the Color. The CMY components are in -the ranges C = [0, 1], M = [0, 1], Y = [0, 1]. Note that this -will not return the absolutely exact CMY values for the set RGB -values in all cases. Due to the RGB mapping from 0-255 and the -CMY mapping from 0-1 rounding errors may cause the CMY values to -differ slightly from what you might expect.

-
- -
-
-hsva¶
-
-
Gets or sets the HSVA representation of the Color.
-
hsva -> tuple
-
-

The HSVA representation of the Color. The HSVA components are in -the ranges H = [0, 360], S = [0, 100], V = [0, 100], A = [0, -100]. Note that this will not return the absolutely exact HSV values -for the set RGB values in all cases. Due to the RGB mapping from -0-255 and the HSV mapping from 0-100 and 0-360 rounding errors may -cause the HSV values to differ slightly from what you might expect.

-
- -
-
-hsla¶
-
-
Gets or sets the HSLA representation of the Color.
-
hsla -> tuple
-
-

The HSLA representation of the Color. The HSLA components are in the ranges H = [0, 360], S = [0, 100], L = [0, 100], A = [0, 100]. Note that this will not return the absolutely exact HSL values for the set RGB values in all cases. Due to the RGB mapping from 0-255 and the HSL mapping from 0-100 and 0-360, rounding errors may cause the HSL values to differ slightly from what you might expect.

-
- -
-
-i1i2i3¶
-
-
Gets or sets the I1I2I3 representation of the Color.
-
i1i2i3 -> tuple
-
-

The I1I2I3 representation of the Color. The I1I2I3 components are in the ranges I1 = [0, 1], I2 = [-0.5, 0.5], I3 = [-0.5, 0.5]. Note that this will not return the absolutely exact I1I2I3 values for the set RGB values in all cases. Due to the RGB mapping from 0-255 and the I1I2I3 mapping from 0-1, rounding errors may cause the I1I2I3 values to differ slightly from what you might expect.

-
- -
-
-normalize()¶
-
-
Returns the normalized RGBA values of the Color.
-
normalize() -> tuple
-
-

Returns the normalized RGBA values of the Color as floating point -values.

-
- -
-
-correct_gamma()¶
-
-
Applies a certain gamma value to the Color.
-
correct_gamma (gamma) -> Color
-
-

Applies a certain gamma value to the Color and returns a new Color with -the adjusted RGBA values.

-
- -
-
-set_length()¶
-
-
Set the number of elements in the Color to 1,2,3, or 4.
-
set_length(len) -> None
-
-

The default Color length is 4. Colors can have lengths 1,2,3 or 4. This -is useful if you want to unpack to r,g,b and not r,g,b,a. If you want to -get the length of a Color do len(acolor).

-
-

New in pygame 1.9.0.

-
-
- -
-
-lerp()¶
-
-
returns a linear interpolation to the given Color.
-
lerp(Color, float) -> Color
-
-

Returns a Color which is a linear interpolation between self and the -given Color in RGBA space. The second parameter determines how far -between self and other the result is going to be. -It must be a value between 0 and 1 where 0 means self and 1 means -other will be returned.

-
-

New in pygame 2.0.1.

-
-
- -
-
-premul_alpha()¶
-
-
returns a Color where the r,g,b components have been multiplied by the alpha.
-
premul_alpha() -> Color
-
-

Returns a new Color where each of the red, green and blue colour -channels have been multiplied by the alpha channel of the original -color. The alpha channel remains unchanged.

-

This is useful when working with the BLEND_PREMULTIPLIED blending mode flag for pygame.Surface.blit(), which assumes that all surfaces using it are using pre-multiplied alpha colors.

-
-

New in pygame 2.0.0.

-
-
- -
-
-update()¶
-
-
Sets the elements of the color
-
update(r, g, b) -> None
-
update(r, g, b, a=255) -> None
-
update(color_value) -> None
-
-

Sets the elements of the color. See parameters for pygame.Color() for the parameters of this function. If the alpha value was not set it will not change.

-
-

New in pygame 2.0.1.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/color_list.html b/venv/Lib/site-packages/pygame/docs/generated/ref/color_list.html deleted file mode 100644 index 269c796..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/color_list.html +++ /dev/null @@ -1,2813 +0,0 @@ - - - - - - - - - Named Colors — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-

pygame.Color lets you specify any of these named colors when creating a new pygame.Color (taken from the colordict module).

- ---- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Name

Color

aliceblue

████████

antiquewhite

████████

antiquewhite1

████████

antiquewhite2

████████

antiquewhite3

████████

antiquewhite4

████████

aqua

████████

aquamarine

████████

aquamarine1

████████

aquamarine2

████████

aquamarine3

████████

aquamarine4

████████

azure

████████

azure1

████████

azure2

████████

azure3

████████

azure4

████████

beige

████████

bisque

████████

bisque1

████████

bisque2

████████

bisque3

████████

bisque4

████████

black

████████

blanchedalmond

████████

blue

████████

blue1

████████

blue2

████████

blue3

████████

blue4

████████

blueviolet

████████

brown

████████

brown1

████████

brown2

████████

brown3

████████

brown4

████████

burlywood

████████

burlywood1

████████

burlywood2

████████

burlywood3

████████

burlywood4

████████

cadetblue

████████

cadetblue1

████████

cadetblue2

████████

cadetblue3

████████

cadetblue4

████████

chartreuse

████████

chartreuse1

████████

chartreuse2

████████

chartreuse3

████████

chartreuse4

████████

chocolate

████████

chocolate1

████████

chocolate2

████████

chocolate3

████████

chocolate4

████████

coral

████████

coral1

████████

coral2

████████

coral3

████████

coral4

████████

cornflowerblue

████████

cornsilk

████████

cornsilk1

████████

cornsilk2

████████

cornsilk3

████████

cornsilk4

████████

crimson

████████

cyan

████████

cyan1

████████

cyan2

████████

cyan3

████████

cyan4

████████

darkblue

████████

darkcyan

████████

darkgoldenrod

████████

darkgoldenrod1

████████

darkgoldenrod2

████████

darkgoldenrod3

████████

darkgoldenrod4

████████

darkgray

████████

darkgreen

████████

darkgrey

████████

darkkhaki

████████

darkmagenta

████████

darkolivegreen

████████

darkolivegreen1

████████

darkolivegreen2

████████

darkolivegreen3

████████

darkolivegreen4

████████

darkorange

████████

darkorange1

████████

darkorange2

████████

darkorange3

████████

darkorange4

████████

darkorchid

████████

darkorchid1

████████

darkorchid2

████████

darkorchid3

████████

darkorchid4

████████

darkred

████████

darksalmon

████████

darkseagreen

████████

darkseagreen1

████████

darkseagreen2

████████

darkseagreen3

████████

darkseagreen4

████████

darkslateblue

████████

darkslategray

████████

darkslategray1

████████

darkslategray2

████████

darkslategray3

████████

darkslategray4

████████

darkslategrey

████████

darkturquoise

████████

darkviolet

████████

deeppink

████████

deeppink1

████████

deeppink2

████████

deeppink3

████████

deeppink4

████████

deepskyblue

████████

deepskyblue1

████████

deepskyblue2

████████

deepskyblue3

████████

deepskyblue4

████████

dimgray

████████

dimgrey

████████

dodgerblue

████████

dodgerblue1

████████

dodgerblue2

████████

dodgerblue3

████████

dodgerblue4

████████

firebrick

████████

firebrick1

████████

firebrick2

████████

firebrick3

████████

firebrick4

████████

floralwhite

████████

forestgreen

████████

fuchsia

████████

gainsboro

████████

ghostwhite

████████

gold

████████

gold1

████████

gold2

████████

gold3

████████

gold4

████████

goldenrod

████████

goldenrod1

████████

goldenrod2

████████

goldenrod3

████████

goldenrod4

████████

gray

████████

gray0

████████

gray1

████████

gray2

████████

gray3

████████

gray4

████████

gray5

████████

gray6

████████

gray7

████████

gray8

████████

gray9

████████

gray10

████████

gray11

████████

gray12

████████

gray13

████████

gray14

████████

gray15

████████

gray16

████████

gray17

████████

gray18

████████

gray19

████████

gray20

████████

gray21

████████

gray22

████████

gray23

████████

gray24

████████

gray25

████████

gray26

████████

gray27

████████

gray28

████████

gray29

████████

gray30

████████

gray31

████████

gray32

████████

gray33

████████

gray34

████████

gray35

████████

gray36

████████

gray37

████████

gray38

████████

gray39

████████

gray40

████████

gray41

████████

gray42

████████

gray43

████████

gray44

████████

gray45

████████

gray46

████████

gray47

████████

gray48

████████

gray49

████████

gray50

████████

gray51

████████

gray52

████████

gray53

████████

gray54

████████

gray55

████████

gray56

████████

gray57

████████

gray58

████████

gray59

████████

gray60

████████

gray61

████████

gray62

████████

gray63

████████

gray64

████████

gray65

████████

gray66

████████

gray67

████████

gray68

████████

gray69

████████

gray70

████████

gray71

████████

gray72

████████

gray73

████████

gray74

████████

gray75

████████

gray76

████████

gray77

████████

gray78

████████

gray79

████████

gray80

████████

gray81

████████

gray82

████████

gray83

████████

gray84

████████

gray85

████████

gray86

████████

gray87

████████

gray88

████████

gray89

████████

gray90

████████

gray91

████████

gray92

████████

gray93

████████

gray94

████████

gray95

████████

gray96

████████

gray97

████████

gray98

████████

gray99

████████

gray100

████████

green

████████

green1

████████

green2

████████

green3

████████

green4

████████

greenyellow

████████

grey

████████

grey0

████████

grey1

████████

grey2

████████

grey3

████████

grey4

████████

grey5

████████

grey6

████████

grey7

████████

grey8

████████

grey9

████████

grey10

████████

grey11

████████

grey12

████████

grey13

████████

grey14

████████

grey15

████████

grey16

████████

grey17

████████

grey18

████████

grey19

████████

grey20

████████

grey21

████████

grey22

████████

grey23

████████

grey24

████████

grey25

████████

grey26

████████

grey27

████████

grey28

████████

grey29

████████

grey30

████████

grey31

████████

grey32

████████

grey33

████████

grey34

████████

grey35

████████

grey36

████████

grey37

████████

grey38

████████

grey39

████████

grey40

████████

grey41

████████

grey42

████████

grey43

████████

grey44

████████

grey45

████████

grey46

████████

grey47

████████

grey48

████████

grey49

████████

grey50

████████

grey51

████████

grey52

████████

grey53

████████

grey54

████████

grey55

████████

grey56

████████

grey57

████████

grey58

████████

grey59

████████

grey60

████████

grey61

████████

grey62

████████

grey63

████████

grey64

████████

grey65

████████

grey66

████████

grey67

████████

grey68

████████

grey69

████████

grey70

████████

grey71

████████

grey72

████████

grey73

████████

grey74

████████

grey75

████████

grey76

████████

grey77

████████

grey78

████████

grey79

████████

grey80

████████

grey81

████████

grey82

████████

grey83

████████

grey84

████████

grey85

████████

grey86

████████

grey87

████████

grey88

████████

grey89

████████

grey90

████████

grey91

████████

grey92

████████

grey93

████████

grey94

████████

grey95

████████

grey96

████████

grey97

████████

grey98

████████

grey99

████████

grey100

████████

honeydew

████████

honeydew1

████████

honeydew2

████████

honeydew3

████████

honeydew4

████████

hotpink

████████

hotpink1

████████

hotpink2

████████

hotpink3

████████

hotpink4

████████

indianred

████████

indianred1

████████

indianred2

████████

indianred3

████████

indianred4

████████

indigo

████████

ivory

████████

ivory1

████████

ivory2

████████

ivory3

████████

ivory4

████████

khaki

████████

khaki1

████████

khaki2

████████

khaki3

████████

khaki4

████████

lavender

████████

lavenderblush

████████

lavenderblush1

████████

lavenderblush2

████████

lavenderblush3

████████

lavenderblush4

████████

lawngreen

████████

lemonchiffon

████████

lemonchiffon1

████████

lemonchiffon2

████████

lemonchiffon3

████████

lemonchiffon4

████████

lightblue

████████

lightblue1

████████

lightblue2

████████

lightblue3

████████

lightblue4

████████

lightcoral

████████

lightcyan

████████

lightcyan1

████████

lightcyan2

████████

lightcyan3

████████

lightcyan4

████████

lightgoldenrod

████████

lightgoldenrod1

████████

lightgoldenrod2

████████

lightgoldenrod3

████████

lightgoldenrod4

████████

lightgoldenrodyellow

████████

lightgray

████████

lightgreen

████████

lightgrey

████████

lightpink

████████

lightpink1

████████

lightpink2

████████

lightpink3

████████

lightpink4

████████

lightsalmon

████████

lightsalmon1

████████

lightsalmon2

████████

lightsalmon3

████████

lightsalmon4

████████

lightseagreen

████████

lightskyblue

████████

lightskyblue1

████████

lightskyblue2

████████

lightskyblue3

████████

lightskyblue4

████████

lightslateblue

████████

lightslategray

████████

lightslategrey

████████

lightsteelblue

████████

lightsteelblue1

████████

lightsteelblue2

████████

lightsteelblue3

████████

lightsteelblue4

████████

lightyellow

████████

lightyellow1

████████

lightyellow2

████████

lightyellow3

████████

lightyellow4

████████

lime

████████

limegreen

████████

linen

████████

magenta

████████

magenta1

████████

magenta2

████████

magenta3

████████

magenta4

████████

maroon

████████

maroon1

████████

maroon2

████████

maroon3

████████

maroon4

████████

mediumaquamarine

████████

mediumblue

████████

mediumorchid

████████

mediumorchid1

████████

mediumorchid2

████████

mediumorchid3

████████

mediumorchid4

████████

mediumpurple

████████

mediumpurple1

████████

mediumpurple2

████████

mediumpurple3

████████

mediumpurple4

████████

mediumseagreen

████████

mediumslateblue

████████

mediumspringgreen

████████

mediumturquoise

████████

mediumvioletred

████████

midnightblue

████████

mintcream

████████

mistyrose

████████

mistyrose1

████████

mistyrose2

████████

mistyrose3

████████

mistyrose4

████████

moccasin

████████

navajowhite

████████

navajowhite1

████████

navajowhite2

████████

navajowhite3

████████

navajowhite4

████████

navy

████████

navyblue

████████

oldlace

████████

olive

████████

olivedrab

████████

olivedrab1

████████

olivedrab2

████████

olivedrab3

████████

olivedrab4

████████

orange

████████

orange1

████████

orange2

████████

orange3

████████

orange4

████████

orangered

████████

orangered1

████████

orangered2

████████

orangered3

████████

orangered4

████████

orchid

████████

orchid1

████████

orchid2

████████

orchid3

████████

orchid4

████████

palegoldenrod

████████

palegreen

████████

palegreen1

████████

palegreen2

████████

palegreen3

████████

palegreen4

████████

paleturquoise

████████

paleturquoise1

████████

paleturquoise2

████████

paleturquoise3

████████

paleturquoise4

████████

palevioletred

████████

palevioletred1

████████

palevioletred2

████████

palevioletred3

████████

palevioletred4

████████

papayawhip

████████

peachpuff

████████

peachpuff1

████████

peachpuff2

████████

peachpuff3

████████

peachpuff4

████████

peru

████████

pink

████████

pink1

████████

pink2

████████

pink3

████████

pink4

████████

plum

████████

plum1

████████

plum2

████████

plum3

████████

plum4

████████

powderblue

████████

purple

████████

purple1

████████

purple2

████████

purple3

████████

purple4

████████

red

████████

red1

████████

red2

████████

red3

████████

red4

████████

rosybrown

████████

rosybrown1

████████

rosybrown2

████████

rosybrown3

████████

rosybrown4

████████

royalblue

████████

royalblue1

████████

royalblue2

████████

royalblue3

████████

royalblue4

████████

saddlebrown

████████

salmon

████████

salmon1

████████

salmon2

████████

salmon3

████████

salmon4

████████

sandybrown

████████

seagreen

████████

seagreen1

████████

seagreen2

████████

seagreen3

████████

seagreen4

████████

seashell

████████

seashell1

████████

seashell2

████████

seashell3

████████

seashell4

████████

sienna

████████

sienna1

████████

sienna2

████████

sienna3

████████

sienna4

████████

silver

████████

skyblue

████████

skyblue1

████████

skyblue2

████████

skyblue3

████████

skyblue4

████████

slateblue

████████

slateblue1

████████

slateblue2

████████

slateblue3

████████

slateblue4

████████

slategray

████████

slategray1

████████

slategray2

████████

slategray3

████████

slategray4

████████

slategrey

████████

snow

████████

snow1

████████

snow2

████████

snow3

████████

snow4

████████

springgreen

████████

springgreen1

████████

springgreen2

████████

springgreen3

████████

springgreen4

████████

steelblue

████████

steelblue1

████████

steelblue2

████████

steelblue3

████████

steelblue4

████████

tan

████████

tan1

████████

tan2

████████

tan3

████████

tan4

████████

teal

████████

thistle

████████

thistle1

████████

thistle2

████████

thistle3

████████

thistle4

████████

tomato

████████

tomato1

████████

tomato2

████████

tomato3

████████

tomato4

████████

turquoise

████████

turquoise1

████████

turquoise2

████████

turquoise3

████████

turquoise4

████████

violet

████████

violetred

████████

violetred1

████████

violetred2

████████

violetred3

████████

violetred4

████████

wheat

████████

wheat1

████████

wheat2

████████

wheat3

████████

wheat4

████████

white

████████

whitesmoke

████████

yellow

████████

yellow1

████████

yellow2

████████

yellow3

████████

yellow4

████████

yellowgreen

████████

-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/cursors.html b/venv/Lib/site-packages/pygame/docs/generated/ref/cursors.html deleted file mode 100644 index 81e4c20..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/cursors.html +++ /dev/null @@ -1,452 +0,0 @@ - - - - - - - - - pygame.cursors — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.cursors
-
-
pygame module for cursor resources
-
- ----- - - - - - - - - - - - - - - -
-—create binary cursor data from simple strings
-—load cursor data from an XBM file
-—pygame object representing a cursor
-

Pygame offers control over the system hardware cursor. Pygame supports black and white cursors (bitmap cursors), as well as system variant cursors and color cursors. You control the cursor with functions inside pygame.mouse.

-

This cursors module contains functions for loading and decoding various -cursor formats. These allow you to easily store your cursors in external files -or directly as encoded python strings.

-

The module includes several standard cursors. The pygame.mouse.set_cursor() function takes several arguments. All those arguments have been stored in a single tuple you can call like this:

-
>>> pygame.mouse.set_cursor(*pygame.cursors.arrow)
-
-
-

The following variables can be passed to pygame.mouse.set_cursor function:

-
-
    -
  • pygame.cursors.arrow

  • -
  • pygame.cursors.diamond

  • -
  • pygame.cursors.broken_x

  • -
  • pygame.cursors.tri_left

  • -
  • pygame.cursors.tri_right

  • -
-
-

This module also contains a few cursors as formatted strings. You'll need to pass these to the pygame.cursors.compile() function before you can use them. The example call would look like this:

-
>>> cursor = pygame.cursors.compile(pygame.cursors.textmarker_strings)
->>> pygame.mouse.set_cursor((8, 16), (0, 0), *cursor)
-
-
-

The following strings can be converted into cursor bitmaps with -pygame.cursors.compile() :

-
-
    -
  • pygame.cursors.thickarrow_strings

  • -
  • pygame.cursors.sizer_x_strings

  • -
  • pygame.cursors.sizer_y_strings

  • -
  • pygame.cursors.sizer_xy_strings

  • -
  • pygame.cursor.textmarker_strings

  • -
-
-
-
-pygame.cursors.compile()¶
-
-
create binary cursor data from simple strings
-
compile(strings, black='X', white='.', xor='o') -> data, mask
-
-

A sequence of strings can be used to create binary cursor data for the system cursor. This returns the binary data in the form of two tuples. Those can be passed as the third and fourth arguments respectively of the pygame.mouse.set_cursor() function.

-

If you are creating your own cursor strings, you can use any value to represent the black and white pixels. Some systems allow you to set a special toggle color, also called the xor color. If the system does not support xor cursors, that color will simply be black.

-

The height must be divisible by 8. The widths of the strings must all be equal and divisible by 8. If these two conditions are not met, ValueError is raised. An example set of cursor strings looks like this:

-
thickarrow_strings = (               #sized 24x24
-  "XX                      ",
-  "XXX                     ",
-  "XXXX                    ",
-  "XX.XX                   ",
-  "XX..XX                  ",
-  "XX...XX                 ",
-  "XX....XX                ",
-  "XX.....XX               ",
-  "XX......XX              ",
-  "XX.......XX             ",
-  "XX........XX            ",
-  "XX........XXX           ",
-  "XX......XXXXX           ",
-  "XX.XXX..XX              ",
-  "XXXX XX..XX             ",
-  "XX   XX..XX             ",
-  "     XX..XX             ",
-  "      XX..XX            ",
-  "      XX..XX            ",
-  "       XXXX             ",
-  "       XX               ",
-  "                        ",
-  "                        ",
-  "                        ")
-
-
-
- -
-
-pygame.cursors.load_xbm()¶
-
-
load cursor data from an XBM file
-
load_xbm(cursorfile) -> cursor_args
-
load_xbm(cursorfile, maskfile) -> cursor_args
-
-

This loads cursors for a simple subset of XBM files. XBM files are traditionally used to store cursors on UNIX systems; they are an ASCII format used to represent simple images.

-

Sometimes the black and white color values will be split into two separate -XBM files. You can pass a second maskfile argument to load the two -images into a single cursor.

-

The cursorfile and maskfile arguments can either be filenames or file-like objects with the readlines method.

-

The return value cursor_args can be passed directly to the -pygame.mouse.set_cursor() function.

-
- -
-
-pygame.cursors.Cursor¶
-
-
pygame object representing a cursor
-
Cursor(size, hotspot, xormasks, andmasks) -> Cursor
-
Cursor(hotspot, surface) -> Cursor
-
Cursor(constant) -> Cursor
-
Cursor(Cursor) -> Cursor
-
Cursor() -> Cursor
-
- ----- - - - - - - - - - - - - - - -
-—
-—Gets the cursor type
-—Gets the cursor data
-

In pygame 2, there are 3 types of cursors you can create to give your game that little bit of extra polish. There are bitmap type cursors, which existed in pygame 1.x, and are compiled from a string or loaded from an xbm file. Then there are system type cursors, where you choose a preset that will convey the same meaning but look native across different operating systems. Finally you can create a color cursor, which displays a pygame surface as the cursor.

-

Creating a system cursor

-

Choose a constant from this list, pass it into pygame.cursors.Cursor(constant), -and you're good to go. Be advised that not all systems support every system -cursor, and you may get a substitution instead. For example, on MacOS, -WAIT/WAITARROW should show up as an arrow, and SIZENWSE/SIZENESW/SIZEALL -should show up as a closed hand. And on Wayland, every SIZE cursor should -show up as a hand.

-
Pygame Cursor Constant           Description
---------------------------------------------
-pygame.SYSTEM_CURSOR_ARROW       arrow
-pygame.SYSTEM_CURSOR_IBEAM       i-beam
-pygame.SYSTEM_CURSOR_WAIT        wait
-pygame.SYSTEM_CURSOR_CROSSHAIR   crosshair
-pygame.SYSTEM_CURSOR_WAITARROW   small wait cursor
-                                 (or wait if not available)
-pygame.SYSTEM_CURSOR_SIZENWSE    double arrow pointing
-                                 northwest and southeast
-pygame.SYSTEM_CURSOR_SIZENESW    double arrow pointing
-                                 northeast and southwest
-pygame.SYSTEM_CURSOR_SIZEWE      double arrow pointing
-                                 west and east
-pygame.SYSTEM_CURSOR_SIZENS      double arrow pointing
-                                 north and south
-pygame.SYSTEM_CURSOR_SIZEALL     four pointed arrow pointing
-                                 north, south, east, and west
-pygame.SYSTEM_CURSOR_NO          slashed circle or crossbones
-pygame.SYSTEM_CURSOR_HAND        hand
-
-
-

Creating a cursor without passing arguments

-

In addition to the cursor constants available and described above, you can also call pygame.cursors.Cursor(), and your cursor is ready (doing that is the same as calling pygame.cursors.Cursor(pygame.SYSTEM_CURSOR_ARROW)). Doing one of those calls actually creates a system cursor using the default native image.

-

Creating a color cursor

-

To create a color cursor, create a Cursor from a hotspot and a surface. -hotspot is an (x,y) coordinate that determines where in the cursor the exact point is. -The hotspot position must be within the bounds of the surface.

-

Creating a bitmap cursor

-

When the mouse cursor is visible, it will be displayed as a black and white -bitmap using the given bitmask arrays. The size is a sequence containing -the cursor width and height. hotspot is a sequence containing the cursor -hotspot position.

-

A cursor has a width and height, but a mouse position is represented by a -set of point coordinates. So the value passed into the cursor hotspot -variable helps pygame to actually determine at what exact point the cursor -is at.

-

xormasks is a sequence of bytes containing the cursor xor data masks. -Lastly andmasks, a sequence of bytes containing the cursor bitmask data. -To create these variables, we can make use of the -pygame.cursors.compile()create binary cursor data from simple strings function.

-

Width and height must be a multiple of 8, and the mask arrays must be the -correct size for the given width and height. Otherwise an exception is raised.

-
-
-copy()¶
-
copy the current cursor
-
copy() -> Cursor
-

Returns a new Cursor object with the same data and hotspot as the original.

-
- -
-
-type¶
-
-
Gets the cursor type
-
type -> string
-
-

The type will be "system", "bitmap", or "color".

-
- -
-
-data¶
-
-
Gets the cursor data
-
data -> tuple
-
-

Returns the data that was used to create this cursor object, wrapped up in a tuple.

-
- -
-

New in pygame 2.0.1.

-
-
- -

Example code for creating and settings cursors. (Click the mouse to switch cursor)

-
# pygame setup
-import pygame as pg
-
-pg.init()
-screen = pg.display.set_mode([600, 400])
-pg.display.set_caption("Example code for the cursors module")
-
-# create a system cursor
-system = pg.cursors.Cursor(pg.SYSTEM_CURSOR_NO)
-
-# create bitmap cursors
-bitmap_1 = pg.cursors.Cursor(*pg.cursors.arrow)
-bitmap_2 = pg.cursors.Cursor(
-    (24, 24), (0, 0), *pg.cursors.compile(pg.cursors.thickarrow_strings)
-)
-
-# create a color cursor
-surf = pg.Surface((40, 40)) # you could also load an image 
-surf.fill((120, 50, 50))        # and use that as your surface
-color = pg.cursors.Cursor((20, 20), surf)
-
-cursors = [system, bitmap_1, bitmap_2, color]
-cursor_index = 0
-
-pg.mouse.set_cursor(cursors[cursor_index])
-
-clock = pg.time.Clock()
-going = True
-while going:
-    clock.tick(60)
-    screen.fill((0, 75, 30))
-    pg.display.flip()
-
-    for event in pg.event.get():
-        if event.type == pg.QUIT or (event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE):
-            going = False
-
-        # if the mouse is clicked it will switch to a new cursor
-        if event.type == pg.MOUSEBUTTONDOWN:
-            cursor_index += 1
-            cursor_index %= len(cursors)
-            pg.mouse.set_cursor(cursors[cursor_index])
-
-pg.quit()
-
-
-
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/display.html b/venv/Lib/site-packages/pygame/docs/generated/ref/display.html deleted file mode 100644 index edaab32..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/display.html +++ /dev/null @@ -1,990 +0,0 @@ - - - - - - - - - pygame.display — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.display
-
-
pygame module to control the display window and screen
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-pygame.display.init — Initialize the display module
-pygame.display.quit — Uninitialize the display module
-pygame.display.get_init — Returns True if the display module has been initialized
-pygame.display.set_mode — Initialize a window or screen for display
-pygame.display.get_surface — Get a reference to the currently set display surface
-pygame.display.flip — Update the full display Surface to the screen
-pygame.display.update — Update portions of the screen for software displays
-pygame.display.get_driver — Get the name of the pygame display backend
-pygame.display.Info — Create a video display information object
-pygame.display.get_wm_info — Get information about the current windowing system
-pygame.display.get_desktop_sizes — Get sizes of active desktops
-pygame.display.list_modes — Get list of available fullscreen modes
-pygame.display.mode_ok — Pick the best color depth for a display mode
-pygame.display.gl_get_attribute — Get the value for an OpenGL flag for the current display
-pygame.display.gl_set_attribute — Request an OpenGL display attribute for the display mode
-pygame.display.get_active — Returns True when the display is active on the screen
-pygame.display.iconify — Iconify the display surface
-pygame.display.toggle_fullscreen — Switch between fullscreen and windowed displays
-pygame.display.set_gamma — Change the hardware gamma ramps
-pygame.display.set_gamma_ramp — Change the hardware gamma ramps with a custom lookup
-pygame.display.set_icon — Change the system image for the display window
-pygame.display.set_caption — Set the current window caption
-pygame.display.get_caption — Get the current window caption
-pygame.display.set_palette — Set the display color palette for indexed displays
-pygame.display.get_num_displays — Return the number of displays
-pygame.display.get_window_size — Return the size of the window or screen
-pygame.display.get_allow_screensaver — Return whether the screensaver is allowed to run.
-pygame.display.set_allow_screensaver — Set whether the screensaver may run
-

This module offers control over the pygame display. Pygame has a single display -Surface that is either contained in a window or runs full screen. Once you -create the display you treat it as a regular Surface. Changes are not -immediately visible onscreen; you must choose one of the two flipping functions -to update the actual display.

-

The origin of the display, where x = 0 and y = 0, is the top left of the -screen. Both axes increase positively towards the bottom right of the screen.

-

The pygame display can actually be initialized in one of several modes. By -default, the display is a basic software driven framebuffer. You can request -special modules like automatic scaling or OpenGL support. These are -controlled by flags passed to pygame.display.set_mode().

-

Pygame can only have a single display active at any time. Creating a new one -with pygame.display.set_mode() will close the previous display. To detect -the number and size of attached screens, you can use -pygame.display.get_desktop_sizes and then select appropriate window size -and display index to pass to pygame.display.set_mode().

-

For backward compatibility pygame.display allows precise control over the pixel format or display resolutions. This used to be necessary with old graphics cards and CRT screens, but is usually not needed any more. Use the functions pygame.display.mode_ok(), pygame.display.list_modes(), and pygame.display.Info() to query detailed information about the display.

-

Once the display Surface is created, the functions from this module affect the -single existing display. The Surface becomes invalid if the module is -uninitialized. If a new display mode is set, the existing Surface will -automatically switch to operate on the new display.

-

When the display mode is set, several events are placed on the pygame event -queue. pygame.QUIT is sent when the user has requested the program to -shut down. The window will receive pygame.ACTIVEEVENT events as the display -gains and loses input focus. If the display is set with the -pygame.RESIZABLE flag, pygame.VIDEORESIZE events will be sent when the -user adjusts the window dimensions. Hardware displays that draw direct to the -screen will get pygame.VIDEOEXPOSE events when portions of the window must -be redrawn.

-

A new windowevent API was introduced in pygame 2.0.1. Check the event module docs for more information on that.

-

Some display environments have an option for automatically stretching all -windows. When this option is enabled, this automatic stretching distorts the -appearance of the pygame window. In the pygame examples directory, there is -example code (prevent_display_stretching.py) which shows how to disable this -automatic stretching of the pygame display on Microsoft Windows (Vista or newer -required).

-
-
-pygame.display.init()¶
-
-
Initialize the display module
-
init() -> None
-
-

Initializes the pygame display module. The display module cannot do anything -until it is initialized. This is usually handled for you automatically when -you call the higher level pygame.init().

-

Pygame will select from one of several internal display backends when it is -initialized. The display mode will be chosen depending on the platform and -permissions of current user. Before the display module is initialized the -environment variable SDL_VIDEODRIVER can be set to control which backend -is used. The systems with multiple choices are listed here.

-
Windows : windib, directx
-Unix    : x11, dga, fbcon, directfb, ggi, vgl, svgalib, aalib
-
-
-
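For example, a backend can be requested like this before the display module comes up (x11 is just an arbitrary choice here; use a driver valid for your platform):

import os
import pygame

# must be set before pygame.display.init() / pygame.init()
os.environ["SDL_VIDEODRIVER"] = "x11"
pygame.init()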

On some platforms it is possible to embed the pygame display into an already -existing window. To do this, the environment variable SDL_WINDOWID must -be set to a string containing the window id or handle. The environment -variable is checked when the pygame display is initialized. Be aware that -there can be many strange side effects when running in an embedded display.

-

It is harmless to call this more than once, repeated calls have no effect.

-
- -
-
-pygame.display.quit()¶
-
-
Uninitialize the display module
-
quit() -> None
-
-

This will shut down the entire display module. This means any active -displays will be closed. This will also be handled automatically when the -program exits.

-

It is harmless to call this more than once, repeated calls have no effect.

-
- -
-
-pygame.display.get_init()¶
-
-
Returns True if the display module has been initialized
-
get_init() -> bool
-
-

Returns True if the pygame.display module is currently initialized.

-
- -
-
-pygame.display.set_mode()¶
-
-
Initialize a window or screen for display
-
set_mode(size=(0, 0), flags=0, depth=0, display=0, vsync=0) -> Surface
-
-

This function will create a display Surface. The arguments passed in are -requests for a display type. The actual created display will be the best -possible match supported by the system.

-

Note that calling this function implicitly initializes pygame.display, if -it was not initialized before.

-

The size argument is a pair of numbers representing the width and -height. The flags argument is a collection of additional options. The depth -argument represents the number of bits to use for color.

-

The Surface that gets returned can be drawn to like a regular Surface but -changes will eventually be seen on the monitor.

-

If no size is passed or is set to (0, 0) and pygame uses SDL -version 1.2.10 or above, the created Surface will have the same size as the -current screen resolution. If only the width or height are set to 0, the -Surface will have the same width or height as the screen resolution. Using a -SDL version prior to 1.2.10 will raise an exception.

-

It is usually best to not pass the depth argument. It will default to the -best and fastest color depth for the system. If your game requires a -specific color format you can control the depth with this argument. Pygame -will emulate an unavailable color depth which can be slow.

-

When requesting fullscreen display modes, sometimes an exact match for the -requested size cannot be made. In these situations pygame will select -the closest compatible match. The returned surface will still always match -the requested size.

-

On high resolution displays (4k, 1080p), games with tiny graphics (640x480) show up very small, to the point of being unplayable. SCALED scales up the window for you. The game thinks it's a 640x480 window, but really it can be bigger. Mouse events are scaled for you, so your game doesn't need to do it. Note that SCALED is considered an experimental API and may change in future releases.

-

The flags argument controls which type of display you want. There are -several to choose from, and you can even combine multiple types using the -bitwise or operator, (the pipe "|" character). Here are the display -flags you will want to choose from:

-
pygame.FULLSCREEN    create a fullscreen display
-pygame.DOUBLEBUF     (obsolete in pygame 2) recommended for HWSURFACE or OPENGL
-pygame.HWSURFACE     (obsolete in pygame 2) hardware accelerated, only in FULLSCREEN
-pygame.OPENGL        create an OpenGL-renderable display
-pygame.RESIZABLE     display window should be sizeable
-pygame.NOFRAME       display window will have no border or controls
-pygame.SCALED        resolution depends on desktop size and scale graphics
-pygame.SHOWN         window is opened in visible mode (default)
-pygame.HIDDEN        window is opened in hidden mode
-
-
-
-

New in pygame 2.0.0: SCALED, SHOWN and HIDDEN

-
-

By setting the vsync parameter to 1, it is possible to get a display -with vertical sync, but you are not guaranteed to get one. The request only -works at all for calls to set_mode() with the pygame.OPENGL or -pygame.SCALED flags set, and is still not guaranteed even with one of -those set. What you get depends on the hardware and driver configuration -of the system pygame is running on. Here is an example usage of a call -to set_mode() that may give you a display with vsync:

-
flags = pygame.OPENGL | pygame.FULLSCREEN
-window_surface = pygame.display.set_mode((1920, 1080), flags, vsync=1)
-
-
-

Vsync behaviour is considered experimental, and may change in future releases.

-
-

New in pygame 2.0.0: vsync

-
-

Basic example:

-
# Open a window on the screen
-screen_width=700
-screen_height=400
-screen=pygame.display.set_mode([screen_width, screen_height])
-
-
-

The display index 0 means the default display is used. If no display -index argument is provided, the default display can be overridden with an -environment variable.

-
-

Changed in pygame 1.9.5: display argument added

-
-
- -
-
-pygame.display.get_surface()¶
-
-
Get a reference to the currently set display surface
-
get_surface() -> Surface
-
-

Return a reference to the currently set display Surface. If no display mode -has been set this will return None.

-
- -
-
-pygame.display.flip()¶
-
-
Update the full display Surface to the screen
-
flip() -> None
-
-

This will update the contents of the entire display. If your display mode is -using the flags pygame.HWSURFACE and pygame.DOUBLEBUF on pygame 1, -this will wait for a vertical retrace and swap the surfaces.

-

When using a pygame.OPENGL display mode this will perform a gl buffer swap.

-
- -
-
-pygame.display.update()¶
-
-
Update portions of the screen for software displays
-
update(rectangle=None) -> None
-
update(rectangle_list) -> None
-
-

This function is like an optimized version of pygame.display.flip() for software displays. It allows only a portion of the screen to be updated, instead of the entire area. If no argument is passed it updates the entire Surface area like pygame.display.flip().

-

Note that calling display.update(None) means no part of the window is -updated. Whereas display.update() means the whole window is updated.

-

You can pass the function a single rectangle, or a sequence of rectangles. -It is more efficient to pass many rectangles at once than to call update -multiple times with single or a partial list of rectangles. If passing a -sequence of rectangles it is safe to include None values in the list, which -will be skipped.
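As a small sketch, assuming two sprites were just redrawn (player_img, enemy_img and their rects are placeholder names), the affected areas can be collected and pushed in one call:

dirty_rects = []
dirty_rects.append(screen.blit(player_img, player_rect))  # blit() returns the changed Rect
dirty_rects.append(screen.blit(enemy_img, enemy_rect))
pygame.display.update(dirty_rects)  # only these regions are copied to the screen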

-

This call cannot be used on pygame.OPENGL displays and will generate an -exception.

-
- -
-
-pygame.display.get_driver()¶
-
-
Get the name of the pygame display backend
-
get_driver() -> name
-
-

Pygame chooses one of many available display backends when it is -initialized. This returns the internal name used for the display backend. -This can be used to provide limited information about what display -capabilities might be accelerated. See the SDL_VIDEODRIVER flags in -pygame.display.set_mode() to see some of the common options.

-
- -
-
-pygame.display.Info()¶
-
-
Create a video display information object
-
Info() -> VideoInfo
-
-

Creates a simple object containing several attributes to describe the -current graphics environment. If this is called before -pygame.display.set_mode() some platforms can provide information about -the default display mode. This can also be called after setting the display -mode to verify specific display options were satisfied. The VidInfo object -has several attributes:

-
hw:         1 if the display is hardware accelerated
-wm:         1 if windowed display modes can be used
-video_mem:  The megabytes of video memory on the display. This is 0 if
-            unknown
-bitsize:    Number of bits used to store each pixel
-bytesize:   Number of bytes used to store each pixel
-masks:      Four values used to pack RGBA values into pixels
-shifts:     Four values used to pack RGBA values into pixels
-losses:     Four values used to pack RGBA values into pixels
-blit_hw:    1 if hardware Surface blitting is accelerated
-blit_hw_CC: 1 if hardware Surface colorkey blitting is accelerated
-blit_hw_A:  1 if hardware Surface pixel alpha blitting is accelerated
-blit_sw:    1 if software Surface blitting is accelerated
-blit_sw_CC: 1 if software Surface colorkey blitting is accelerated
-blit_sw_A:  1 if software Surface pixel alpha blitting is accelerated
-current_h, current_w:  Height and width of the current video mode, or
-            of the desktop mode if called before the display.set_mode
-            is called. (current_h, current_w are available since
-            SDL 1.2.10, and pygame 1.8.0). They are -1 on error, or if
-            an old SDL is being used.
-
-
-
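A quick way to inspect a few of these fields (a sketch; the values depend entirely on the system):

import pygame

pygame.init()
info = pygame.display.Info()
print(info.current_w, info.current_h)  # desktop size when queried before set_mode()
print(info.bitsize, info.hw)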
- -
-
-pygame.display.get_wm_info()¶
-
-
Get information about the current windowing system
-
get_wm_info() -> dict
-
-

Creates a dictionary filled with string keys. The strings and values are -arbitrarily created by the system. Some systems may have no information and -an empty dictionary will be returned. Most platforms will return a "window" -key with the value set to the system id for the current display.

-
-

New in pygame 1.7.1.

-
-
- -
-
-pygame.display.get_desktop_sizes()¶
-
-
Get sizes of active desktops
-
get_desktop_sizes() -> list
-
-

This function returns the sizes of the currently configured virtual desktops as a list of (x, y) tuples of integers.

-

The length of the list is not the same as the number of attached monitors, -as a desktop can be mirrored across multiple monitors. The desktop sizes -do not indicate the maximum monitor resolutions supported by the hardware, -but the desktop size configured in the operating system.

-

In order to fit windows into the desktop as it is currently configured, and -to respect the resolution configured by the operating system in fullscreen -mode, this function should be used to replace many use cases of -pygame.display.list_modes() whenever applicable.

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.display.list_modes()¶
-
-
Get list of available fullscreen modes
-
list_modes(depth=0, flags=pygame.FULLSCREEN, display=0) -> list
-
-

This function returns a list of possible sizes for a specified color -depth. The return value will be an empty list if no display modes are -available with the given arguments. A return value of -1 means that -any requested size should work (this is likely the case for windowed -modes). Mode sizes are sorted from biggest to smallest.

-

If depth is 0, the current/best color depth for the display is used. -The flags defaults to pygame.FULLSCREEN, but you may need to add -additional flags for specific fullscreen modes.

-

The display index 0 means the default display is used.

-

Since pygame 2.0, pygame.display.get_desktop_sizes() has taken over -some use cases from pygame.display.list_modes():

-

To find a suitable size for non-fullscreen windows, it is preferable to -use pygame.display.get_desktop_sizes() to get the size of the current -desktop, and to then choose a smaller window size. This way, the window is -guaranteed to fit, even when the monitor is configured to a lower resolution -than the maximum supported by the hardware.

-

To avoid changing the physical monitor resolution, it is also preferable to -use pygame.display.get_desktop_sizes() to determine the fullscreen -resolution. Developers are strongly advised to default to the current -physical monitor resolution unless the user explicitly requests a different -one (e.g. in an options menu or configuration file).
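A sketch of that approach, opening a window at 80% of the primary desktop (the 0.8 factor is an arbitrary choice):

import pygame

pygame.init()
desktop_w, desktop_h = pygame.display.get_desktop_sizes()[0]  # primary desktop
win_size = (int(desktop_w * 0.8), int(desktop_h * 0.8))
screen = pygame.display.set_mode(win_size)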

-
-

Changed in pygame 1.9.5: display argument added

-
-
- -
-
-pygame.display.mode_ok()¶
-
-
Pick the best color depth for a display mode
-
mode_ok(size, flags=0, depth=0, display=0) -> depth
-
-

This function uses the same arguments as pygame.display.set_mode(). It -is used to determine if a requested display mode is available. It will -return 0 if the display mode cannot be set. Otherwise it will return a -pixel depth that best matches the display asked for.

-

Usually the depth argument is not passed, but some platforms can support -multiple display depths. If passed it will hint to which depth is a better -match.

-

The function will return 0 if the passed display flags cannot be set.

-

The display index 0 means the default display is used.

-
-

Changed in pygame 1.9.5: display argument added

-
-
- -
-
-pygame.display.gl_get_attribute()¶
-
-
Get the value for an OpenGL flag for the current display
-
gl_get_attribute(flag) -> value
-
-

After calling pygame.display.set_mode() with the pygame.OPENGL flag, -it is a good idea to check the value of any requested OpenGL attributes. See -pygame.display.gl_set_attribute() for a list of valid flags.

-
- -
-
-pygame.display.gl_set_attribute()¶
-
-
Request an OpenGL display attribute for the display mode
-
gl_set_attribute(flag, value) -> None
-
-

When calling pygame.display.set_mode() with the pygame.OPENGL flag, -Pygame automatically handles setting the OpenGL attributes like color and -double-buffering. OpenGL offers several other attributes you may want control -over. Pass one of these attributes as the flag, and its appropriate value. -This must be called before pygame.display.set_mode().

-

Many settings are the requested minimum. Creating a window with an OpenGL context -will fail if OpenGL cannot provide the requested attribute, but it may for example -give you a stencil buffer even if you request none, or it may give you a larger -one than requested.

-

The OPENGL flags are:

-
GL_ALPHA_SIZE, GL_DEPTH_SIZE, GL_STENCIL_SIZE, GL_ACCUM_RED_SIZE,
-GL_ACCUM_GREEN_SIZE,  GL_ACCUM_BLUE_SIZE, GL_ACCUM_ALPHA_SIZE,
-GL_MULTISAMPLEBUFFERS, GL_MULTISAMPLESAMPLES, GL_STEREO
-
-
-

GL_MULTISAMPLEBUFFERS

-
-

Whether to enable multisampling anti-aliasing. -Defaults to 0 (disabled).

-

Set GL_MULTISAMPLESAMPLES to a value -above 0 to control the amount of anti-aliasing. -A typical value is 2 or 3.
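For example, 4x multisampling could be requested like this (a sketch; the calls must come before set_mode() and the request may not be honoured on every system):

pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLEBUFFERS, 1)
pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLESAMPLES, 4)
screen = pygame.display.set_mode((640, 480), pygame.OPENGL | pygame.DOUBLEBUF)
print(pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLESAMPLES))  # check what was granted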

-
-

GL_STENCIL_SIZE

-
-

Minimum bit size of the stencil buffer. Defaults to 0.

-
-

GL_DEPTH_SIZE

-
-

Minimum bit size of the depth buffer. Defaults to 16.

-
-

GL_STEREO

-
-

1 enables stereo 3D. Defaults to 0.

-
-

GL_BUFFER_SIZE

-
-

Minimum bit size of the frame buffer. Defaults to 0.

-
-
-

New in pygame 2.0.0: Additional attributes:

-
-
GL_ACCELERATED_VISUAL,
-GL_CONTEXT_MAJOR_VERSION, GL_CONTEXT_MINOR_VERSION,
-GL_CONTEXT_FLAGS, GL_CONTEXT_PROFILE_MASK,
-GL_SHARE_WITH_CURRENT_CONTEXT,
-GL_CONTEXT_RELEASE_BEHAVIOR,
-GL_FRAMEBUFFER_SRGB_CAPABLE
-
-
-

GL_CONTEXT_PROFILE_MASK

-
-

Sets the OpenGL profile to one of these values:

-
GL_CONTEXT_PROFILE_CORE             disable deprecated features
-GL_CONTEXT_PROFILE_COMPATIBILITY    allow deprecated features
-GL_CONTEXT_PROFILE_ES               allow only the ES feature
-                                    subset of OpenGL
-
-
-
-

GL_ACCELERATED_VISUAL

-
-

Set to 1 to require hardware acceleration, or 0 to force software render. -By default, both are allowed.

-
-
- -
-
-pygame.display.get_active()¶
-
-
Returns True when the display is active on the screen
-
get_active() -> bool
-
-

Returns True when the display Surface is considered actively -renderable on the screen and may be visible to the user. This is -the default state immediately after pygame.display.set_mode(). -This method may return True even if the application is fully hidden -behind another application window.

-

This will return False if the display Surface has been iconified or -minimized (either via pygame.display.iconify() or via an OS -specific method such as the minimize-icon available on most -desktops).

-

The method can also return False for other reasons without the -application being explicitly iconified or minimized by the user. A -notable example being if the user has multiple virtual desktops and -the display Surface is not on the active virtual desktop.

-
-

Note

-

This function returning True is unrelated to whether the -application has input focus. Please see -pygame.key.get_focused() and pygame.mouse.get_focused() -for APIs related to input focus.

-
-
- -
-
-pygame.display.iconify()¶
-
-
Iconify the display surface
-
iconify() -> bool
-
-

Request the window for the display surface be iconified or hidden. Not all -systems and displays support an iconified display. The function will return -True if successful.

-

When the display is iconified pygame.display.get_active() will return False. The event queue should receive an ACTIVEEVENT event when the window has been iconified. Additionally, on pygame 2 the event queue also receives a WINDOWEVENT_MINIMIZED event when the window has been iconified.

-
- -
-
-pygame.display.toggle_fullscreen()¶
-
-
Switch between fullscreen and windowed displays
-
toggle_fullscreen() -> int
-
-

Switches the display window between windowed and fullscreen modes. -Display driver support is not great when using pygame 1, but with -pygame 2 it is the most reliable method to switch to and from fullscreen.

-

Supported display drivers in pygame 1:

-
-
    -
  • x11 (Linux/Unix)

  • -
  • wayland (Linux/Unix)

  • -
-
-

Supported display drivers in pygame 2:

-
-
    -
  • windows (Windows)

  • -
  • x11 (Linux/Unix)

  • -
  • wayland (Linux/Unix)

  • -
  • cocoa (OSX/Mac)

  • -
-
-
-

Note

-

toggle_fullscreen() doesn't work on Windows unless the window size is in pygame.display.list_modes() or the window is created with the flag pygame.SCALED. See issue #2380.

-
-
- -
-
-pygame.display.set_gamma()¶
-
-
Change the hardware gamma ramps
-
set_gamma(red, green=None, blue=None) -> bool
-
-

Set the red, green, and blue gamma values on the display hardware. If the -green and blue arguments are not passed, they will both be the same as red. -Not all systems and hardware support gamma ramps, if the function succeeds -it will return True.

-

A gamma value of 1.0 creates a linear color table. Lower values will -darken the display and higher values will brighten.

-
- -
-
-pygame.display.set_gamma_ramp()¶
-
-
Change the hardware gamma ramps with a custom lookup
-
set_gamma_ramp(red, green, blue) -> bool
-
-

Set the red, green, and blue gamma ramps with an explicit lookup table. Each -argument should be sequence of 256 integers. The integers should range -between 0 and 0xffff. Not all systems and hardware support gamma -ramps, if the function succeeds it will return True.

-
- -
-
-pygame.display.set_icon()¶
-
-
Change the system image for the display window
-
set_icon(Surface) -> None
-
-

Sets the runtime icon the system will use to represent the display window. -All windows default to a simple pygame logo for the window icon.

-

Note that calling this function implicitly initializes pygame.display, if -it was not initialized before.

-

You can pass any surface, but most systems want a smaller image around -32x32. The image can have colorkey transparency which will be passed to the -system.

-

Some systems do not allow the window icon to change after it has been shown. -This function can be called before pygame.display.set_mode() to create -the icon before the display mode is set.
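A minimal sketch, assuming an icon.png of roughly 32x32 pixels sits next to the script (the filename is hypothetical):

import pygame

pygame.init()
icon = pygame.image.load("icon.png")  # hypothetical icon file
pygame.display.set_icon(icon)         # set before set_mode() for best portability
screen = pygame.display.set_mode((640, 480))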

-
- -
-
-pygame.display.set_caption()¶
-
-
Set the current window caption
-
set_caption(title, icontitle=None) -> None
-
-

If the display has a window title, this function will change the name on the -window. In pygame 1.x, some systems supported an alternate shorter title to -be used for minimized displays, but in pygame 2 icontitle does nothing.

-
- -
-
-pygame.display.get_caption()¶
-
-
Get the current window caption
-
get_caption() -> (title, icontitle)
-
-

Returns the title and icontitle for the display window. In pygame 2.x -these will always be the same value.

-
- -
-
-pygame.display.set_palette()¶
-
-
Set the display color palette for indexed displays
-
set_palette(palette=None) -> None
-
-

This will change the video display color palette for 8-bit displays. This -does not change the palette for the actual display Surface, only the palette -that is used to display the Surface. If no palette argument is passed, the -system default palette will be restored. The palette is a sequence of -RGB triplets.

-
- -
-
-pygame.display.get_num_displays()¶
-
-
Return the number of displays
-
get_num_displays() -> int
-
-

Returns the number of available displays. This is always 1 if pygame.get_sdl_version() returns a major version number below 2.

-
-

New in pygame 1.9.5.

-
-
- -
-
-pygame.display.get_window_size()¶
-
-
Return the size of the window or screen
-
get_window_size() -> tuple
-
-

Returns the size of the window initialized with pygame.display.set_mode(). This may differ from the size of the display surface if SCALED is used.

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.display.get_allow_screensaver()¶
-
-
Return whether the screensaver is allowed to run.
-
get_allow_screensaver() -> bool
-
-

Return whether screensaver is allowed to run whilst the app is running. -Default is False. -By default pygame does not allow the screensaver during game play.

-
-

Note

-

Some platforms do not have a screensaver or do not support disabling the screensaver. Please see pygame.display.set_allow_screensaver() for caveats with screensaver support.

-
-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.display.set_allow_screensaver()¶
-
-
Set whether the screensaver may run
-
set_allow_screensaver(bool) -> None
-
-

Change whether screensavers should be allowed whilst the app is running. -The default value of the argument to the function is True. -By default pygame does not allow the screensaver during game play.

-

If the screensaver has been disallowed due to this function, it will automatically be allowed to run when pygame.quit() is called.

-

It is possible to influence the default value via the environment variable -SDL_HINT_VIDEO_ALLOW_SCREENSAVER, which can be set to either 0 (disable) -or 1 (enable).

-
-

Note

-

Disabling the screensaver is subject to platform support. When platform support is absent, this function will silently appear to work even though the screensaver state is unchanged. The lack of feedback is due to SDL not providing any supported method for determining whether it supports changing the screensaver state. SDL_HINT_VIDEO_ALLOW_SCREENSAVER is available in SDL 2.0.2 or later. SDL 1.2 does not implement this.

-
-
-

New in pygame 2.0.0.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/draw.html b/venv/Lib/site-packages/pygame/docs/generated/ref/draw.html deleted file mode 100644 index a8a7bb6..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/draw.html +++ /dev/null @@ -1,971 +0,0 @@ - - - - - - - - - pygame.draw — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.draw
-
-
pygame module for drawing shapes
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-pygame.draw.rect — draw a rectangle
-pygame.draw.polygon — draw a polygon
-pygame.draw.circle — draw a circle
-pygame.draw.ellipse — draw an ellipse
-pygame.draw.arc — draw an elliptical arc
-pygame.draw.line — draw a straight line
-pygame.draw.lines — draw multiple contiguous straight line segments
-pygame.draw.aaline — draw a straight antialiased line
-pygame.draw.aalines — draw multiple contiguous straight antialiased line segments
-

Draw several simple shapes to a surface. These functions will work for -rendering to any format of surface. Rendering to hardware surfaces will be -slower than regular software surfaces.

-

Most of the functions take a width argument to represent the size of stroke -(thickness) around the edge of the shape. If a width of 0 is passed the shape -will be filled (solid).

-

All the drawing functions respect the clip area for the surface and will be -constrained to that area. The functions return a rectangle representing the -bounding area of changed pixels. This bounding rectangle is the 'minimum' -bounding box that encloses the affected area.

-

All the drawing functions accept a color argument that can be one of the -following formats:

-
-a pygame.Color object
-an (RGB) triplet (tuple/list)
-an (RGBA) quadruplet (tuple/list)
-an integer value that has been mapped to the surface's pixel format

A color's alpha value will be written directly into the surface (if the -surface contains pixel alphas), but the draw function will not draw -transparently.

-

These functions temporarily lock the surface they are operating on. Many sequential drawing calls can be sped up by locking and unlocking the surface object around the draw calls (see pygame.Surface.lock() and pygame.Surface.unlock()).
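A sketch of that pattern, assuming screen is the display Surface, batching many circles between one lock()/unlock() pair:

screen.lock()
for i in range(100):
    # many small draws while the surface stays locked
    pygame.draw.circle(screen, (255, 255, 255), (i * 4 % 400, 100), 3)
screen.unlock()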

-
-

Note

-

See the pygame.gfxdraw module for alternative draw methods.

-
-
-
-pygame.draw.rect()¶
-
-
draw a rectangle
-
rect(surface, color, rect) -> Rect
-
rect(surface, color, rect, width=0, border_radius=0, border_top_left_radius=-1, border_top_right_radius=-1, border_bottom_left_radius=-1, border_bottom_right_radius=-1) -> Rect
-
-

Draws a rectangle on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • rect (Rect) -- rectangle to draw, position and dimensions

  • -
  • width (int) --

    (optional) used for line thickness or to indicate that -the rectangle is to be filled (not to be confused with the width value -of the rect parameter)

    -
    -
    -
    if width == 0, (default) fill the rectangle
    -
    if width > 0, used for line thickness
    -
    if width < 0, nothing will be drawn
    -

    -
    -
    -

    Note

    -

When using width values > 1, the edge lines will grow outside the original boundary of the rect. For more details on how the thickness for edge lines grows, refer to the width notes of the pygame.draw.line() function.

    -
    -
    -

  • -
  • border_radius (int) -- (optional) used for drawing rectangle with rounded corners. -The supported range is [0, min(height, width) / 2], with 0 representing a rectangle -without rounded corners.

  • -
  • border_top_left_radius (int) -- (optional) used for setting the value of top left -border. If you don't set this value, it will use the border_radius value.

  • -
  • border_top_right_radius (int) -- (optional) used for setting the value of top right -border. If you don't set this value, it will use the border_radius value.

  • -
  • border_bottom_left_radius (int) -- (optional) used for setting the value of bottom left -border. If you don't set this value, it will use the border_radius value.

  • -
  • border_bottom_right_radius (int) --

    (optional) used for setting the value of bottom right -border. If you don't set this value, it will use the border_radius value.

    -
    -
    -
    if border_radius < 1 it will draw rectangle without rounded corners
    -
    if any of border radii has the value < 0 it will use value of the border_radius
    -
    If sum of radii on the same side of the rectangle is greater than the rect size the radii
    -
    will get scaled
    -
    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the position of the given rect -parameter and its width and height will be 0

-
-
Return type
-

Rect

-
-
-
-

Note

-

The pygame.Surface.fill() method works just as well for drawing filled rectangles and can be hardware accelerated on some platforms with both software and hardware display modes.

-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
-

Changed in pygame 2.0.0.dev8: Added support for border radius.

-
-
- -
-
-pygame.draw.polygon()¶
-
-
draw a polygon
-
polygon(surface, color, points) -> Rect
-
polygon(surface, color, points, width=0) -> Rect
-
-

Draws a polygon on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 3 or more (x, y) coordinates that make up the vertices of the polygon, each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats, e.g. [(x1, y1), (x2, y2), (x3, y3)]

  • -
  • width (int) --

    (optional) used for line thickness or to indicate that -the polygon is to be filled

    -
    -
    -
    if width == 0, (default) fill the polygon
    -
    if width > 0, used for line thickness
    -
    if width < 0, nothing will be drawn
    -

    -
    -
    -

    Note

    -

When using width values > 1, the edge lines will grow outside the original boundary of the polygon. For more details on how the thickness for edge lines grows, refer to the width notes of the pygame.draw.line() function.

    -
    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the position of the first point in the -points parameter (float values will be truncated) and its width and -height will be 0

-
-
Return type
-

Rect

-
-
Raises
-
    -
  • ValueError -- if len(points) < 3 (must have at least 3 points)

  • -
  • TypeError -- if points is not a sequence or points does not -contain number pairs

  • -
-
-
-
-

Note

-

For an aapolygon, use aalines() with closed=True.

-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-
-pygame.draw.circle()¶
-
-
draw a circle
-
circle(surface, color, center, radius) -> Rect
-
circle(surface, color, center, radius, width=0, draw_top_right=None, draw_top_left=None, draw_bottom_left=None, draw_bottom_right=None) -> Rect
-
-

Draws a circle on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • center (tuple(int or float, int or float) or -list(int or float, int or float) or Vector2(int or float, int or float)) -- center point of the circle as a sequence of 2 ints/floats, -e.g. (x, y)

  • -
  • radius (int or float) -- radius of the circle, measured from the center parameter, -nothing will be drawn if the radius is less than 1

  • -
  • width (int) --

    (optional) used for line thickness or to indicate that -the circle is to be filled

    -
    -
    -
    if width == 0, (default) fill the circle
    -
    if width > 0, used for line thickness
    -
    if width < 0, nothing will be drawn
    -

    -
    -
    -

    Note

    -

    When using width values > 1, the edge lines will only grow -inward.

    -
    -
    -

  • -
  • draw_top_right (bool) -- (optional) if this is set to True then the top right corner -of the circle will be drawn

  • -
  • draw_top_left (bool) -- (optional) if this is set to True then the top left corner -of the circle will be drawn

  • -
  • draw_bottom_left (bool) -- (optional) if this is set to True then the bottom left corner -of the circle will be drawn

  • -
  • draw_bottom_right (bool) --

    (optional) if this is set to True then the bottom right corner -of the circle will be drawn

    -
    -
    -
    if any of the draw_circle_part is True then it will draw all circle parts that have the True
    -
    value, otherwise it will draw the entire circle.
    -
    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the center parameter value (float -values will be truncated) and its width and height will be 0

-
-
Return type
-

Rect

-
-
Raises
-
    -
  • TypeError -- if center is not a sequence of two numbers

  • -
  • TypeError -- if radius is not a number

  • -
-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments. -Nothing is drawn when the radius is 0 (a pixel at the center coordinates -used to be drawn when the radius equaled 0). -Floats, and Vector2 are accepted for the center param. -The drawing algorithm was improved to look more like a circle.

-
-
-

Changed in pygame 2.0.0.dev8: Added support for drawing circle quadrants.

-
-
- -
-
-pygame.draw.ellipse()¶
-
-
draw an ellipse
-
ellipse(surface, color, rect) -> Rect
-
ellipse(surface, color, rect, width=0) -> Rect
-
-

Draws an ellipse on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • rect (Rect) -- rectangle to indicate the position and dimensions of the -ellipse, the ellipse will be centered inside the rectangle and bounded -by it

  • -
  • width (int) --

    (optional) used for line thickness or to indicate that -the ellipse is to be filled (not to be confused with the width value -of the rect parameter)

    -
    -
    -
    if width == 0, (default) fill the ellipse
    -
    if width > 0, used for line thickness
    -
    if width < 0, nothing will be drawn
    -

    -
    -
    -

    Note

    -

    When using width values > 1, the edge lines will only grow -inward from the original boundary of the rect parameter.

    -
    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the position of the given rect -parameter and its width and height will be 0

-
-
Return type
-

Rect

-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-
-pygame.draw.arc()¶
-
-
draw an elliptical arc
-
arc(surface, color, rect, start_angle, stop_angle) -> Rect
-
arc(surface, color, rect, start_angle, stop_angle, width=1) -> Rect
-
-

Draws an elliptical arc on the given surface.

-

The two angle arguments are given in radians and indicate the start and stop -positions of the arc. The arc is drawn in a counterclockwise direction from -the start_angle to the stop_angle.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • rect (Rect) -- rectangle to indicate the position and dimensions of the -ellipse which the arc will be based on, the ellipse will be centered -inside the rectangle

  • -
  • start_angle (float) -- start angle of the arc in radians

  • -
  • stop_angle (float) --

    stop angle of the arc in -radians

    -
    -
    -
    if start_angle < stop_angle, the arc is drawn in a -counterclockwise direction from the start_angle to the -stop_angle
    -
    if start_angle > stop_angle, tau (tau == 2 * pi) will be added -to the stop_angle, if the resulting stop angle value is greater -than the start_angle the above start_angle < stop_angle case -applies, otherwise nothing will be drawn
    -
    if start_angle == stop_angle, nothing will be drawn
    -

    -
    -
    -

  • -
  • width (int) --

    (optional) used for line thickness (not to be confused -with the width value of the rect parameter)

    -
    -
    -
    if width == 0, nothing will be drawn
    -
    if width > 0, (default is 1) used for line thickness
    -
    if width < 0, same as width == 0
    -
    -
    -

    Note

    -

    When using width values > 1, the edge lines will only grow -inward from the original boundary of the rect parameter.

    -
    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the position of the given rect -parameter and its width and height will be 0

-
-
Return type
-

Rect

-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-
-pygame.draw.line()¶
-
-
draw a straight line
-
line(surface, color, start_pos, end_pos) -> Rect
-
line(surface, color, start_pos, end_pos, width=1) -> Rect
-
-

Draws a straight line on the given surface. There are no endcaps. For thick -lines the ends are squared off.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • start_pos (tuple(int or float, int or float) or -list(int or float, int or float) or Vector2(int or float, int or float)) -- start position of the line, (x, y)

  • -
  • end_pos (tuple(int or float, int or float) or -list(int or float, int or float) or Vector2(int or float, int or float)) -- end position of the line, (x, y)

  • -
  • width (int) --

    (optional) used for line thickness

    -
    -
    if width >= 1, used for line thickness (default is 1)
    -
    if width < 1, nothing will be drawn
    -

    -
    -
    -

    Note

    -

    When using width values > 1, lines will grow as follows.

    -

    For odd width values, the thickness of each line grows with the -original line being in the center.

    -

    For even width values, the thickness of each line grows with the -original line being offset from the center (as there is no exact -center line drawn). As a result, lines with a slope < 1 -(horizontal-ish) will have 1 more pixel of thickness below the -original line (in the y direction). Lines with a slope >= 1 -(vertical-ish) will have 1 more pixel of thickness to the right of -the original line (in the x direction).

    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the start_pos parameter value (float -values will be truncated) and its width and height will be 0

-
-
Return type
-

Rect

-
-
Raises
-

TypeError -- if start_pos or end_pos is not a sequence of -two numbers

-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-
-pygame.draw.lines()¶
-
-
draw multiple contiguous straight line segments
-
lines(surface, color, closed, points) -> Rect
-
lines(surface, color, closed, points, width=1) -> Rect
-
-

Draws a sequence of contiguous straight lines on the given surface. There are -no endcaps or miter joints. For thick lines the ends are squared off. -Drawing thick lines with sharp corners can have undesired looking results.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • closed (bool) -- if True an additional line segment is drawn between -the first and last points in the points sequence

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 2 or more (x, y) coordinates, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats and adjacent coordinates will be connected by a line segment, e.g. for the points [(x1, y1), (x2, y2), (x3, y3)] a line segment will be drawn from (x1, y1) to (x2, y2) and from (x2, y2) to (x3, y3), additionally if the closed parameter is True another line segment will be drawn from (x3, y3) to (x1, y1)

  • -
  • width (int) --

    (optional) used for line thickness

    -
    -
    if width >= 1, used for line thickness (default is 1)
    -
    if width < 1, nothing will be drawn
    -

    -
    -
    -

    Note

    -

    When using width values > 1 refer to the width notes -of line() for details on how thick lines grow.

    -
    -

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the position of the first point in the -points parameter (float values will be truncated) and its width and -height will be 0

-
-
Return type
-

Rect

-
-
Raises
-
    -
  • ValueError -- if len(points) < 2 (must have at least 2 points)

  • -
  • TypeError -- if points is not a sequence or points does not -contain number pairs

  • -
-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-
-pygame.draw.aaline()¶
-
-
draw a straight antialiased line
-
aaline(surface, color, start_pos, end_pos) -> Rect
-
aaline(surface, color, start_pos, end_pos, blend=1) -> Rect
-
-

Draws a straight antialiased line on the given surface.

-

The line has a thickness of one pixel and the endpoints have a height and -width of one pixel each.

-
-
The way a line and its endpoints are drawn:

If both endpoints are equal, only a single pixel is drawn (after -rounding floats to nearest integer).

-

Otherwise if the line is not steep (i.e. if the length along the x-axis -is greater than the height along the y-axis):

-
-

For each endpoint:

-
-

If x, the endpoint's x-coordinate, is a whole number find -which pixels would be covered by it and draw them.

-

Otherwise:

-
-

Calculate the position of the nearest point with a whole number -for its x-coordinate, when extending the line past the -endpoint.

-

Find which pixels would be covered and how much by that point.

-

If the endpoint is the left one, multiply the coverage by (1 - -the decimal part of x).

-

Otherwise multiply the coverage by the decimal part of x.

-

Then draw those pixels.

-
-
e.g.:
-
The left endpoint of the line ((1, 1.3), (5, 3)) would -cover 70% of the pixel (1, 1) and 30% of the pixel -(1, 2) while the right one would cover 100% of the -pixel (5, 3).
-
The left endpoint of the line ((1.2, 1.4), (4.6, 3.1)) would cover 56% (i.e. 0.8 * 70%) of the pixel (1, 1) and 24% (i.e. 0.8 * 30%) of the pixel (1, 2) while the right one would cover 42% (i.e. 0.6 * 70%) of the pixel (5, 3) and 18% (i.e. 0.6 * 30%) of the pixel (5, 4).
-
-
-
-
-
-

Then for each point between the endpoints, along the line, whose -x-coordinate is a whole number:

-
-

Find which pixels would be covered and how much by that point and -draw them.

-
-
e.g.:
-
The points along the line ((1, 1), (4, 2.5)) would be -(2, 1.5) and (3, 2) and would cover 50% of the pixel -(2, 1), 50% of the pixel (2, 2) and 100% of the pixel -(3, 2).
-
The points along the line ((1.2, 1.4), (4.6, 3.1)) would be (2, 1.8) (covering 20% of the pixel (2, 1) and 80% of the pixel (2, 2)), (3, 2.3) (covering 70% of the pixel (3, 2) and 30% of the pixel (3, 3)) and (4, 2.8) (covering 20% of the pixel (4, 2) and 80% of the pixel (4, 3))
-
-
-
-
-
-

Otherwise do the same for steep lines as for non-steep lines except -along the y-axis instead of the x-axis (using y instead of x, -top instead of left and bottom instead of right).

-
-
-
-

Note

-

Regarding float values for coordinates, a point with coordinate -consisting of two whole numbers is considered being right in the center -of said pixel (and having a height and width of 1 pixel would therefore -completely cover it), while a point with coordinate where one (or both) -of the numbers have non-zero decimal parts would be partially covering -two (or four if both numbers have decimal parts) adjacent pixels, e.g. -the point (1.4, 2) covers 60% of the pixel (1, 2) and 40% of the -pixel (2,2).

-
-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • start_pos (tuple(int or float, int or float) or -list(int or float, int or float) or Vector2(int or float, int or float)) -- start position of the line, (x, y)

  • -
  • end_pos (tuple(int or float, int or float) or -list(int or float, int or float) or Vector2(int or float, int or float)) -- end position of the line, (x, y)

  • -
  • blend (int) -- (optional) if non-zero (default) the line will be blended -with the surface's existing pixel shades, otherwise it will overwrite them

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the start_pos parameter value (float -values will be truncated) and its width and height will be 0

-
-
Return type
-

Rect

-
-
Raises
-

TypeError -- if start_pos or end_pos is not a sequence of -two numbers

-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-
-pygame.draw.aalines()¶
-
-
draw multiple contiguous straight antialiased line segments
-
aalines(surface, color, closed, points) -> Rect
-
aalines(surface, color, closed, points, blend=1) -> Rect
-
-

Draws a sequence of contiguous straight antialiased lines on the given -surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • color (Color or int or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
  • closed (bool) -- if True an additional line segment is drawn between -the first and last points in the points sequence

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 2 or more (x, y) coordinates, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats and adjacent coordinates will be connected by a line segment, e.g. for the points [(x1, y1), (x2, y2), (x3, y3)] a line segment will be drawn from (x1, y1) to (x2, y2) and from (x2, y2) to (x3, y3), additionally if the closed parameter is True another line segment will be drawn from (x3, y3) to (x1, y1)

  • -
  • blend (int) -- (optional) if non-zero (default) each line will be blended -with the surface's existing pixel shades, otherwise the pixels will be -overwritten

  • -
-
-
Returns
-

a rect bounding the changed pixels, if nothing is drawn the -bounding rect's position will be the position of the first point in the -points parameter (float values will be truncated) and its width and -height will be 0

-
-
Return type
-

Rect

-
-
Raises
-
    -
  • ValueError -- if len(points) < 2 (must have at least 2 points)

  • -
  • TypeError -- if points is not a sequence or points does not -contain number pairs

  • -
-
-
-
-

Changed in pygame 2.0.0: Added support for keyword arguments.

-
-
- -
-draw module example -
-

Example code for draw module.¶

-
-
-
# Import a library of functions called 'pygame'
-import pygame
-from math import pi
- 
-# Initialize the game engine
-pygame.init()
- 
-# Define the colors we will use in RGB format
-BLACK = (  0,   0,   0)
-WHITE = (255, 255, 255)
-BLUE =  (  0,   0, 255)
-GREEN = (  0, 255,   0)
-RED =   (255,   0,   0)
- 
-# Set the height and width of the screen
-size = [400, 300]
-screen = pygame.display.set_mode(size)
- 
-pygame.display.set_caption("Example code for the draw module")
- 
-#Loop until the user clicks the close button.
-done = False
-clock = pygame.time.Clock()
- 
-while not done:
- 
-    # This limits the while loop to a max of 10 times per second.
-    # Leave this out and we will use all CPU we can.
-    clock.tick(10)
-     
-    for event in pygame.event.get(): # User did something
-        if event.type == pygame.QUIT: # If user clicked close
-            done=True # Flag that we are done so we exit this loop
- 
-    # All drawing code happens after the for loop, but
-    # inside the main while done==False loop.
-     
-    # Clear the screen and set the screen background
-    screen.fill(WHITE)
- 
-    # Draw on the screen a GREEN line from (0, 0) to (50, 30) 
-    # 5 pixels wide.
-    pygame.draw.line(screen, GREEN, [0, 0], [50,30], 5)
- 
-    # Draw on the screen 3 BLACK lines, each 5 pixels wide.
-    # The 'False' means the first and last points are not connected.
-    pygame.draw.lines(screen, BLACK, False, [[0, 80], [50, 90], [200, 80], [220, 30]], 5)
-    
-    # Draw on the screen a GREEN line from (0, 50) to (50, 80) 
-    # Because it is an antialiased line, it is 1 pixel wide.
-    pygame.draw.aaline(screen, GREEN, [0, 50],[50, 80], True)
-
-    # Draw a rectangle outline
-    pygame.draw.rect(screen, BLACK, [75, 10, 50, 20], 2)
-     
-    # Draw a solid rectangle
-    pygame.draw.rect(screen, BLACK, [150, 10, 50, 20])
-
-    # Draw a rectangle with rounded corners
-    pygame.draw.rect(screen, GREEN, [115, 210, 70, 40], 10, border_radius=15)
-    pygame.draw.rect(screen, RED, [135, 260, 50, 30], 0, border_radius=10, border_top_left_radius=0,
-                     border_bottom_right_radius=15)
-
-    # Draw an ellipse outline, using a rectangle as the outside boundaries
-    pygame.draw.ellipse(screen, RED, [225, 10, 50, 20], 2) 
-
-    # Draw an solid ellipse, using a rectangle as the outside boundaries
-    pygame.draw.ellipse(screen, RED, [300, 10, 50, 20]) 
- 
-    # This draws a triangle using the polygon command
-    pygame.draw.polygon(screen, BLACK, [[100, 100], [0, 200], [200, 200]], 5)
-  
-    # Draw an arc as part of an ellipse. 
-    # Use radians to determine what angle to draw.
-    pygame.draw.arc(screen, BLACK,[210, 75, 150, 125], 0, pi/2, 2)
-    pygame.draw.arc(screen, GREEN,[210, 75, 150, 125], pi/2, pi, 2)
-    pygame.draw.arc(screen, BLUE, [210, 75, 150, 125], pi,3*pi/2, 2)
-    pygame.draw.arc(screen, RED,  [210, 75, 150, 125], 3*pi/2, 2*pi, 2)
-    
-    # Draw a circle
-    pygame.draw.circle(screen, BLUE, [60, 250], 40)
-
-    # Draw only one circle quadrant
-    pygame.draw.circle(screen, BLUE, [250, 250], 40, 0, draw_top_right=True)
-    pygame.draw.circle(screen, RED, [250, 250], 40, 30, draw_top_left=True)
-    pygame.draw.circle(screen, GREEN, [250, 250], 40, 20, draw_bottom_left=True)
-    pygame.draw.circle(screen, BLACK, [250, 250], 40, 10, draw_bottom_right=True)
-
-    # Go ahead and update the screen with what we've drawn.
-    # This MUST happen after all the other drawing commands.
-    pygame.display.flip()
- 
-# Be IDLE friendly
-pygame.quit()
-
-
-
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/event.html b/venv/Lib/site-packages/pygame/docs/generated/ref/event.html deleted file mode 100644 index 4906eb6..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/event.html +++ /dev/null @@ -1,715 +0,0 @@ - - - - - - - - - pygame.event — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.event
-
-
pygame module for interacting with events and queues
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-pygame.event.pump — internally process pygame event handlers
-pygame.event.get — get events from the queue
-pygame.event.poll — get a single event from the queue
-pygame.event.wait — wait for a single event from the queue
-pygame.event.peek — test if event types are waiting on the queue
-pygame.event.clear — remove all events from the queue
-pygame.event.event_name — get the string name from an event id
-pygame.event.set_blocked — control which events are allowed on the queue
-pygame.event.set_allowed — control which events are allowed on the queue
-pygame.event.get_blocked — test if a type of event is blocked from the queue
-pygame.event.set_grab — control the sharing of input devices with other applications
-pygame.event.get_grab — test if the program is sharing input devices
-pygame.event.post — place a new event on the queue
-pygame.event.custom_type — make custom user event type
-pygame.event.Event — create a new event object
-pygame.event.EventType — pygame object for representing events
-

Pygame handles all its event messaging through an event queue. The routines in this module help you manage that event queue. The input queue is heavily dependent on the pygame.display module. If the display has not been initialized and a video mode not set, the event queue may not work properly.

-

The event queue has an upper limit on the number of events it can hold. When the queue becomes full, new events are quietly dropped. To prevent lost events, especially input events which signal a quit command, your program must handle events every frame (with pygame.event.get(), pygame.event.pump(), pygame.event.wait(), pygame.event.peek() or pygame.event.clear()) and process them. Not handling events may cause your system to decide your program has locked up. To speed up queue processing, use pygame.event.set_blocked() to limit which events get queued.

-
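A minimal per-frame event loop along these lines might look like the following sketch (the window size and frame rate are arbitrary illustration values, not taken from this page):

    import pygame

    pygame.init()
    screen = pygame.display.set_mode((320, 240))   # arbitrary size, just for the sketch
    clock = pygame.time.Clock()

    running = True
    while running:
        # Drain the queue every frame so the OS never decides the program has locked up.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
        screen.fill((0, 0, 0))
        pygame.display.flip()
        clock.tick(60)

    pygame.quit()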

To get the state of various input devices, you can forego the event queue and access the input devices directly with their appropriate modules: pygame.mouse, pygame.key, and pygame.joystick. If you use this method, remember that pygame requires some form of communication with the system window manager and other parts of the platform. To keep pygame in sync with the system, you will need to call pygame.event.pump() to keep everything current. Usually, this should be called once per game loop. Note: Joysticks will not send any events until the device has been initialized.

-

The event queue contains pygame.event.EventType event objects. There are a variety of ways to access the queued events, from simply checking for the existence of events, to grabbing them directly off the stack. The event queue also offers some simple filtering which can slightly help performance by blocking certain event types from the queue. Use pygame.event.set_allowed() and pygame.event.set_blocked() to change this filtering. By default, all event types can be placed on the queue.

-

All pygame.event.EventType instances contain an event type identifier and attributes specific to that event type. The event type identifier is accessible as the pygame.event.EventType.type property. Any of the event specific attributes can be accessed through the pygame.event.EventType.__dict__ attribute or directly as an attribute of the event object (as member lookups are passed through to the object's dictionary values). The event object has no method functions. Users can create their own new events with the pygame.event.Event() function.

-

The event type identifier is in between the values of NOEVENT and NUMEVENTS. User defined events should have a value in the inclusive range of USEREVENT to NUMEVENTS - 1. User defined events can get a custom event number with pygame.event.custom_type(). It is recommended all user events follow this system.

-
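As an illustration, a short sketch of reserving and using a custom event type (the event name and its attributes are invented for the example; as noted above, the queue needs pygame.display to be set up):

    import pygame

    pygame.init()
    pygame.display.set_mode((320, 240))        # the event queue relies on pygame.display

    SPAWN_ENEMY = pygame.event.custom_type()   # reserve a unique id instead of USEREVENT + n

    # Attach arbitrary attributes and put the event on the queue.
    pygame.event.post(pygame.event.Event(SPAWN_ENEMY, {"pos": (100, 50), "hp": 3}))

    for event in pygame.event.get():
        if event.type == SPAWN_ENEMY:
            print(event.pos, event.hp)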

Events support equality and inequality comparisons. Two events are equal if -they are the same type and have identical attribute values.

-

While debugging and experimenting, you can print an event object for a quick display of its type and members. The function pygame.event.event_name() can be used to get a string representing the name of the event type.

-

Events that come from the system will have a guaranteed set of member attributes based on the type. The following is a list of event types with their specific attributes.

-
QUIT              none
-ACTIVEEVENT       gain, state
-KEYDOWN           key, mod, unicode, scancode
-KEYUP             key, mod, unicode, scancode
-MOUSEMOTION       pos, rel, buttons, touch
-MOUSEBUTTONUP     pos, button, touch
-MOUSEBUTTONDOWN   pos, button, touch
-JOYAXISMOTION     joy (deprecated), instance_id, axis, value
-JOYBALLMOTION     joy (deprecated), instance_id, ball, rel
-JOYHATMOTION      joy (deprecated), instance_id, hat, value
-JOYBUTTONUP       joy (deprecated), instance_id, button
-JOYBUTTONDOWN     joy (deprecated), instance_id, button
-VIDEORESIZE       size, w, h
-VIDEOEXPOSE       none
-USEREVENT         code
-
-
-
-

Changed in pygame 2.0.0: The joy attribute was deprecated, instance_id was added.

-
-
-

Changed in pygame 2.0.1: The unicode attribute was added to KEYUP event.

-
-

You can also find a list of constants for keyboard keys in the pygame.key module documentation.

-
-
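Inside an event loop like the sketch earlier on this page (pygame already imported and initialized), the per-type attributes listed above can be read straight off the event object; a small illustrative sketch:

    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            print(event.key, event.mod, event.unicode, event.scancode)
        elif event.type == pygame.MOUSEMOTION:
            print(event.pos, event.rel, event.buttons)
        elif event.type == pygame.JOYBUTTONDOWN:
            # 'joy' is deprecated since pygame 2.0.0; prefer instance_id
            print(event.instance_id, event.button)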

-
-

On MacOSX when a file is opened using a pygame application, a USEREVENT -with its code attribute set to pygame.USEREVENT_DROPFILE is generated. -There is an additional attribute called filename where the name of the file -being accessed is stored.

-
USEREVENT         code=pygame.USEREVENT_DROPFILE, filename
-
-
-
-

New in pygame 1.9.2.

-
-
-

-
-

When compiled with SDL2, pygame has these additional events and their -attributes.

-
AUDIODEVICEADDED   which, iscapture
-AUDIODEVICEREMOVED which, iscapture
-FINGERMOTION       touch_id, finger_id, x, y, dx, dy
-FINGERDOWN         touch_id, finger_id, x, y, dx, dy
-FINGERUP           touch_id, finger_id, x, y, dx, dy
-MOUSEWHEEL         which, flipped, x, y, touch
-MULTIGESTURE       touch_id, x, y, pinched, rotated, num_fingers
-TEXTEDITING        text, start, length
-TEXTINPUT          text
-
-
-
-

New in pygame 1.9.5.

-
-
-

Changed in pygame 2.0.2: Fixed amount horizontal scroll (x, positive to the right and negative to the left).

-
-
-

Changed in pygame 2.0.2: The touch attribute was added to all the MOUSE events.

-
-

The touch attribute of MOUSE events indicates whether or not the events were generated -by a touch input device, and not a real mouse. You might want to ignore such events, if your application -already handles FINGERMOTION, FINGERDOWN and FINGERUP events.

-
-

-
-

Many new events were introduced in pygame 2.

-

pygame can recognize text or files dropped in its window. If a file is dropped, a DROPFILE event will be sent, with file set to its path. The DROPTEXT event is only supported on X11.

-

MIDIIN and MIDIOUT are events reserved for pygame.midi use.

-

pygame 2 also supports controller hot-plugging.

-
DROPBEGIN
-DROPCOMPLETE
-DROPFILE                 file
-DROPTEXT                 text
-MIDIIN
-MIDIOUT
-CONTROLLERDEVICEADDED    device_index
-JOYDEVICEADDED           device_index
-CONTROLLERDEVICEREMOVED  instance_id
-JOYDEVICEREMOVED         instance_id
-CONTROLLERDEVICEREMAPPED instance_id
-
-
-

Also in this version, instance_id attributes were added to joystick events, -and the joy attribute was deprecated.

-
-

New in pygame 2.0.0.

-
-

Since pygame 2.0.1, there is a new set of events, called window events. Here is a list of all window events, along with a short description:

-
Event type                Short description
-
-WINDOWSHOWN            Window became shown
-WINDOWHIDDEN           Window became hidden
-WINDOWEXPOSED          Window got updated by some external event
-WINDOWMOVED            Window got moved
-WINDOWRESIZED          Window got resized
-WINDOWSIZECHANGED      Window changed its size
-WINDOWMINIMIZED        Window was minimized
-WINDOWMAXIMIZED        Window was maximized
-WINDOWRESTORED         Window was restored
-WINDOWENTER            Mouse entered the window
-WINDOWLEAVE            Mouse left the window
-WINDOWFOCUSGAINED      Window gained focus
-WINDOWFOCUSLOST        Window lost focus
-WINDOWCLOSE            Window was closed
-WINDOWTAKEFOCUS        Window was offered focus
-WINDOWHITTEST          Window has a special hit test
-
-
-

If the SDL version used is older than 2.0.5, the last two events, WINDOWTAKEFOCUS and WINDOWHITTEST, will not work.

-

Most of these window events do not have any attributes, except WINDOWMOVED, WINDOWRESIZED and WINDOWSIZECHANGED, which have x and y attributes.

-
-
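Window events are handled like any other event type; a brief sketch inside an already-running event loop (the 'paused' flag is a hypothetical variable used only for this example):

    for event in pygame.event.get():
        if event.type == pygame.WINDOWFOCUSLOST:
            paused = True                      # e.g. pause the game while unfocused
        elif event.type == pygame.WINDOWMOVED:
            print("window moved to", event.x, event.y)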

-
-
-
-pygame.event.pump()¶
-
-
internally process pygame event handlers
-
pump() -> None
-
-

For each frame of your game, you will need to make some sort of call to the -event queue. This ensures your program can internally interact with the rest -of the operating system. If you are not using other event functions in your -game, you should call pygame.event.pump() to allow pygame to handle -internal actions.

-

This function is not necessary if your program is consistently processing events on the queue through the other pygame.event functions.

-

There are important things that must be dealt with internally in the event -queue. The main window may need to be repainted or respond to the system. If -you fail to make a call to the event queue for too long, the system may -decide your program has locked up.

-
-

Caution

-

This function should only be called in the thread that initialized pygame.display.

-
-
- -
-
-pygame.event.get()¶
-
-
get events from the queue
-
get(eventtype=None) -> Eventlist
-
get(eventtype=None, pump=True) -> Eventlist
-
get(eventtype=None, pump=True, exclude=None) -> Eventlist
-
-

This will get all the messages and remove them from the queue. If a type or -sequence of types is given only those messages will be removed from the -queue and returned.

-

If a type or sequence of types is passed in the exclude argument instead, then only messages of the other types will be removed from the queue and returned. If an exclude parameter is passed, the eventtype parameter must be None.

-

If you are only taking specific events from the queue, be aware that the queue could eventually fill up with the events you are not interested in.

-

If pump is True (the default), then pygame.event.pump() will be called.

-
-

Changed in pygame 1.9.5: Added pump argument

-
-
-

Changed in pygame 2.0.2: Added exclude argument

-
-
- -
-
-pygame.event.poll()¶
-
-
get a single event from the queue
-
poll() -> EventType instance
-
-

Returns a single event from the queue. If the event queue is empty an event -of type pygame.NOEVENT will be returned immediately. The returned event -is removed from the queue.

-
-

Caution

-

This function should only be called in the thread that initialized pygame.display.

-
-
- -
-
-pygame.event.wait()¶
-
-
wait for a single event from the queue
-
wait() -> EventType instance
-
wait(timeout) -> EventType instance
-
-

Returns a single event from the queue. If the queue is empty this function -will wait until one is created. From pygame 2.0.0, if a timeout argument -is given, the function will return an event of type pygame.NOEVENT -if no events enter the queue in timeout milliseconds. The event is removed -from the queue once it has been returned. While the program is waiting it will -sleep in an idle state. This is important for programs that want to share the -system with other applications.

-
-
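A sketch of an idle loop built on wait() with a timeout (the 100 ms value is arbitrary, and the timeout argument needs pygame 2.0.0 or later, as noted below; pygame is assumed to be initialized):

    while True:
        event = pygame.event.wait(100)      # sleep up to 100 ms waiting for input
        if event.type == pygame.NOEVENT:
            continue                        # timed out: do housekeeping, then wait again
        if event.type == pygame.QUIT:
            break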

Changed in pygame 2.0.0.dev13: Added timeout argument

-
-
-

Caution

-

This function should only be called in the thread that initialized pygame.display.

-
-
- -
-
-pygame.event.peek()¶
-
-
test if event types are waiting on the queue
-
peek(eventtype=None) -> bool
-
peek(eventtype=None, pump=True) -> bool
-
-

Returns True if there are any events of the given type waiting on the -queue. If a sequence of event types is passed, this will return True if -any of those events are on the queue.

-

If pump is True (the default), then pygame.event.pump() will be called.

-
-

Changed in pygame 1.9.5: Added pump argument

-
-
- -
-
-pygame.event.clear()¶
-
-
remove all events from the queue
-
clear(eventtype=None) -> None
-
clear(eventtype=None, pump=True) -> None
-
-

Removes all events from the queue. If eventtype is given, removes the given event or sequence of events. This has the same effect as pygame.event.get() except None is returned. It can be slightly more efficient when clearing a full event queue.

-

If pump is True (the default), then pygame.event.pump() will be called.

-
-

Changed in pygame 1.9.5: Added pump argument

-
-
- -
-
-pygame.event.event_name()¶
-
-
get the string name from an event id
-
event_name(type) -> string
-
-

Returns a string representing the name (in CapWords style) of the given -event type.

-

"UserEvent" is returned for all values in the user event id range. -"Unknown" is returned when the event type does not exist.

-
- -
-
-pygame.event.set_blocked()¶
-
-
control which events are allowed on the queue
-
set_blocked(type) -> None
-
set_blocked(typelist) -> None
-
set_blocked(None) -> None
-
-

The given event types are not allowed to appear on the event queue. By -default all events can be placed on the queue. It is safe to disable an -event type multiple times.

-

If None is passed as the argument, ALL of the event types are blocked -from being placed on the queue.

-
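A short sketch of the two filtering styles described here (the chosen event types are only an example; pygame is assumed to be initialized):

    # Drop a high-frequency event the program never reads.
    pygame.event.set_blocked(pygame.MOUSEMOTION)
    print(pygame.event.get_blocked(pygame.MOUSEMOTION))    # True

    # Or allow only a whitelist: block everything, then re-allow a few types.
    pygame.event.set_blocked(None)
    pygame.event.set_allowed([pygame.QUIT, pygame.KEYDOWN, pygame.KEYUP])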
- -
-
-pygame.event.set_allowed()¶
-
-
control which events are allowed on the queue
-
set_allowed(type) -> None
-
set_allowed(typelist) -> None
-
set_allowed(None) -> None
-
-

The given event types are allowed to appear on the event queue. By default, -all event types can be placed on the queue. It is safe to enable an event -type multiple times.

-

If None is passed as the argument, ALL of the event types are allowed -to be placed on the queue.

-
- -
-
-pygame.event.get_blocked()¶
-
-
test if a type of event is blocked from the queue
-
get_blocked(type) -> bool
-
get_blocked(typelist) -> bool
-
-

Returns True if the given event type is blocked from the queue. If a -sequence of event types is passed, this will return True if any of those -event types are blocked.

-
- -
-
-pygame.event.set_grab()¶
-
-
control the sharing of input devices with other applications
-
set_grab(bool) -> None
-
-

When your program runs in a windowed environment, it will share the mouse -and keyboard devices with other applications that have focus. If your -program sets the event grab to True, it will lock all input into your -program.

-

It is best to not always grab the input, since it prevents the user from -doing other things on their system.

-
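A brief sketch of grabbing and releasing input (when to release is application-specific; the in-game menu mentioned in the comment is hypothetical):

    pygame.event.set_grab(True)        # confine mouse and keyboard input to this window
    if pygame.event.get_grab():
        print("input is currently grabbed")
    pygame.event.set_grab(False)       # release again, e.g. when an in-game menu opens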
- -
-
-pygame.event.get_grab()¶
-
-
test if the program is sharing input devices
-
get_grab() -> bool
-
-

Returns True when the input events are grabbed for this application.

-
- -
-
-pygame.event.post()¶
-
-
place a new event on the queue
-
post(Event) -> bool
-
-

Places the given event at the end of the event queue.

-

This is usually used for placing custom events on the event queue. -Any type of event can be posted, and the events posted can have any attributes.

-

This returns a boolean on whether the event was posted or not. Blocked events -cannot be posted, and this function returns False if you try to post them.

-
-

Changed in pygame 2.0.1: returns a boolean, previously returned None

-
-
- -
-
-pygame.event.custom_type()¶
-
-
make custom user event type
-
custom_type() -> int
-
-

Reserves a pygame.USEREVENT for a custom use.

-

If too many events are made, a pygame.error is raised.

-
-

New in pygame 2.0.0.dev3.

-
-
- -
-
-pygame.event.Event()¶
-
-
create a new event object
-
Event(type, dict) -> EventType instance
-
Event(type, **attributes) -> EventType instance
-
-

Creates a new event with the given type and attributes. The attributes can -come from a dictionary argument with string keys or from keyword arguments.

-
- -
-
-pygame.event.EventType¶
-
-
pygame object for representing events
-
-pygame.event.EventType.type — event type identifier.
-pygame.event.EventType.__dict__ — event attribute dictionary
-

A pygame object that represents an event. User event instances are created with a pygame.event.Event() function call. The EventType type is not directly callable. EventType instances support attribute assignment and deletion.

-
-
-type¶
-
-
event type identifier.
-
type -> int
-
-

Read-only. The event type identifier. For user created event objects, this is the type argument passed to pygame.event.Event().

-

For example, some predefined event identifiers are QUIT and -MOUSEMOTION.

-
- -
-
-__dict__¶
-
-
event attribute dictionary
-
__dict__ -> dict
-
-

Read-only. The event type specific attributes of an event. The -dict attribute is a synonym for backward compatibility.

-

For example, the attributes of a KEYDOWN event would be unicode, -key, and mod

-
- -
-

New in pygame 1.9.2: Mutable attributes.

-
-
- -
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/examples.html b/venv/Lib/site-packages/pygame/docs/generated/ref/examples.html
deleted file mode 100644
index 22322e1..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/examples.html
+++ /dev/null
@@ -1,712 +0,0 @@
-pygame.examples — pygame v2.1.2 documentation
-pygame.examples
-
-
module of example programs
-
-pygame.examples.aliens.main — play the full aliens example
-pygame.examples.stars.main — run a simple starfield example
-pygame.examples.chimp.main — hit the moving chimp
-pygame.examples.moveit.main — display animated objects on the screen
-pygame.examples.fonty.main — run a font rendering example
-pygame.examples.freetype_misc.main — run a FreeType rendering example
-pygame.examples.vgrade.main — display a vertical gradient
-pygame.examples.eventlist.main — display pygame events
-pygame.examples.arraydemo.main — show various surfarray effects
-pygame.examples.sound.main — load and play a sound
-pygame.examples.sound_array_demos.main — play various sndarray effects
-pygame.examples.liquid.main — display an animated liquid effect
-pygame.examples.glcube.main — display an animated 3D cube using OpenGL
-pygame.examples.scrap_clipboard.main — access the clipboard
-pygame.examples.mask.main — display multiple images bounce off each other using collision detection
-pygame.examples.testsprite.main — show lots of sprites moving around
-pygame.examples.headless_no_windows_needed.main — write an image file that is smoothscaled copy of an input file
-pygame.examples.joystick.main — demonstrate joystick functionality
-pygame.examples.blend_fill.main — demonstrate the various surface.fill method blend options
-pygame.examples.blit_blends.main — uses alternative additive fill to that of surface.fill
-pygame.examples.cursors.main — display two different custom cursors
-pygame.examples.pixelarray.main — display various pixelarray generated effects
-pygame.examples.scaletest.main — interactively scale an image using smoothscale
-pygame.examples.midi.main — run a midi example
-pygame.examples.scroll.main — run a Surface.scroll example that shows a magnified image
-pygame.examples.camera.main — display video captured live from an attached camera
-pygame.examples.playmus.main — play an audio file
-

These examples should help get you started with pygame. Here is a brief rundown of what you get. The source code for these examples is in the public domain. Feel free to use it in your own projects.

-

There are several ways to run the examples. First they can be run as -stand-alone programs. Second they can be imported and their main() methods -called (see below). Finally, the easiest way is to use the python -m option:

-
python -m pygame.examples.<example name> <example arguments>
-
-
-

eg:

-
python -m pygame.examples.scaletest someimage.png
-
-
-

Resources such as images and sounds for the examples are found in the -pygame/examples/data subdirectory.

-

You can find where the example files are installed by using the following -commands inside the python interpreter.

-
>>> import pygame.examples.scaletest
->>> pygame.examples.scaletest.__file__
-'/usr/lib/python2.6/site-packages/pygame/examples/scaletest.py'
-
-
-

On each OS and version of Python the location will be slightly different. -For example on Windows it might be in 'C:/Python26/Lib/site-packages/pygame/examples/' -On Mac OS X it might be in '/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/site-packages/pygame/examples/'

-

You can also run the examples in the python interpreter by calling each module's main() function.

-
>>> import pygame.examples.scaletest
->>> pygame.examples.scaletest.main()
-
-
-

We're always on the lookout for more examples and/or example requests. Code -like this is probably the best way to start getting involved with python -gaming.

-

examples as a package is new to pygame 1.9.0. But most of the examples came with -pygame much earlier.

-
-
-aliens.main()¶
-
-
play the full aliens example
-
aliens.main() -> None
-
-

This started off as a port of the SDL demonstration, Aliens. Now it has -evolved into something sort of resembling fun. This demonstrates a lot of -different uses of sprites and optimized blitting. Also transparency, -colorkeys, fonts, sound, music, joystick, and more. (PS, my high score is -117! goodluck)

-
- -
-
-stars.main()¶
-
-
run a simple starfield example
-
stars.main() -> None
-
-

A simple starfield example. You can change the center of perspective by -leftclicking the mouse on the screen.

-
- -
-
-chimp.main()¶
-
-
hit the moving chimp
-
chimp.main() -> None
-
-

This simple example is derived from the line-by-line tutorial that comes -with pygame. It is based on a 'popular' web banner. Note there are comments -here, but for the full explanation, follow along in the tutorial.

-
- -
-
-moveit.main()¶
-
-
display animated objects on the screen
-
moveit.main() -> None
-
-

This is the full and final example from the Pygame Tutorial, "How Do I Make -It Move". It creates 10 objects and animates them on the screen.

-

Note it's a bit scant on error checking, but it's easy to read. :] -Fortunately, this is python, and we needn't wrestle with a pile of error -codes.

-
- -
-
-fonty.main()¶
-
-
run a font rendering example
-
fonty.main() -> None
-
-

Super quick, super simple application demonstrating the different ways to -render fonts with the font module

-
- -
-
-freetype_misc.main()¶
-
-
run a FreeType rendering example
-
freetype_misc.main() -> None
-
-

A showcase of rendering features the pygame.freetype.Font class provides in addition to those available with pygame.font.Font. It is a demonstration of direct to surface rendering, with vertical text and rotated text, opaque text and semi transparent text, horizontally stretched text and vertically stretched text.

-
- -
-
-vgrade.main()¶
-
-
display a vertical gradient
-
vgrade.main() -> None
-
-

Demonstrates creating a vertical gradient with pixelcopy and NumPy python. -The app will create a new gradient every half second and report the time -needed to create and display the image. If you're not prepared to start -working with the NumPy arrays, don't worry about the source for this one :]

-
- -
-
-eventlist.main()¶
-
-
display pygame events
-
eventlist.main() -> None
-
-

Eventlist is a sloppy style of pygame, but is a handy tool for learning -about pygame events and input. At the top of the screen are the state of -several device values, and a scrolling list of events are displayed on the -bottom.

-

This is not quality 'ui' code at all, but you can see how to implement very -non-interactive status displays, or even a crude text output control.

-
- -
-
-arraydemo.main()¶
-
-
show various surfarray effects
-
arraydemo.main(arraytype=None) -> None
-
-

Another example filled with various surfarray effects. It requires the -surfarray and image modules to be installed. This little demo can also make -a good starting point for any of your own tests with surfarray

-

The arraytype parameter is deprecated; passing any value besides 'numpy' -will raise ValueError.

-
- -
-
-sound.main()¶
-
-
load and play a sound
-
sound.main(file_path=None) -> None
-
-

Extremely basic testing of the mixer module. Load a sound and play it. All -from the command shell, no graphics.

-

If provided, use the audio file 'file_path', otherwise use a default file.

-

sound.py optional command line argument: an audio file

-
- -
-
-sound_array_demos.main()¶
-
-
play various sndarray effects
-
sound_array_demos.main(arraytype=None) -> None
-
-

Uses sndarray and NumPy to create offset faded copies of the -original sound. Currently it just uses hardcoded values for the number of -echoes and the delay. Easy for you to recreate as needed.

-

The arraytype parameter is deprecated; passing any value besides 'numpy' -will raise ValueError.

-
- -
-
-liquid.main()¶
-
-
display an animated liquid effect
-
liquid.main() -> None
-
-

This example was created in a quick comparison with the BlitzBasic gaming -language. Nonetheless, it demonstrates a quick 8-bit setup (with colormap).

-
- -
-
-glcube.main()¶
-
-
display an animated 3D cube using OpenGL
-
glcube.main() -> None
-
-

Using PyOpenGL and pygame, this creates a spinning 3D multicolored cube.

-
- -
-
-scrap_clipboard.main()¶
-
-
access the clipboard
-
scrap_clipboard.main() -> None
-
-

A simple demonstration example for the clipboard support.

-
- -
-
-mask.main()¶
-
-
display multiple images bounce off each other using collision detection
-
mask.main(*args) -> None
-
-

Positional arguments:

-
one or more image file names.
-
-
-

This pygame.masks demo will display multiple moving sprites bouncing off -each other. More than one sprite image can be provided.

-

If run as a program then mask.py takes one or more image files as -command line arguments.

-
- -
-
-testsprite.main()¶
-
-
show lots of sprites moving around
-
testsprite.main(update_rects = True, use_static = False, use_FastRenderGroup = False, screen_dims = [640, 480], use_alpha = False, flags = 0) -> None
-
-

Optional keyword arguments:

-
update_rects - use the RenderUpdate sprite group class
-use_static - include non-moving images
-use_FastRenderGroup - Use the FastRenderGroup sprite group
-screen_dims - pygame window dimensions
-use_alpha - use alpha blending
-flags - additional display mode flags
-
-
-

Like the testsprite.c that comes with SDL, this pygame version shows -lots of sprites moving around.

-

If run as a stand-alone program then no command line arguments are taken.

-
- -
-
-headless_no_windows_needed.main()¶
-
-
write an image file that is smoothscaled copy of an input file
-
headless_no_windows_needed.main(fin, fout, w, h) -> None
-
-

arguments:

-
fin - name of an input image file
-fout - name of the output file to create/overwrite
-w, h - size of the rescaled image, as integer width and height
-
-
-

How to use pygame with no windowing system, like on headless servers.

-

Thumbnail generation with scaling is an example of what you can do with -pygame.

-

NOTE: the pygame scale function uses MMX/SSE if available, and can be -run in multiple threads.

-

If headless_no_windows_needed.py is run as a program it takes the -following command line arguments:

-
-scale inputimage outputimage new_width new_height
-eg. -scale in.png outpng 50 50
-
-
-
- -
-
-joystick.main()¶
-
-
demonstrate joystick functionality
-
joystick.main() -> None
-
-

A demo showing full joystick support.

-
-

New in pygame 2.0.2.

-
-
- -
-
-blend_fill.main()¶
-
-
demonstrate the various surface.fill method blend options
-
blend_fill.main() -> None
-
-

An interactive demo that lets one choose which BLEND_xxx option to apply to a surface.

-
- -
-
-blit_blends.main()¶
-
-
uses alternative additive fill to that of surface.fill
-
blit_blends.main() -> None
-
-

Fake additive blending using NumPy; it doesn't clamp. Press r, g or b. Somewhat like blend_fill.

-
- -
-
-cursors.main()¶
-
-
display two different custom cursors
-
cursors.main() -> None
-
-

Display an arrow or circle with crossbar cursor.

-
- -
-
-pixelarray.main()¶
-
-
display various pixelarray generated effects
-
pixelarray.main() -> None
-
-

Display various pixelarray generated effects.

-
- -
-
-scaletest.main()¶
-
-
interactively scale an image using smoothscale
-
scaletest.main(imagefile, convert_alpha=False, run_speed_test=True) -> None
-
-

arguments:

-
imagefile - file name of source image (required)
-convert_alpha - use convert_alpha() on the surf (default False)
-run_speed_test - (default False)
-
-
-

A smoothscale example that resizes an image on the screen. Vertical and horizontal arrow keys are used to change the width and height of the displayed image. If the convert_alpha option is True then the source image is forced to have source alpha, whether or not the original image does. If run_speed_test is True then a background timing test is performed instead of the interactive scaler.

-

If scaletest.py is run as a program then the command line options are:

-
ImageFile [-t] [-convert_alpha]
-[-t] = Run Speed Test
-[-convert_alpha] = Use convert_alpha() on the surf.
-
-
-
- -
-
-midi.main()¶
-
-
run a midi example
-
midi.main(mode='output', device_id=None) -> None
-
-

Arguments:

-
mode - if 'output' run a midi keyboard output example
-          'input' run a midi event logger input example
-          'list' list available midi devices
-       (default 'output')
-device_id - midi device number; if None then use the default midi input or
-            output device for the system
-
-
-

The output example shows how to translate mouse clicks or computer keyboard -events into midi notes. It implements a rudimentary button widget and state -machine.

-

The input example shows how to translate midi input to pygame events.

-

With the use of a virtual midi patch cord the output and input examples can -be run as separate processes and connected so the keyboard output is -displayed on a console.

-

new to pygame 1.9.0

-
- -
-
-scroll.main()¶
-
-
run a Surface.scroll example that shows a magnified image
-
scroll.main(image_file=None) -> None
-
-

This example shows a scrollable image that has a zoom factor of eight. It -uses the Surface.scroll() -function to shift the image on the display surface. -A clip rectangle protects a margin area. If called as a function, -the example accepts an optional image file path. If run as a program it -takes an optional file path command line argument. If no file is provided a -default image file is used.

-

When running click on a black triangle to move one pixel in the direction -the triangle points. Or use the arrow keys. Close the window or press -ESC to quit.

-
- -
-
-camera.main()¶
-
-
display video captured live from an attached camera
-
camera.main() -> None
-
-

A simple live video player, it uses the first available camera it finds on -the system.

-
- -
-
-playmus.main()¶
-
-
play an audio file
-
playmus.main(file_path) -> None
-
-

A simple music player with window and keyboard playback control. Playback can -be paused and rewound to the beginning.

-
- -
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/fastevent.html b/venv/Lib/site-packages/pygame/docs/generated/ref/fastevent.html
deleted file mode 100644
index c5c8d33..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/fastevent.html
+++ /dev/null
@@ -1,286 +0,0 @@
-pygame.fastevent — pygame v2.1.2 documentation
-pygame.fastevent
-
-
pygame module for interacting with events and queues
-
-pygame.fastevent.init — initialize pygame.fastevent
-pygame.fastevent.get_init — returns True if the fastevent module is currently initialized
-pygame.fastevent.pump — internally process pygame event handlers
-pygame.fastevent.wait — wait for an event
-pygame.fastevent.poll — get an available event
-pygame.fastevent.get — get all events from the queue
-pygame.fastevent.post — place an event on the queue
-

IMPORTANT NOTE: THIS MODULE IS DEPRECATED IN PYGAME 2.2

-

In older pygame versions before pygame 2, pygame.event was not well suited for posting events from different threads. This module served as a replacement (with fewer features) for multithreaded use. Now, the usage of this module is highly discouraged in favour of the main pygame.event module. This module will be removed in a future pygame version.

-
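Since pygame.event is now the recommended path, a hedged sketch of what posting from a worker thread can look like with the main event module (the event name and payload are invented for the example):

    import threading
    import pygame

    pygame.init()
    pygame.display.set_mode((320, 240))            # the event queue needs an initialized display
    DATA_READY = pygame.event.custom_type()

    def worker():
        # ... do slow work off the main thread, then hand the result over via the queue
        pygame.event.post(pygame.event.Event(DATA_READY, {"payload": 42}))

    threading.Thread(target=worker, daemon=True).start()

    done = False
    while not done:
        for event in pygame.event.get():
            if event.type == DATA_READY:
                print(event.payload)
            elif event.type == pygame.QUIT:
                done = True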

The legacy documentation of the module is provided below.

-
-
-pygame.fastevent.init()¶
-
-
initialize pygame.fastevent
-
init() -> None
-
-

Initialize the pygame.fastevent module.

-
- -
-
-pygame.fastevent.get_init()¶
-
-
returns True if the fastevent module is currently initialized
-
get_init() -> bool
-
-

Returns True if the pygame.fastevent module is currently initialized.

-
- -
-
-pygame.fastevent.pump()¶
-
-
internally process pygame event handlers
-
pump() -> None
-
-

For each frame of your game, you will need to make some sort of call to the -event queue. This ensures your program can internally interact with the rest -of the operating system.

-

This function is not necessary if your program is consistently processing events on the queue through the other pygame.fastevent functions.

-

There are important things that must be dealt with internally in the event -queue. The main window may need to be repainted or respond to the system. If -you fail to make a call to the event queue for too long, the system may -decide your program has locked up.

-
- -
-
-pygame.fastevent.wait()¶
-
-
wait for an event
-
wait() -> Event
-
-

Returns the current event on the queue. If there are no messages -waiting on the queue, this will not return until one is available. -Sometimes it is important to use this wait to get events from the queue, -it will allow your application to idle when the user isn't doing anything -with it.

-
- -
-
-pygame.fastevent.poll()¶
-
-
get an available event
-
poll() -> Event
-
-

Returns next event on queue. If there is no event waiting on the queue, -this will return an event with type NOEVENT.

-
- -
-
-pygame.fastevent.get()¶
-
-
get all events from the queue
-
get() -> list of Events
-
-

This will get all the messages and remove them from the queue.

-
- -
-
-pygame.fastevent.post()¶
-
-
place an event on the queue
-
post(Event) -> None
-
-

This will post your own event objects onto the event queue. You can post -any event type you want, but some care must be taken. For example, if you -post a MOUSEBUTTONDOWN event to the queue, it is likely any code receiving -the event will expect the standard MOUSEBUTTONDOWN attributes to be -available, like 'pos' and 'button'.

-

Because pygame.fastevent.post() may have to wait for the queue to empty, you can get into a deadlock if you try to append an event onto a full queue from the thread that processes events. For that reason I do not recommend using this function in the main thread of an SDL program.

-
- -
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/font.html b/venv/Lib/site-packages/pygame/docs/generated/ref/font.html
deleted file mode 100644
index 322a582..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/font.html
+++ /dev/null
@@ -1,698 +0,0 @@
-pygame.font — pygame v2.1.2 documentation
-pygame.font
-
-
pygame module for loading and rendering fonts
-
-pygame.font.init — initialize the font module
-pygame.font.quit — uninitialize the font module
-pygame.font.get_init — true if the font module is initialized
-pygame.font.get_default_font — get the filename of the default font
-pygame.font.get_fonts — get all available fonts
-pygame.font.match_font — find a specific font on the system
-pygame.font.SysFont — create a Font object from the system fonts
-pygame.font.Font — create a new Font object from a file
-

The font module allows for rendering TrueType fonts into a new Surface object. It accepts any UCS-2 character ('\u0001' to '\uFFFF'). This module is optional and requires SDL_ttf as a dependency. You should test that pygame.font is available and initialized before attempting to use the module.

-

Most of the work done with fonts is done by using the actual Font objects. The module by itself only has routines to initialize the module and create Font objects with pygame.font.Font().

-

You can load fonts from the system by using the pygame.font.SysFont() -function. There are a few other functions to help lookup the system fonts.

-

Pygame comes with a builtin default font. This can always be accessed by -passing None as the font name.

-
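A minimal sketch of the usual workflow (the font size, text and colours here are arbitrary illustration values):

    import pygame

    pygame.init()                                  # also initializes pygame.font
    screen = pygame.display.set_mode((320, 240))   # arbitrary window size

    font = pygame.font.SysFont(None, 24)           # None falls back to the default font
    text_surface = font.render("Hello", True, (255, 255, 255))
    screen.blit(text_surface, (10, 10))
    pygame.display.flip()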

To use the pygame.freetype based pygame.ftfont as pygame.font, define the environment variable PYGAME_FREETYPE before the first import of pygame. Module pygame.ftfont is a pygame.font compatible module that passes all but one of the font module unit tests: it does not have the UCS-2 limitation of the SDL_ttf based font module, so it fails to raise an exception for a code point greater than '\uFFFF'. If pygame.freetype is unavailable then the SDL_ttf font module will be loaded instead.

-
-
-pygame.font.init()¶
-
-
initialize the font module
-
init() -> None
-
-

This method is called automatically by pygame.init(). It initializes the -font module. The module must be initialized before any other functions will -work.

-

It is safe to call this function more than once.

-
- -
-
-pygame.font.quit()¶
-
-
uninitialize the font module
-
quit() -> None
-
-

Manually uninitialize SDL_ttf's font system. This is called automatically by -pygame.quit().

-

It is safe to call this function even if font is currently not initialized.

-
- -
-
-pygame.font.get_init()¶
-
-
true if the font module is initialized
-
get_init() -> bool
-
-

Test if the font module is initialized or not.

-
- -
-
-pygame.font.get_default_font()¶
-
-
get the filename of the default font
-
get_default_font() -> string
-
-

Return the filename of the system font. This is not the full path to the -file. This file can usually be found in the same directory as the font -module, but it can also be bundled in separate archives.

-
- -
-
-pygame.font.get_fonts()¶
-
-
get all available fonts
-
get_fonts() -> list of strings
-
-

Returns a list of all the fonts available on the system. The names of the -fonts will be set to lowercase with all spaces and punctuation removed. This -works on most systems, but some will return an empty list if they cannot -find fonts.

-
- -
-
-pygame.font.match_font()¶
-
-
find a specific font on the system
-
match_font(name, bold=False, italic=False) -> path
-
-

Returns the full path to a font file on the system. If bold or italic are -set to true, this will attempt to find the correct family of font.

-

The font name can also be an iterable of font names, a string of -comma-separated font names, or a bytes of comma-separated font names, in -which case the set of names will be searched in order. -If none of the given names are found, None is returned.

-
-

New in pygame 2.0.1: Accept an iterable of font names.

-
-

Example:

-
print(pygame.font.match_font('bitstreamverasans'))
-# output is: /usr/share/fonts/truetype/ttf-bitstream-vera/Vera.ttf
-# (but only if you have Vera on your system)
-
-
-
- -
-
-pygame.font.SysFont()¶
-
-
create a Font object from the system fonts
-
SysFont(name, size, bold=False, italic=False) -> Font
-
-

Return a new Font object that is loaded from the system fonts. The font will -match the requested bold and italic flags. Pygame uses a small set of common -font aliases. If the specific font you ask for is not available, a reasonable -alternative may be used. If a suitable system font is not found this will -fall back on loading the default pygame font.

-

The font name can also be an iterable of font names, a string of -comma-separated font names, or a bytes of comma-separated font names, in -which case the set of names will be searched in order.

-
-

New in pygame 2.0.1: Accept an iterable of font names.

-
-
- -
-
-pygame.font.Font¶
-
-
create a new Font object from a file
-
Font(filename, size) -> Font
-
Font(pathlib.Path, size) -> Font
-
Font(object, size) -> Font
-
-pygame.font.Font.bold — Gets or sets whether the font should be rendered in (faked) bold.
-pygame.font.Font.italic — Gets or sets whether the font should be rendered in (faked) italics.
-pygame.font.Font.underline — Gets or sets whether the font should be rendered with an underline.
-pygame.font.Font.render — draw text on a new Surface
-pygame.font.Font.size — determine the amount of space needed to render text
-pygame.font.Font.set_underline — control if text is rendered with an underline
-pygame.font.Font.get_underline — check if text will be rendered with an underline
-pygame.font.Font.set_bold — enable fake rendering of bold text
-pygame.font.Font.get_bold — check if text will be rendered bold
-pygame.font.Font.set_italic — enable fake rendering of italic text
-pygame.font.Font.metrics — gets the metrics for each character in the passed string
-pygame.font.Font.get_italic — check if the text will be rendered italic
-pygame.font.Font.get_linesize — get the line space of the font text
-pygame.font.Font.get_height — get the height of the font
-pygame.font.Font.get_ascent — get the ascent of the font
-pygame.font.Font.get_descent — get the descent of the font
-

Load a new font from a given filename or a python file object. The size is -the height of the font in pixels. If the filename is None the pygame default -font will be loaded. If a font cannot be loaded from the arguments given an -exception will be raised. Once the font is created the size cannot be -changed.

-

Font objects are mainly used to render text into new Surface objects. The -render can emulate bold or italic features, but it is better to load from a -font with actual italic or bold glyphs. The rendered text can be regular -strings or unicode.

-
-
-bold¶
-
-
Gets or sets whether the font should be rendered in (faked) bold.
-
bold -> bool
-
-

Whether the font should be rendered in bold.

-

When set to True, this enables the bold rendering of text. This -is a fake stretching of the font that doesn't look good on many -font types. If possible load the font from a real bold font -file. While bold, the font will have a different width than when -normal. This can be mixed with the italic and underline modes.

-
-

New in pygame 2.0.0.

-
-
- -
-
-italic¶
-
-
Gets or sets whether the font should be rendered in (faked) italics.
-
italic -> bool
-
-

Whether the font should be rendered in italic.

-

When set to True, this enables fake rendering of italic -text. This is a fake skewing of the font that doesn't look good -on many font types. If possible load the font from a real italic -font file. While italic the font will have a different width -than when normal. This can be mixed with the bold and underline -modes.

-
-

New in pygame 2.0.0.

-
-
- -
-
-underline¶
-
-
Gets or sets whether the font should be rendered with an underline.
-
underline -> bool
-
-

Whether the font should be rendered in underline.

-

When set to True, all rendered fonts will include an -underline. The underline is always one pixel thick, regardless -of font size. This can be mixed with the bold and italic modes.

-
-

New in pygame 2.0.0.

-
-
- -
-
-render()¶
-
-
draw text on a new Surface
-
render(text, antialias, color, background=None) -> Surface
-
-

This creates a new Surface with the specified text rendered on it. pygame -provides no way to directly draw text on an existing Surface: instead you -must use Font.render() to create an image (Surface) of the text, then -blit this image onto another Surface.

-

The text can only be a single line: newline characters are not rendered. Null characters ('\x00') raise a TypeError. Both Unicode and char (byte) strings are accepted. For Unicode strings only UCS-2 characters ('\u0001' to '\uFFFF') were previously supported and any greater unicode codepoint would raise a UnicodeError. Now, characters in the UCS-4 range are supported. For char strings a LATIN1 encoding is assumed. The antialias argument is a boolean: if true the characters will have smooth edges. The color argument is the color of the text [e.g.: (0, 0, 255) for blue]. The optional background argument is a color to use for the text background. If no background is passed the area outside the text will be transparent.

-

The Surface returned will be of the dimensions required to hold the text. -(the same as those returned by Font.size()). If an empty string is passed -for the text, a blank surface will be returned that is zero pixel wide and -the height of the font.

-

Depending on the type of background and antialiasing used, this returns -different types of Surfaces. For performance reasons, it is good to know -what type of image will be used. If antialiasing is not used, the return -image will always be an 8-bit image with a two-color palette. If the -background is transparent a colorkey will be set. Antialiased images are -rendered to 24-bit RGB images. If the background is transparent a -pixel alpha will be included.

-

Optimization: if you know that the final destination for the text (on the -screen) will always have a solid background, and the text is antialiased, -you can improve performance by specifying the background color. This will -cause the resulting image to maintain transparency information by -colorkey rather than (much less efficient) alpha values.

-

If you render '\n' an unknown char will be rendered. Usually a -rectangle. Instead you need to handle new lines yourself.

-

Font rendering is not thread safe: only a single thread can render text -at any time.

-
-

Changed in pygame 2.0.3: Rendering UCS_4 unicode works and does not -raise an exception. Use if hasattr(pygame.font, 'UCS_4'): to see if -pygame supports rendering UCS_4 unicode including more languages and -emoji.

-
-
- -
-
-size()¶
-
-
determine the amount of space needed to render text
-
size(text) -> (width, height)
-
-

Returns the dimensions needed to render the text. This can be used to -help determine the positioning needed for text before it is rendered. It -can also be used for wordwrapping and other layout effects.

-

Be aware that most fonts use kerning which adjusts the widths for -specific letter pairs. For example, the width for "ae" will not always -match the width for "a" + "e".

-
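For example, size() can be used to center a line of text before rendering it; a sketch assuming an existing display surface named screen (the font size, message and colour are arbitrary):

    font = pygame.font.Font(None, 32)              # default font, arbitrary size
    message = "Game Over"
    w, h = font.size(message)                      # dimensions the rendered text will have
    x = (screen.get_width() - w) // 2
    y = (screen.get_height() - h) // 2
    screen.blit(font.render(message, True, (255, 0, 0)), (x, y))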
- -
-
-set_underline()¶
-
-
control if text is rendered with an underline
-
set_underline(bool) -> None
-
-

When enabled, all rendered fonts will include an underline. The underline -is always one pixel thick, regardless of font size. This can be mixed -with the bold and italic modes.

-
-

Note

-

This is the same as the underline attribute.

-
-
- -
-
-get_underline()¶
-
-
check if text will be rendered with an underline
-
get_underline() -> bool
-
-

Return True when the font underline is enabled.

-
-
-

Note

-

This is the same as the underline attribute.

-
-
-
- -
-
-set_bold()¶
-
-
enable fake rendering of bold text
-
set_bold(bool) -> None
-
-

Enables the bold rendering of text. This is a fake stretching of the font -that doesn't look good on many font types. If possible load the font from -a real bold font file. While bold, the font will have a different width -than when normal. This can be mixed with the italic and underline modes.

-
-

Note

-

This is the same as the bold attribute.

-
-
- -
-
-get_bold()¶
-
-
check if text will be rendered bold
-
get_bold() -> bool
-
-

Return True when the font bold rendering mode is enabled.

-
-

Note

-

This is the same as the bold attribute.

-
-
- -
-
-set_italic()¶
-
-
enable fake rendering of italic text
-
set_italic(bool) -> None
-
-

Enables fake rendering of italic text. This is a fake skewing of the font -that doesn't look good on many font types. If possible load the font from -a real italic font file. While italic the font will have a different -width than when normal. This can be mixed with the bold and underline -modes.

-
-

Note

-

This is the same as the italic attribute.

-
-
- -
-
-metrics()¶
-
-
gets the metrics for each character in the passed string
-
metrics(text) -> list
-
-

The list contains tuples for each character, which contain the minimum -X offset, the maximum X offset, the minimum Y offset, the -maximum Y offset and the advance offset (bearing plus width) of the -character. [(minx, maxx, miny, maxy, advance), (minx, maxx, miny, maxy, -advance), ...]. None is entered in the list for each unrecognized -character.

-
- -
-
-get_italic()¶
-
-
check if the text will be rendered italic
-
get_italic() -> bool
-
-

Return True when the font italic rendering mode is enabled.

-
-

Note

-

This is the same as the italic attribute.

-
-
- -
-
-get_linesize()¶
-
-
get the line space of the font text
-
get_linesize() -> int
-
-

Return the height in pixels for a line of text with the font. When -rendering multiple lines of text this is the recommended amount of space -between lines.

-
- -
-
-get_height()¶
-
-
get the height of the font
-
get_height() -> int
-
-

Return the height in pixels of the actual rendered text. This is the -average size for each glyph in the font.

-
- -
-
-get_ascent()¶
-
-
get the ascent of the font
-
get_ascent() -> int
-
-

Return the height in pixels for the font ascent. The ascent is the number -of pixels from the font baseline to the top of the font.

-
- -
-
-get_descent()¶
-
-
get the descent of the font
-
get_descent() -> int
-
-

Return the height in pixels for the font descent. The descent is the -number of pixels from the font baseline to the bottom of the font.

-
- -
- -
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/freetype.html b/venv/Lib/site-packages/pygame/docs/generated/ref/freetype.html
deleted file mode 100644
index 8c293e8..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/freetype.html
+++ /dev/null
@@ -1,1270 +0,0 @@
-pygame.freetype — pygame v2.1.2 documentation
-pygame.freetype
-
-
Enhanced pygame module for loading and rendering computer fonts
-
-pygame.freetype.get_error — Return the latest FreeType error
-pygame.freetype.get_version — Return the FreeType version
-pygame.freetype.init — Initialize the underlying FreeType library.
-pygame.freetype.quit — Shut down the underlying FreeType library.
-pygame.freetype.get_init — Returns True if the FreeType module is currently initialized.
-pygame.freetype.was_init — DEPRECATED: Use get_init() instead.
-pygame.freetype.get_cache_size — Return the glyph cache size
-pygame.freetype.get_default_resolution — Return the default pixel size in dots per inch
-pygame.freetype.set_default_resolution — Set the default pixel size in dots per inch for the module
-pygame.freetype.SysFont — create a Font object from the system fonts
-pygame.freetype.get_default_font — Get the filename of the default font
-pygame.freetype.Font — Create a new Font instance from a supported font file.
-

The pygame.freetype module is a replacement for pygame.font. It has all of the functionality of the original, plus many new features. Yet it has absolutely no dependencies on the SDL_ttf library. It is implemented directly on the FreeType 2 library. The pygame.freetype module is not itself backward compatible with pygame.font. Instead, use the pygame.ftfont module as a drop-in replacement for pygame.font.

-

All font file formats supported by FreeType can be rendered by -pygame.freetype, namely TTF, Type1, CFF, OpenType, -SFNT, PCF, FNT, BDF, PFR and Type42 fonts. -All glyphs having UTF-32 code points are accessible -(see Font.ucs4).

-

Most work on fonts is done using Font instances. -The module itself only has routines for initialization and creation -of Font objects. -You can load fonts from the system using the SysFont() function.

-

Extra support of bitmap fonts is available. Available bitmap sizes can -be listed (see Font.get_sizes()). For bitmap only fonts Font -can set the size for you (see the Font.size property).

-

For now undefined character codes are replaced with the .notdef -(not defined) character. -How undefined codes are handled may become configurable in a future release.

-

Pygame comes with a built-in default font. This can always be accessed by -passing None as the font name to the Font constructor.

-

Extra rendering features available to pygame.freetype.Font are direct to surface rendering (see Font.render_to()), character kerning (see Font.kerning), vertical layout (see Font.vertical), rotation of rendered text (see Font.rotation), and the strong style (see Font.strong). Some properties are configurable, such as strong style strength (see Font.strength) and underline positioning (see Font.underline_adjustment). Text can be positioned by the upper right corner of the text box or by the text baseline (see Font.origin). Finally, a font's vertical and horizontal size can be adjusted separately (see Font.size). The pygame.examples.freetype_misc example shows these features in use.

-

The pygame package does not import freetype automatically when -loaded. This module must be imported explicitly to be used.

-
import pygame
-import pygame.freetype
-
-
-
-
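A small sketch of direct-to-surface rendering with the default font (the window size, colours and the 90 degree rotation are arbitrary illustration values):

    import pygame
    import pygame.freetype                         # must be imported explicitly

    pygame.init()
    screen = pygame.display.set_mode((320, 240))   # arbitrary window size

    font = pygame.freetype.Font(None, 24)          # None loads the bundled default font
    # render_to draws straight onto the target surface and returns the bounding rect
    font.render_to(screen, (10, 10), "Hello", fgcolor=(255, 255, 255))
    font.render_to(screen, (10, 40), "Rotated", fgcolor=(0, 255, 0), rotation=90)
    pygame.display.flip()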

New in pygame 1.9.2: freetype

-
-
-
-pygame.freetype.get_error()¶
-
-
Return the latest FreeType error
-
get_error() -> str
-
get_error() -> None
-
-

Return a description of the last error which occurred in the FreeType2 -library, or None if no errors have occurred.

-
- -
-
-pygame.freetype.get_version()¶
-
-
Return the FreeType version
-
get_version() -> (int, int, int)
-
-

Returns the version of the FreeType library in use by this module.

-

Note that the freetype module depends on the FreeType 2 library. -It will not compile with the original FreeType 1.0. Hence, the first element -of the tuple will always be "2".

-
- -
-
-pygame.freetype.init()¶
-
-
Initialize the underlying FreeType library.
-
init(cache_size=64, resolution=72) -> None
-
-

This function initializes the underlying FreeType library and must be -called before trying to use any of the functionality of the freetype -module.

-

However, pygame.init() will automatically call this function if the freetype module is already imported. It is safe to call this function more than once.

-

Optionally, you may specify a default cache_size for the Glyph cache: the -maximum number of glyphs that will be cached at any given time by the -module. Exceedingly small values will be automatically tuned for -performance. Also a default pixel resolution, in dots per inch, can -be given to adjust font scaling.

-
- -
-
-pygame.freetype.quit()¶
-
-
Shut down the underlying FreeType library.
-
quit() -> None
-
-

This function closes the freetype module. After calling this -function, you should not invoke any class, method or function related to the -freetype module as they are likely to fail or might give unpredictable -results. It is safe to call this function even if the module hasn't been -initialized yet.

-
- -
-
-pygame.freetype.get_init()¶
-
-
Returns True if the FreeType module is currently initialized.
-
get_init() -> bool
-
-

Returns True if the pygame.freetype module is currently initialized.

-
-

New in pygame 1.9.5.

-
-
- -
-
-pygame.freetype.was_init()¶
-
-
DEPRECATED: Use get_init() instead.
-
was_init() -> bool
-
-

DEPRECATED: Returns True if the pygame.freetype module is currently -initialized. Use get_init() instead.

-
- -
-
-pygame.freetype.get_cache_size()¶
-
-
Return the glyph cache size
-
get_cache_size() -> long
-
-

See pygame.freetype.init().

-
- -
-
-pygame.freetype.get_default_resolution()¶
-
-
Return the default pixel size in dots per inch
-
get_default_resolution() -> long
-
-

Returns the default pixel size, in dots per inch, for the module. -The default is 72 DPI.

-
- -
-
-pygame.freetype.set_default_resolution()¶
-
-
Set the default pixel size in dots per inch for the module
-
set_default_resolution([resolution])
-
-

Set the default pixel size, in dots per inch, for the module. If the -optional argument is omitted or zero the resolution is reset to 72 DPI.

-
- -
-
-pygame.freetype.SysFont()¶
-
-
create a Font object from the system fonts
-
SysFont(name, size, bold=False, italic=False) -> Font
-
-

Return a new Font object that is loaded from the system fonts. The font will -match the requested bold and italic flags. Pygame uses a small set of -common font aliases. If the specific font you ask for is not available, a -reasonable alternative may be used. If a suitable system font is not found -this will fall back on loading the default pygame font.

-

The font name can also be an iterable of font names, a string of -comma-separated font names, or a bytes of comma-separated font names, in -which case the set of names will be searched in order.

-
-

New in pygame 2.0.1: Accept an iterable of font names.

-
-
- -
-
-pygame.freetype.get_default_font()¶
-
-
Get the filename of the default font
-
get_default_font() -> string
-
-

Return the filename of the default pygame font. This is not the full path -to the file. The file is usually in the same directory as the font module, -but can also be bundled in a separate archive.

-
- -
-
-pygame.freetype.Font¶
-
-
Create a new Font instance from a supported font file.
-
Font(file, size=0, font_index=0, resolution=0, ucs4=False) -> Font
-
Font(pathlib.Path) -> Font
-
name —Proper font name.
path —Font file path
size —The default point size used in rendering
get_rect —Return the size and offset of rendered text
get_metrics —Return the glyph metrics for the given text
height —The unscaled height of the font in font units
ascender —The unscaled ascent of the font in font units
descender —The unscaled descent of the font in font units
get_sized_ascender —The scaled ascent of the font in pixels
get_sized_descender —The scaled descent of the font in pixels
get_sized_height —The scaled height of the font in pixels
get_sized_glyph_height —The scaled bounding box height of the font in pixels
get_sizes —return the available sizes of embedded bitmaps
render —Return rendered text as a surface
render_to —Render text onto an existing surface
render_raw —Return rendered text as a string of bytes
render_raw_to —Render text into an array of ints
style —The font's style flags
underline —The state of the font's underline style flag
strong —The state of the font's strong style flag
oblique —The state of the font's oblique style flag
wide —The state of the font's wide style flag
strength —The strength associated with the strong or wide font styles
underline_adjustment —Adjustment factor for the underline position
fixed_width —Gets whether the font is fixed-width
fixed_sizes —the number of available bitmap sizes for the font
scalable —Gets whether the font is scalable
use_bitmap_strikes —allow the use of embedded bitmaps in an outline font file
antialiased —Font anti-aliasing mode
kerning —Character kerning mode
vertical —Font vertical mode
rotation —text rotation in degrees counterclockwise
fgcolor —default foreground color
bgcolor —default background color
origin —Font render to text origin mode
pad —padded boundary mode
ucs4 —Enable UCS-4 mode
resolution —Pixel resolution in dots per inch
-

The file argument can be either a string representing the font's filename, a file-like object containing the font, or None; if None, the default pygame font is used.

-

Optionally, a size argument may be specified to set the default size in -points, which determines the size of the rendered characters. -The size can also be passed explicitly to each method call. -Because of the way the caching system works, specifying a default size on -the constructor doesn't imply a performance gain over manually passing -the size on each function call. If the font is bitmap and no size -is given, the default size is set to the first available size for the font.

-

If the font file has more than one font, the font to load can be chosen with -the index argument. An exception is raised for an out-of-range font index -value.

-

The optional resolution argument sets the pixel size, in dots per inch, -for use in scaling glyphs for this Font instance. If 0 then the default -module value, set by init(), is used. The Font object's -resolution can only be changed by re-initializing the Font instance.

-

The optional ucs4 argument, an integer, sets the default text translation -mode: 0 (False) recognize UTF-16 surrogate pairs, any other value (True), -to treat Unicode text as UCS-4, with no surrogate pairs. See -Font.ucs4.
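A brief constructor sketch; "myfont.ttf" is a placeholder path for a font file you supply, not something bundled with pygame:

import pygame.freetype

pygame.freetype.init()
font = pygame.freetype.Font("myfont.ttf", size=24)          # load from a file path
default_font = pygame.freetype.Font(None, size=24)          # bundled default font
ucs4_font = pygame.freetype.Font(None, size=24, ucs4=True)  # treat text as UCS-4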

-
-
-name¶
-
-
Proper font name.
-
name -> string
-
-

Read only. Returns the real (long) name of the font, as -recorded in the font file.

-
- -
-
-path¶
-
-
Font file path
-
path -> unicode
-
-

Read only. Returns the path of the loaded font file

-
- -
-
-size¶
-
-
The default point size used in rendering
-
size -> float
-
size -> (float, float)
-
-

Get or set the default size for text metrics and rendering. It can be -a single point size, given as a Python int or float, or a -font ppem (width, height) tuple. Size values are non-negative. -A zero size or width represents an undefined size. In this case -the size must be given as a method argument, or an exception is -raised. A zero width but non-zero height is a ValueError.

-

For a scalable font, a single number value is equivalent to a tuple -with width equal height. A font can be stretched vertically with -height set greater than width, or horizontally with width set -greater than height. For embedded bitmaps, as listed by get_sizes(), -use the nominal width and height to select an available size.

-

Font size differs for a non-scalable, bitmap, font. During a -method call it must match one of the available sizes returned by -method get_sizes(). If not, an exception is raised. -If the size is a single number, the size is first matched against the -point size value. If no match, then the available size with the -same nominal width and height is chosen.
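For example, on a scalable font (the sizes below are arbitrary):

import pygame
import pygame.freetype

pygame.freetype.init()
font = pygame.freetype.Font(None, 24)   # the default font is scalable
font.size = 24          # a single point size
font.size = (16, 32)    # ppem (width, height): stretch the glyphs vertically
surface, rect = font.render("stretched text", fgcolor=(0, 0, 0))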

-
- -
-
-get_rect()¶
-
-
Return the size and offset of rendered text
-
get_rect(text, style=STYLE_DEFAULT, rotation=0, size=0) -> rect
-
-

Gets the final dimensions and origin, in pixels, of text using the -optional size in points, style, and rotation. For other -relevant render properties, and for any optional argument not given, -the default values set for the Font instance are used.

-

Returns a Rect instance containing the -width and height of the text's bounding box and the position of the -text's origin. -The origin is useful in aligning separately rendered pieces of text. -It gives the baseline position and bearing at the start of the text. -See the render_to() method for an example.

-

If text is a char (byte) string, its encoding is assumed to be -LATIN1.

-

Optionally, text can be None, which will return the bounding -rectangle for the text passed to a previous get_rect(), -render(), render_to(), render_raw(), or -render_raw_to() call. See render_to() for more -details.
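One common use is centering text before drawing it. A minimal sketch, assuming an arbitrary window size:

import pygame
import pygame.freetype

pygame.init()
screen = pygame.display.set_mode((640, 480))
font = pygame.freetype.Font(None, 32)
rect = font.get_rect("Centered text")
rect.center = screen.get_rect().center
# With origin left at its default (False), dest is the top-left of the box.
font.render_to(screen, rect.topleft, "Centered text", (255, 255, 255))
pygame.display.flip()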

-
- -
-
-get_metrics()¶
-
-
Return the glyph metrics for the given text
-
get_metrics(text, size=0) -> [(...), ...]
-
-

Returns the glyph metrics for each character in text.

-

The glyph metrics are returned as a list of tuples. Each tuple gives -metrics of a single character glyph. The glyph metrics are:

-
(min_x, max_x, min_y, max_y, horizontal_advance_x, horizontal_advance_y)
-
-
-

The bounding box min_x, max_x, min_y, and max_y values are returned as -grid-fitted pixel coordinates of type int. The advance values are -float values.

-

The calculations are done using the font's default size in points. -Optionally you may specify another point size with the size argument.

-

The metrics are adjusted for the current rotation, strong, and oblique -settings.

-

If text is a char (byte) string, then its encoding is assumed to be -LATIN1.
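A small sketch estimating the advance width of a string from its glyph metrics (index 4 of each tuple is horizontal_advance_x, as listed above):

import pygame.freetype

pygame.freetype.init()
font = pygame.freetype.Font(None, 24)
metrics = font.get_metrics("Hello")
# Entries can be None for glyphs missing from the font, so guard against that.
advance = sum(m[4] for m in metrics if m is not None)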

-
- -
-
-height¶
-
-
The unscaled height of the font in font units
-
height -> int
-
-

Read only. Gets the height of the font. This is the average value of all -glyphs in the font.

-
- -
-
-ascender¶
-
-
The unscaled ascent of the font in font units
-
ascender -> int
-
-

Read only. Return the number of units from the font's baseline to -the top of the bounding box.

-
- -
-
-descender¶
-
-
The unscaled descent of the font in font units
-
descender -> int
-
-

Read only. Return the height in font units for the font descent. -The descent is the number of units from the font's baseline to the -bottom of the bounding box.

-
- -
-
-get_sized_ascender()¶
-
-
The scaled ascent of the font in pixels
-
get_sized_ascender(<size>=0) -> int
-
-

Return the number of units from the font's baseline to the top of the -bounding box. It is not adjusted for strong or rotation.

-
- -
-
-get_sized_descender()¶
-
-
The scaled descent of the font in pixels
-
get_sized_descender(<size>=0) -> int
-
-

Return the number of pixels from the font's baseline to the bottom of the bounding box. It is not adjusted for strong or rotation.

-
- -
-
-get_sized_height()¶
-
-
The scaled height of the font in pixels
-
get_sized_height(<size>=0) -> int
-
-

Returns the height of the font. This is the average value of all -glyphs in the font. It is not adjusted for strong or rotation.

-
- -
-
-get_sized_glyph_height()¶
-
-
The scaled bounding box height of the font in pixels
-
get_sized_glyph_height(<size>=0) -> int
-
-

Return the glyph bounding box height of the font in pixels. -This is the average value of all glyphs in the font. -It is not adjusted for strong or rotation.

-
- -
-
-get_sizes()¶
-
-
return the available sizes of embedded bitmaps
-
get_sizes() -> [(int, int, int, float, float), ...]
-
get_sizes() -> []
-
-

Returns a list of tuple records, one for each point size supported. Each tuple contains the point size, the height in pixels, the width in pixels, the horizontal ppem (nominal width) in fractional pixels, and the vertical ppem (nominal height) in fractional pixels.

-
- -
-
-render()¶
-
-
Return rendered text as a surface
-
render(text, fgcolor=None, bgcolor=None, style=STYLE_DEFAULT, rotation=0, size=0) -> (Surface, Rect)
-
-

Returns a new Surface with the text rendered to it in the color given by fgcolor. If no foreground color is given, the font's default fgcolor is used. If bgcolor is given, the surface will be filled with this color. When no background color is given, the surface background is transparent (zero alpha). Normally the returned surface has a 32-bit pixel format. However, if bgcolor is None and anti-aliasing is disabled, a monochrome 8-bit colorkey surface, with the colorkey set to the background color, is returned.

-

The return value is a tuple: the new surface and the bounding -rectangle giving the size and origin of the rendered text.

-

If an empty string is passed for text then the returned Rect is zero -width and the height of the font.

-

Optional fgcolor, style, rotation, and size arguments override -the default values set for the Font instance.

-

If text is a char (byte) string, then its encoding is assumed to be -LATIN1.

-

Optionally, text can be None, which will render the text -passed to a previous get_rect(), render(), render_to(), -render_raw(), or render_raw_to() call. -See render_to() for details.
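A short rendering sketch; the window size, text, and colors are arbitrary example values:

import pygame
import pygame.freetype

pygame.init()
screen = pygame.display.set_mode((320, 240))
font = pygame.freetype.Font(None, 24)
text_surface, text_rect = font.render("Score: 42",
                                       fgcolor=(255, 255, 255),
                                       bgcolor=(0, 0, 0))
screen.blit(text_surface, (10, 10))
pygame.display.flip()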

-
- -
-
-render_to()¶
-
-
Render text onto an existing surface
-
render_to(surf, dest, text, fgcolor=None, bgcolor=None, style=STYLE_DEFAULT, rotation=0, size=0) -> Rect
-
-

Renders the string text to the pygame.Surface surf, at position dest, an (x, y) surface coordinate pair. If either x or y is not an integer it is converted to one if possible. Any sequence where the first two items are x and y positional elements is accepted, including a Rect instance. As with render(), the optional fgcolor, style, rotation, and size arguments are available.

-

If a background color bgcolor is given, the text bounding box is -first filled with that color. The text is blitted next. -Both the background fill and text rendering involve full alpha blits. -That is, the alpha values of the foreground, background, and destination -target surface all affect the blit.

-

The return value is a rectangle giving the size and position of the -rendered text within the surface.

-

If an empty string is passed for text then the returned -Rect is zero width and the height of the font. -The rect will test False.

-

Optionally, text can be set None, which will re-render text -passed to a previous render_to(), get_rect(), render(), -render_raw(), or render_raw_to() call. Primarily, this -feature is an aid to using render_to() in combination with -get_rect(). An example:

-
def word_wrap(surf, text, font, color=(0, 0, 0)):
-    font.origin = True
-    words = text.split(' ')
-    width, height = surf.get_size()
-    line_spacing = font.get_sized_height() + 2
-    x, y = 0, line_spacing
-    space = font.get_rect(' ')
-    for word in words:
-        bounds = font.get_rect(word)
-        if x + bounds.width + bounds.x >= width:
-            x, y = 0, y + line_spacing
-        if x + bounds.width + bounds.x >= width:
-            raise ValueError("word too wide for the surface")
-        if y + bounds.height - bounds.y >= height:
-            raise ValueError("text too long for the surface")
-        font.render_to(surf, (x, y), None, color)
-        x += bounds.width + space.width
-    return x, y
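A possible way to drive the word_wrap() helper above; the window size and the sample text are arbitrary:

import pygame
import pygame.freetype

pygame.init()
screen = pygame.display.set_mode((400, 300))
screen.fill((255, 255, 255))
font = pygame.freetype.Font(None, 20)
word_wrap(screen, "The quick brown fox jumps over the lazy dog. " * 4, font)
pygame.display.flip()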
-
-
-

When render_to() is called with the same -font properties ― size, style, strength, -wide, antialiased, vertical, rotation, -kerning, and use_bitmap_strikes ― as get_rect(), -render_to() will use the layout calculated by get_rect(). -Otherwise, render_to() will recalculate the layout if called -with a text string or one of the above properties has changed -after the get_rect() call.

-

If text is a char (byte) string, then its encoding is assumed to be -LATIN1.

-
- -
-
-render_raw()¶
-
-
Return rendered text as a string of bytes
-
render_raw(text, style=STYLE_DEFAULT, rotation=0, size=0, invert=False) -> (bytes, (int, int))
-
-

Like render() but with the pixels returned as a byte string -of 8-bit gray-scale values. The foreground color is 255, the -background 0, useful as an alpha mask for a foreground pattern.

-
- -
-
-render_raw_to()¶
-
-
Render text into an array of ints
-
render_raw_to(array, text, dest=None, style=STYLE_DEFAULT, rotation=0, size=0, invert=False) -> Rect
-
-

Render to an array object exposing an array struct interface. The array -must be two dimensional with integer items. The default dest value, -None, is equivalent to position (0, 0). See render_to(). -As with the other render methods, text can be None to -render a text string passed previously to another method.

-

The return value is a pygame.Rect giving the size and position of the rendered text.

-
- -
-
-style¶
-
-
The font's style flags
-
style -> int
-
-

Gets or sets the default style of the Font. This default style will be used for all text rendering and size calculations unless overridden specifically in a render or get_rect() call. The style value may be a bit-wise OR of one or more of the following constants:

-
STYLE_NORMAL
-STYLE_UNDERLINE
-STYLE_OBLIQUE
-STYLE_STRONG
-STYLE_WIDE
-STYLE_DEFAULT
-
-
-

These constants may be found on the FreeType constants module. -Optionally, the default style can be modified or obtained accessing the -individual style attributes (underline, oblique, strong).

-

The STYLE_OBLIQUE and STYLE_STRONG styles are for -scalable fonts only. An attempt to set either for a bitmap font raises -an AttributeError. An attempt to set either for an inactive font, -as returned by Font.__new__(), raises a RuntimeError.

-

Assigning STYLE_DEFAULT to the style property leaves -the property unchanged, as this property defines the default. -The style property will never return STYLE_DEFAULT.
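For example (the style combination below is arbitrary):

import pygame.freetype

pygame.freetype.init()
font = pygame.freetype.Font(None, 24)
font.style = pygame.freetype.STYLE_STRONG | pygame.freetype.STYLE_UNDERLINE
surface, rect = font.render("bold and underlined", fgcolor=(0, 0, 0))
# A single call can still override the instance default:
surface, rect = font.render("plain", fgcolor=(0, 0, 0),
                            style=pygame.freetype.STYLE_NORMAL)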

-
- -
-
-underline¶
-
-
The state of the font's underline style flag
-
underline -> bool
-
-

Gets or sets whether the font will be underlined when drawing text. This -default style value will be used for all text rendering and size -calculations unless overridden specifically in a render or -get_rect() call, via the 'style' parameter.

-
- -
-
-strong¶
-
-
The state of the font's strong style flag
-
strong -> bool
-
-

Gets or sets whether the font will be bold when drawing text. This -default style value will be used for all text rendering and size -calculations unless overridden specifically in a render or -get_rect() call, via the 'style' parameter.

-
- -
-
-oblique¶
-
-
The state of the font's oblique style flag
-
oblique -> bool
-
-

Gets or sets whether the font will be rendered as oblique. This -default style value will be used for all text rendering and size -calculations unless overridden specifically in a render or -get_rect() call, via the style parameter.

-

The oblique style is only supported for scalable (outline) fonts. -An attempt to set this style on a bitmap font will raise an -AttributeError. If the font object is inactive, as returned by -Font.__new__(), setting this property raises a RuntimeError.

-
- -
-
-wide¶
-
-
The state of the font's wide style flag
-
wide -> bool
-
-

Gets or sets whether the font will be stretched horizontally when drawing text. It produces a result similar to the pygame.font.Font bold style. This style is not available for rotated text.

-
- -
-
-strength¶
-
-
The strength associated with the strong or wide font styles
-
strength -> float
-
-

The amount by which a font glyph's size is enlarged for the strong or wide transformations, as a fraction of the untransformed size. For the wide style only the horizontal dimension is increased. For strong text both the horizontal and vertical dimensions are enlarged. A wide style of strength 0.08333 (1/12) is equivalent to the pygame.font.Font bold style. The default is 0.02778 (1/36).

-

The strength style is only supported for scalable (outline) fonts. -An attempt to set this property on a bitmap font will raise an -AttributeError. If the font object is inactive, as returned by -Font.__new__(), assignment to this property raises a RuntimeError.

-
- -
-
-underline_adjustment¶
-
-
Adjustment factor for the underline position
-
underline_adjustment -> float
-
-

Gets or sets a factor which, when positive, is multiplied with the font's underline offset to adjust the underline position. A negative value turns an underline into a strike-through or overline. It is multiplied with the ascender. Accepted values range between -2.0 and 2.0 inclusive. A value of 0.5 closely matches Tango underlining. A value of 1.0 mimics pygame.font.Font underlining.

-
- -
-
-fixed_width¶
-
-
Gets whether the font is fixed-width
-
fixed_width -> bool
-
-

Read only. Returns True if the font contains fixed-width -characters (for example Courier, Bitstream Vera Sans Mono, Andale Mono).

-
- -
-
-fixed_sizes¶
-
-
the number of available bitmap sizes for the font
-
fixed_sizes -> int
-
-

Read only. Returns the number of point sizes for which the font contains -bitmap character images. If zero then the font is not a bitmap font. -A scalable font may contain pre-rendered point sizes as strikes.

-
- -
-
-scalable¶
-
-
Gets whether the font is scalable
-
scalable -> bool
-
-

Read only. Returns True if the font contains outline glyphs. -If so, the point size is not limited to available bitmap sizes.

-
- -
-
-use_bitmap_strikes¶
-
-
allow the use of embedded bitmaps in an outline font file
-
use_bitmap_strikes -> bool
-
-

Some scalable fonts include embedded bitmaps for particular point -sizes. This property controls whether or not those bitmap strikes -are used. Set it False to disable the loading of any bitmap -strike. Set it True, the default, to permit bitmap strikes -for a non-rotated render with no style other than wide or -underline. This property is ignored for bitmap fonts.

-

See also fixed_sizes and get_sizes().

-
- -
-
-antialiased¶
-
-
Font anti-aliasing mode
-
antialiased -> bool
-
-

Gets or sets the font's anti-aliasing mode. This defaults to -True on all fonts, which are rendered with full 8 bit blending.

-

Set to False to do monochrome rendering. This should -provide a small speed gain and reduce cache memory size.

-
- -
-
-kerning¶
-
-
Character kerning mode
-
kerning -> bool
-
-

Gets or sets the font's kerning mode. This defaults to False -on all fonts, which will be rendered without kerning.

-

Set to True to add kerning between character pairs, if supported -by the font, when positioning glyphs.

-
- -
-
-vertical¶
-
-
Font vertical mode
-
vertical -> bool
-
-

Gets or sets whether the characters are laid out vertically rather -than horizontally. May be useful when rendering Kanji or some other -vertical script.

-

Set to True to switch to a vertical text layout. The default -is False, place horizontally.

-

Note that the Font class does not automatically determine -script orientation. Vertical layout must be selected explicitly.

-

Also note that several font formats (especially bitmap based ones) don't -contain the necessary metrics to draw glyphs vertically, so drawing in -those cases will give unspecified results.

-
- -
-
-rotation¶
-
-
text rotation in degrees counterclockwise
-
rotation -> int
-
-

Gets or sets the baseline angle of the rendered text. The angle is -represented as integer degrees. The default angle is 0, with horizontal -text rendered along the X-axis, and vertical text along the Y-axis. -A positive value rotates these axes counterclockwise that many degrees. -A negative angle corresponds to a clockwise rotation. The rotation -value is normalized to a value within the range 0 to 359 inclusive -(eg. 390 -> 390 - 360 -> 30, -45 -> 360 + -45 -> 315, -720 -> 720 - (2 * 360) -> 0).
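A short sketch of rotated rendering; the window size and coordinates are arbitrary:

import pygame
import pygame.freetype

pygame.init()
screen = pygame.display.set_mode((200, 400))
font = pygame.freetype.Font(None, 24)   # the default font is scalable
font.rotation = 90    # text runs bottom-to-top along the Y axis
font.render_to(screen, (40, 100), "vertical label", (255, 255, 255))
font.rotation = 0     # back to ordinary horizontal text
pygame.display.flip()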

-

Only scalable (outline) fonts can be rotated. An attempt to change -the rotation of a bitmap font raises an AttributeError. -An attempt to change the rotation of an inactive font instance, as -returned by Font.__new__(), raises a RuntimeError.

-
- -
-
-fgcolor¶
-
-
default foreground color
-
fgcolor -> Color
-
-

Gets or sets the default glyph rendering color. It is initially opaque -black ― (0, 0, 0, 255). Applies to render() and render_to().

-
- -
-
-bgcolor¶
-
-
default background color
-
bgcolor -> Color
-
-

Gets or sets the default background rendering color. Initially it is -unset and text will render with a transparent background by default. -Applies to render() and render_to().

-
- -
-

New in pygame 2.0.0.

-
-
-
-origin¶
-
-
Font render to text origin mode
-
origin -> bool
-
-

If set True, render_to() and render_raw_to() will -take the dest position to be that of the text origin, as opposed to -the top-left corner of the bounding box. See get_rect() for -details.

-
- -
-
-pad¶
-
-
padded boundary mode
-
pad -> bool
-
-

If set True, the text boundary rectangle will be inflated to match that of pygame.font.Font. Otherwise, the boundary rectangle is just large enough for the text.

-
- -
-
-ucs4¶
-
-
Enable UCS-4 mode
-
ucs4 -> bool
-
-

Gets or sets the decoding of Unicode text. By default, the freetype module performs UTF-16 surrogate pair decoding on Unicode text. This allows 32-bit escape sequences ('\Uxxxxxxxx') between 0x10000 and 0x10FFFF to represent their corresponding UTF-32 code points on Python interpreters built with a UCS-2 Unicode type (on Windows, for instance). It also means character values within the UTF-16 surrogate area (0xD800 to 0xDFFF) are considered part of a surrogate pair. A malformed surrogate pair will raise a UnicodeEncodeError. Setting ucs4 True turns surrogate pair decoding off, allowing access to the full UCS-4 character range on a Python interpreter built with four-byte Unicode character support.

-
- -
-
-resolution¶
-
-
Pixel resolution in dots per inch
-
resolution -> int
-
-

Read only. Gets pixel size used in scaling font glyphs for this -Font instance.

-
- -
- -
- -
- - -

-
-
-
-
-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/gfxdraw.html b/venv/Lib/site-packages/pygame/docs/generated/ref/gfxdraw.html
deleted file mode 100644
index c97f7be..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/gfxdraw.html
+++ /dev/null
@@ -1,1058 +0,0 @@
pygame.gfxdraw — pygame v2.1.2 documentation
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.gfxdraw
-
-
pygame module for drawing shapes
-
pixel —draw a pixel
hline —draw a horizontal line
vline —draw a vertical line
line —draw a line
rectangle —draw a rectangle
box —draw a filled rectangle
circle —draw a circle
aacircle —draw an antialiased circle
filled_circle —draw a filled circle
ellipse —draw an ellipse
aaellipse —draw an antialiased ellipse
filled_ellipse —draw a filled ellipse
arc —draw an arc
pie —draw a pie
trigon —draw a trigon/triangle
aatrigon —draw an antialiased trigon/triangle
filled_trigon —draw a filled trigon/triangle
polygon —draw a polygon
aapolygon —draw an antialiased polygon
filled_polygon —draw a filled polygon
textured_polygon —draw a textured polygon
bezier —draw a Bezier curve
-

EXPERIMENTAL!: This API may change or disappear in later pygame releases. If -you use this, your code may break with the next pygame release.

-

The pygame package does not import gfxdraw automatically when loaded, so it must be imported explicitly to be used.

-
import pygame
-import pygame.gfxdraw
-
-
-

For all functions the arguments are strictly positional and integers are accepted for coordinates and radii. The color argument can be one of the following formats (matching the parameter descriptions below):

  • a pygame.Color object
  • an (RGB) triplet of ints
  • an (RGBA) quadruplet of ints

The functions rectangle() and box() will accept any (x, y, w, h) sequence for their rect argument, though pygame.Rect instances are preferred.

-

To draw a filled antialiased shape, first use the antialiased (aa*) version -of the function, and then use the filled (filled_*) version. -For example:

-
col = (255, 0, 0)
-surf.fill((255, 255, 255))
-pygame.gfxdraw.aacircle(surf, x, y, 30, col)
-pygame.gfxdraw.filled_circle(surf, x, y, 30, col)
-
-
-
-

Note

-

For threading, each of the functions releases the GIL during the C part of -the call.

-
-
-

Note

-

See the pygame.draw module for alternative draw methods. The pygame.gfxdraw module differs from the pygame.draw module in the API it uses and the different draw functions available. pygame.gfxdraw wraps the primitives from the library called SDL_gfx, rather than using modified versions.

-
-
-

New in pygame 1.9.0.

-
-
-
-pygame.gfxdraw.pixel()¶
-
-
draw a pixel
-
pixel(surface, x, y, color) -> None
-
-

Draws a single pixel, at position (x, y), on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the pixel

  • -
  • y (int) -- y coordinate of the pixel

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.hline()¶
-
-
draw a horizontal line
-
hline(surface, x1, x2, y, color) -> None
-
-

Draws a straight horizontal line ((x1, y) to (x2, y)) on the given -surface. There are no endcaps.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x1 (int) -- x coordinate of one end of the line

  • -
  • x2 (int) -- x coordinate of the other end of the line

  • -
  • y (int) -- y coordinate of the line

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.vline()¶
-
-
draw a vertical line
-
vline(surface, x, y1, y2, color) -> None
-
-

Draws a straight vertical line ((x, y1) to (x, y2)) on the given -surface. There are no endcaps.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the line

  • -
  • y1 (int) -- y coordinate of one end of the line

  • -
  • y2 (int) -- y coordinate of the other end of the line

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.line()¶
-
-
draw a line
-
line(surface, x1, y1, x2, y2, color) -> None
-
-

Draws a straight line ((x1, y1) to (x2, y2)) on the given surface. -There are no endcaps.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x1 (int) -- x coordinate of one end of the line

  • -
  • y1 (int) -- y coordinate of one end of the line

  • -
  • x2 (int) -- x coordinate of the other end of the line

  • -
  • y2 (int) -- y coordinate of the other end of the line

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.rectangle()¶
-
-
draw a rectangle
-
rectangle(surface, rect, color) -> None
-
-

Draws an unfilled rectangle on the given surface. For a filled rectangle use -box().

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • rect (Rect) -- rectangle to draw, position and dimensions

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
-

Note

-

The rect.bottom and rect.right attributes of a pygame.Rect always lie one pixel outside of its actual border. Therefore, these values will not be included as part of the drawing.

-
-
- -
-
-pygame.gfxdraw.box()¶
-
-
draw a filled rectangle
-
box(surface, rect, color) -> None
-
-

Draws a filled rectangle on the given surface. For an unfilled rectangle use -rectangle().

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • rect (Rect) -- rectangle to draw, position and dimensions

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
-

Note

-

The rect.bottom and rect.right attributes of a pygame.Rect always lie one pixel outside of its actual border. Therefore, these values will not be included as part of the drawing.

-
-
-

Note

-

The pygame.Surface.fill() method works just as well for drawing filled rectangles. In fact, pygame.Surface.fill() can be hardware accelerated on some platforms with both software and hardware display modes.

-
-
- -
-
-pygame.gfxdraw.circle()¶
-
-
draw a circle
-
circle(surface, x, y, r, color) -> None
-
-

Draws an unfilled circle on the given surface. For a filled circle use -filled_circle().

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the circle

  • -
  • y (int) -- y coordinate of the center of the circle

  • -
  • r (int) -- radius of the circle

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.aacircle()¶
-
-
draw an antialiased circle
-
aacircle(surface, x, y, r, color) -> None
-
-

Draws an unfilled antialiased circle on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the circle

  • -
  • y (int) -- y coordinate of the center of the circle

  • -
  • r (int) -- radius of the circle

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.filled_circle()¶
-
-
draw a filled circle
-
filled_circle(surface, x, y, r, color) -> None
-
-

Draws a filled circle on the given surface. For an unfilled circle use -circle().

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the circle

  • -
  • y (int) -- y coordinate of the center of the circle

  • -
  • r (int) -- radius of the circle

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.ellipse()¶
-
-
draw an ellipse
-
ellipse(surface, x, y, rx, ry, color) -> None
-
-

Draws an unfilled ellipse on the given surface. For a filled ellipse use -filled_ellipse().

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the ellipse

  • -
  • y (int) -- y coordinate of the center of the ellipse

  • -
  • rx (int) -- horizontal radius of the ellipse

  • -
  • ry (int) -- vertical radius of the ellipse

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.aaellipse()¶
-
-
draw an antialiased ellipse
-
aaellipse(surface, x, y, rx, ry, color) -> None
-
-

Draws an unfilled antialiased ellipse on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the ellipse

  • -
  • y (int) -- y coordinate of the center of the ellipse

  • -
  • rx (int) -- horizontal radius of the ellipse

  • -
  • ry (int) -- vertical radius of the ellipse

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.filled_ellipse()¶
-
-
draw a filled ellipse
-
filled_ellipse(surface, x, y, rx, ry, color) -> None
-
-

Draws a filled ellipse on the given surface. For an unfilled ellipse use -ellipse().

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the ellipse

  • -
  • y (int) -- y coordinate of the center of the ellipse

  • -
  • rx (int) -- horizontal radius of the ellipse

  • -
  • ry (int) -- vertical radius of the ellipse

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.arc()¶
-
-
draw an arc
-
arc(surface, x, y, r, start_angle, stop_angle, color) -> None
-
-

Draws an arc on the given surface. For an arc with its endpoints connected -to its center use pie().

-

The two angle arguments are given in degrees and indicate the start and stop positions of the arc. The arc is drawn in a clockwise direction from the start_angle to the stop_angle. If start_angle == stop_angle, nothing will be drawn.
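A minimal sketch drawing a quarter arc; the window size, centre, radius, and color are arbitrary example values:

import pygame
import pygame.gfxdraw

pygame.init()
surf = pygame.display.set_mode((200, 200))
# Arc of radius 50 centred at (100, 100), swept clockwise from 0 to 90 degrees.
pygame.gfxdraw.arc(surf, 100, 100, 50, 0, 90, (255, 0, 0))
pygame.display.flip()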

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the arc

  • -
  • y (int) -- y coordinate of the center of the arc

  • -
  • r (int) -- radius of the arc

  • -
  • start_angle (int) -- start angle in degrees

  • -
  • stop_angle (int) -- stop angle in degrees

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
-

Note

-

This function uses degrees while the pygame.draw.arc() function uses radians.

-
-
- -
-
-pygame.gfxdraw.pie()¶
-
-
draw a pie
-
pie(surface, x, y, r, start_angle, stop_angle, color) -> None
-
-

Draws an unfilled pie on the given surface. A pie is an arc() with its -endpoints connected to its center.

-

The two angle arguments are given in degrees and indicate the start and stop -positions of the pie. The pie is drawn in a clockwise direction from the -start_angle to the stop_angle. If start_angle == stop_angle, -a straight line will be drawn from the center position at the given angle, -to a length of the radius.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x (int) -- x coordinate of the center of the pie

  • -
  • y (int) -- y coordinate of the center of the pie

  • -
  • r (int) -- radius of the pie

  • -
  • start_angle (int) -- start angle in degrees

  • -
  • stop_angle (int) -- stop angle in degrees

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.trigon()¶
-
-
draw a trigon/triangle
-
trigon(surface, x1, y1, x2, y2, x3, y3, color) -> None
-
-

Draws an unfilled trigon (triangle) on the given surface. For a filled -trigon use filled_trigon().

-

A trigon can also be drawn using polygon() e.g. -polygon(surface, ((x1, y1), (x2, y2), (x3, y3)), color)

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x1 (int) -- x coordinate of the first corner of the trigon

  • -
  • y1 (int) -- y coordinate of the first corner of the trigon

  • -
  • x2 (int) -- x coordinate of the second corner of the trigon

  • -
  • y2 (int) -- y coordinate of the second corner of the trigon

  • -
  • x3 (int) -- x coordinate of the third corner of the trigon

  • -
  • y3 (int) -- y coordinate of the third corner of the trigon

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.aatrigon()¶
-
-
draw an antialiased trigon/triangle
-
aatrigon(surface, x1, y1, x2, y2, x3, y3, color) -> None
-
-

Draws an unfilled antialiased trigon (triangle) on the given surface.

-

An aatrigon can also be drawn using aapolygon() e.g. -aapolygon(surface, ((x1, y1), (x2, y2), (x3, y3)), color)

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x1 (int) -- x coordinate of the first corner of the trigon

  • -
  • y1 (int) -- y coordinate of the first corner of the trigon

  • -
  • x2 (int) -- x coordinate of the second corner of the trigon

  • -
  • y2 (int) -- y coordinate of the second corner of the trigon

  • -
  • x3 (int) -- x coordinate of the third corner of the trigon

  • -
  • y3 (int) -- y coordinate of the third corner of the trigon

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.filled_trigon()¶
-
-
draw a filled trigon/triangle
-
filled_trigon(surface, x1, y1, x2, y2, x3, y3, color) -> None
-
-

Draws a filled trigon (triangle) on the given surface. For an unfilled -trigon use trigon().

-

A filled_trigon can also be drawn using filled_polygon() e.g. -filled_polygon(surface, ((x1, y1), (x2, y2), (x3, y3)), color)

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • x1 (int) -- x coordinate of the first corner of the trigon

  • -
  • y1 (int) -- y coordinate of the first corner of the trigon

  • -
  • x2 (int) -- x coordinate of the second corner of the trigon

  • -
  • y2 (int) -- y coordinate of the second corner of the trigon

  • -
  • x3 (int) -- x coordinate of the third corner of the trigon

  • -
  • y3 (int) -- y coordinate of the third corner of the trigon

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-pygame.gfxdraw.polygon()¶
-
-
draw a polygon
-
polygon(surface, points, color) -> None
-
-

Draws an unfilled polygon on the given surface. For a filled polygon use -filled_polygon().

-

The adjacent coordinates in the points argument, as well as the first -and last points, will be connected by line segments. -e.g. For the points [(x1, y1), (x2, y2), (x3, y3)] a line segment will -be drawn from (x1, y1) to (x2, y2), from (x2, y2) to -(x3, y3), and from (x3, y3) to (x1, y1).
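For example (the points and color below are arbitrary):

import pygame
import pygame.gfxdraw

pygame.init()
surf = pygame.display.set_mode((160, 160))
points = [(50, 20), (120, 60), (90, 140), (20, 100)]
pygame.gfxdraw.aapolygon(surf, points, (0, 0, 255))        # antialiased outline
pygame.gfxdraw.filled_polygon(surf, points, (0, 0, 255))   # filled interior
pygame.display.flip()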

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 3 or more (x, y) coordinates, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats (float values will be truncated)

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
Raises
-
    -
  • ValueError -- if len(points) < 3 (must have at least 3 points)

  • -
  • IndexError -- if len(coordinate) < 2 (each coordinate must have -at least 2 items)

  • -
-
-
-
- -
-
-pygame.gfxdraw.aapolygon()¶
-
-
draw an antialiased polygon
-
aapolygon(surface, points, color) -> None
-
-

Draws an unfilled antialiased polygon on the given surface.

-

The adjacent coordinates in the points argument, as well as the first -and last points, will be connected by line segments. -e.g. For the points [(x1, y1), (x2, y2), (x3, y3)] a line segment will -be drawn from (x1, y1) to (x2, y2), from (x2, y2) to -(x3, y3), and from (x3, y3) to (x1, y1).

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 3 or more (x, y) coordinates, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats (float values will be truncated)

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
Raises
-
    -
  • ValueError -- if len(points) < 3 (must have at least 3 points)

  • -
  • IndexError -- if len(coordinate) < 2 (each coordinate must have -at least 2 items)

  • -
-
-
-
- -
-
-pygame.gfxdraw.filled_polygon()¶
-
-
draw a filled polygon
-
filled_polygon(surface, points, color) -> None
-
-

Draws a filled polygon on the given surface. For an unfilled polygon use -polygon().

-

The adjacent coordinates in the points argument, as well as the first -and last points, will be connected by line segments. -e.g. For the points [(x1, y1), (x2, y2), (x3, y3)] a line segment will -be drawn from (x1, y1) to (x2, y2), from (x2, y2) to -(x3, y3), and from (x3, y3) to (x1, y1).

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 3 or more (x, y) coordinates, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats (float values will be truncated)

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
Raises
-
    -
  • ValueError -- if len(points) < 3 (must have at least 3 points)

  • -
  • IndexError -- if len(coordinate) < 2 (each coordinate must have -at least 2 items)

  • -
-
-
-
- -
-
-pygame.gfxdraw.textured_polygon()¶
-
-
draw a textured polygon
-
textured_polygon(surface, points, texture, tx, ty) -> None
-
-

Draws a textured polygon on the given surface. For better performance, the -surface and the texture should have the same format.

-

A per-pixel alpha texture blit to a per-pixel alpha surface will differ from a pygame.Surface.blit() blit. Also, a per-pixel alpha texture cannot be used with an 8-bit per pixel destination.

-

The adjacent coordinates in the points argument, as well as the first -and last points, will be connected by line segments. -e.g. For the points [(x1, y1), (x2, y2), (x3, y3)] a line segment will -be drawn from (x1, y1) to (x2, y2), from (x2, y2) to -(x3, y3), and from (x3, y3) to (x1, y1).

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 3 or more (x, y) coordinates, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats (float values will be truncated)

  • -
  • texture (Surface) -- texture to draw on the polygon

  • -
  • tx (int) -- x offset of the texture

  • -
  • ty (int) -- y offset of the texture

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
Raises
-
    -
  • ValueError -- if len(points) < 3 (must have at least 3 points)

  • -
  • IndexError -- if len(coordinate) < 2 (each coordinate must have -at least 2 items)

  • -
-
-
-
- -
-
-pygame.gfxdraw.bezier()¶
-
-
draw a Bezier curve
-
bezier(surface, points, steps, color) -> None
-
-

Draws a Bézier curve on the given surface.

-
-
Parameters
-
    -
  • surface (Surface) -- surface to draw on

  • -
  • points (tuple(coordinate) or list(coordinate)) -- a sequence of 3 or more (x, y) coordinates used to form a curve, where each coordinate in the sequence must be a tuple/list/pygame.math.Vector2 of 2 ints/floats (float values will be truncated)

  • -
  • steps (int) -- number of steps for the interpolation, the minimum is 2

  • -
  • color (Color or tuple(int, int, int, [int])) -- color to draw with, the alpha value is optional if using a -tuple (RGB[A])

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
Raises
-
    -
  • ValueError -- if steps < 2

  • -
  • ValueError -- if len(points) < 3 (must have at least 3 points)

  • -
  • IndexError -- if len(coordinate) < 2 (each coordinate must have -at least 2 items)

  • -
-
-
-
- -
- -
- - -

-
-
-
-
-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/image.html b/venv/Lib/site-packages/pygame/docs/generated/ref/image.html
deleted file mode 100644
index 2444caa..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/image.html
+++ /dev/null
@@ -1,459 +0,0 @@
pygame.image — pygame v2.1.2 documentation
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.image
-
-
pygame module for image transfer
-
load —load new image from a file (or file-like object)
save —save an image to file (or file-like object)
get_sdl_image_version —get version number of the SDL_Image library being used
get_extended —test if extended image formats can be loaded
tostring —transfer image to string buffer
fromstring —create new Surface from a string buffer
frombuffer —create a new Surface that shares data inside a bytes buffer
load_basic —load new BMP image from a file (or file-like object)
load_extended —load an image from a file (or file-like object)
save_extended —save a png/jpg image to file (or file-like object)
-

The image module contains functions for loading and saving pictures, as well as -transferring Surfaces to formats usable by other packages.

-

Note that there is no Image class; an image is loaded as a Surface object. The -Surface class allows manipulation (drawing lines, setting pixels, capturing -regions, etc.).

-

The image module is a required dependency of pygame, but it only optionally -supports any extended file formats. By default it can only load uncompressed -BMP images. When built with full image support, the pygame.image.load() -function can support the following formats.

-
-
    -
  • BMP

  • -
  • GIF (non-animated)

  • -
  • JPEG

  • -
  • LBM (and PBM, PGM, PPM)

  • -
  • PCX

  • -
  • PNG

  • -
  • PNM

  • -
  • SVG (limited support, using Nano SVG)

  • -
  • TGA (uncompressed)

  • -
  • TIFF

  • -
  • WEBP

  • -
  • XPM

  • -
-
-
-

New in pygame 2.0: Loading SVG, WebP, PNM

-
-

Saving images only supports a limited set of formats. You can save to the -following formats.

-
-
    -
  • BMP

  • -
  • JPEG

  • -
  • PNG

  • -
  • TGA

  • -
-
-

JPEG and JPG refer to the same file format, as do TIF and TIFF.

-
-

New in pygame 1.8: Saving PNG and JPEG files.

-
-
-
-pygame.image.load()¶
-
-
load new image from a file (or file-like object)
-
load(filename) -> Surface
-
load(fileobj, namehint="") -> Surface
-
-

Load an image from a file source. You can pass either a filename, a Python -file-like object, or a pathlib.Path.

-

Pygame will automatically determine the image type (e.g., GIF or bitmap) -and create a new Surface object from the data. In some cases it will need to -know the file extension (e.g., GIF images should end in ".gif"). If you -pass a raw file-like object, you may also want to pass the original filename -as the namehint argument.

-

The returned Surface will contain the same color format, colorkey and alpha -transparency as the file it came from. You will often want to call -Surface.convert() with no arguments, to create a copy that will draw -more quickly on the screen.

-

For alpha transparency, like in .png images, use the convert_alpha() -method after loading so that the image has per pixel transparency.

-

pygame may not always be built to support all image formats. At minimum it -will support uncompressed BMP. If pygame.image.get_extended() -returns 'True', you should be able to load most images (including PNG, JPG -and GIF).

-

You should use os.path.join() for compatibility.

-
eg. asurf = pygame.image.load(os.path.join('data', 'bla.png'))
-
-
-
- -
-
-pygame.image.save()¶
-
-
save an image to file (or file-like object)
-
save(Surface, filename) -> None
-
save(Surface, fileobj, namehint="") -> None
-
-

This will save your Surface as either a BMP, TGA, PNG, or -JPEG image. If the filename extension is unrecognized it will default to -TGA. Both TGA, and BMP file formats create uncompressed files. -You can pass a filename, a pathlib.Path or a Python file-like object. -For file-like object, the image is saved to TGA format unless -a namehint with a recognizable extension is passed in.

-
-

Note

-

When saving to a file-like object, it seems that for most formats, -the object needs to be flushed after saving to it to make loading -from it possible.
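A small sketch saving to an in-memory file-like object; the ".png" name hint is an arbitrary example:

import io
import pygame

pygame.init()
surface = pygame.Surface((32, 32))
buffer = io.BytesIO()
# The ".png" namehint selects PNG encoding instead of the TGA default.
pygame.image.save(surface, buffer, "capture.png")
buffer.seek(0)  # rewind before reading the data back
reloaded = pygame.image.load(buffer, "capture.png")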

-
-
-

Changed in pygame 1.8: Saving PNG and JPEG files.

-
-
-

Changed in pygame 2.0.0: The namehint parameter was added to make it possible -to save other formats than TGA to a file-like object. -Saving to a file-like object with JPEG is possible.

-
-
- -
-
-pygame.image.get_sdl_image_version()¶
-
-
get version number of the SDL_Image library being used
-
get_sdl_image_version() -> None
-
get_sdl_image_version() -> (major, minor, patch)
-
-

If pygame is built with extended image formats, then this function will -return the SDL_Image library's version number as a tuple of 3 integers -(major, minor, patch). If not, then it will return None.

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.image.get_extended()¶
-
-
test if extended image formats can be loaded
-
get_extended() -> bool
-
-

If pygame is built with extended image formats this function will return -True. It is still not possible to determine which formats will be available, -but generally you will be able to load them all.

-
- -
-
-pygame.image.tostring()¶
-
-
transfer image to string buffer
-
tostring(Surface, format, flipped=False) -> string
-
-

Creates a string that can be transferred with the 'fromstring' method in -other Python imaging packages. Some Python image packages prefer their -images in bottom-to-top format (PyOpenGL for example). If you pass True for -the flipped argument, the string buffer will be vertically flipped.

-

The format argument is a string of one of the following values. Note that -only 8-bit Surfaces can use the "P" format. The other formats will work for -any Surface. Also note that other Python image packages support more formats -than pygame.

-
-
    -
  • P, 8-bit palettized Surfaces

  • -
  • RGB, 24-bit image

  • -
  • RGBX, 32-bit image with unused space

  • -
  • RGBA, 32-bit image with an alpha channel

  • -
  • ARGB, 32-bit image with alpha channel first

  • -
  • RGBA_PREMULT, 32-bit image with colors scaled by alpha channel

  • -
  • ARGB_PREMULT, 32-bit image with colors scaled by alpha channel, alpha channel first

  • -
-
-
- -
-
-pygame.image.fromstring()¶
-
-
create new Surface from a string buffer
-
fromstring(string, size, format, flipped=False) -> Surface
-
-

This function takes arguments similar to pygame.image.tostring(). The -size argument is a pair of numbers representing the width and height. Once -the new Surface is created you can destroy the string buffer.

-

The size and format must compute to the exact same size as the passed string buffer. Otherwise an exception will be raised.

-

See the pygame.image.frombuffer() method for a potentially faster way to -transfer images into pygame.
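A round-trip sketch (the surface size and fill color are arbitrary):

import pygame

pygame.init()
source = pygame.Surface((4, 4))
source.fill((255, 0, 0))
raw = pygame.image.tostring(source, "RGB")
copy = pygame.image.fromstring(raw, (4, 4), "RGB")
r, g, b, a = copy.get_at((0, 0))
assert (r, g, b) == (255, 0, 0)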

-
- -
-
-pygame.image.frombuffer()¶
-
-
create a new Surface that shares data inside a bytes buffer
-
frombuffer(bytes, size, format) -> Surface
-
-

Create a new Surface that shares pixel data directly from a bytes buffer. -This method takes similar arguments to pygame.image.fromstring(), but -is unable to vertically flip the source data.

-

This will run much faster than pygame.image.fromstring(), since no pixel data must be allocated and copied.

-

It accepts the following 'format' arguments:

-
-
    -
  • P, 8-bit palettized Surfaces

  • -
  • RGB, 24-bit image

  • -
  • BGR, 24-bit image, red and blue channels swapped.

  • -
  • RGBX, 32-bit image with unused space

  • -
  • RGBA, 32-bit image with an alpha channel

  • -
  • ARGB, 32-bit image with alpha channel first

  • -
-
-
- -
-
-pygame.image.load_basic()¶
-
-
load new BMP image from a file (or file-like object)
-
load_basic(file) -> Surface
-
-

Load an image from a file source. You can pass either a filename or a Python -file-like object, or a pathlib.Path.

-

This function only supports loading the "basic" image format, i.e. BMP. It is always available, no matter how pygame was built.

-
- -
-
-pygame.image.load_extended()¶
-
-
load an image from a file (or file-like object)
-
load_extended(filename) -> Surface
-
load_extended(fileobj, namehint="") -> Surface
-
-

This function is similar to pygame.image.load(), except that this -function can only be used if pygame was built with extended image format -support.

-

From version 2.0.1, this function is always available, but raises an -error if extended image formats are not supported. Previously, this -function may or may not be available, depending on the state of -extended image format support.

-
-

Changed in pygame 2.0.1.

-
-
- -
-
-pygame.image.save_extended()¶
-
-
save a png/jpg image to file (or file-like object)
-
save_extended(Surface, filename) -> None
-
save_extended(Surface, fileobj, namehint="") -> None
-
-

This will save your Surface as either a PNG or JPEG image.

-

In case the image is being saved to a file-like object, this function uses the namehint argument to determine the format of the file being saved. It saves to JPEG if namehint is not specified while saving to a file-like object.

-
-

Changed in pygame 2.0.1: This function is always available, but raises an -error if extended image formats are not supported. -Previously, this function may or may not be -available, depending on the state of extended image -format support.

-
-
- -
- -
- - -

-
-
-
-
-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/joystick.html b/venv/Lib/site-packages/pygame/docs/generated/ref/joystick.html
deleted file mode 100644
index 703da99..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/joystick.html
+++ /dev/null
@@ -1,984 +0,0 @@
pygame.joystick — pygame v2.1.2 documentation
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.joystick
-
-
Pygame module for interacting with joysticks, gamepads, and trackballs.
-
init —Initialize the joystick module.
quit —Uninitialize the joystick module.
get_init —Returns True if the joystick module is initialized.
get_count —Returns the number of joysticks.
Joystick —Create a new Joystick object.
-

The joystick module manages the joystick devices on a computer. -Joystick devices include trackballs and video-game-style -gamepads, and the module allows the use of multiple buttons and "hats". -Computers may manage multiple joysticks at a time.

-

Each instance of the Joystick class represents one gaming device plugged -into the computer. If a gaming pad has multiple joysticks on it, then the -joystick object can actually represent multiple joysticks on that single -game device.

-

For a quick way to initialise the joystick module and get a list of Joystick instances -use the following code:

-
pygame.joystick.init()
-joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.joystick.get_count())]
-
-
-

The following event types will be generated by the joysticks

-
JOYAXISMOTION JOYBALLMOTION JOYBUTTONDOWN JOYBUTTONUP JOYHATMOTION
-
-
-

And in pygame 2, which supports hotplugging:

-
JOYDEVICEADDED JOYDEVICEREMOVED
-
-
-

Note that in pygame 2, joystick events use a unique "instance ID". The device index passed to the Joystick constructor is not unique after devices have been added and removed. You must call Joystick.get_instance_id() to find the instance ID that was assigned to a Joystick on opening.

-

The event queue needs to be pumped frequently for some of the methods to work. So call one of pygame.event.get, pygame.event.wait, or pygame.event.pump regularly.
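A minimal hotplug-handling sketch, assuming pygame 2's JOYDEVICEADDED and JOYDEVICEREMOVED events (the joysticks dictionary is an illustrative name):

import pygame

pygame.init()
screen = pygame.display.set_mode((320, 240))

# Opened devices are keyed by instance id, which stays stable;
# the device index is only meaningful at the moment of the event.
joysticks = {}

running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.JOYDEVICEADDED:
            joy = pygame.joystick.Joystick(event.device_index)
            joysticks[joy.get_instance_id()] = joy
        elif event.type == pygame.JOYDEVICEREMOVED:
            joysticks.pop(event.instance_id, None)

pygame.quit()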

-
-
-pygame.joystick.init()¶
-
-
Initialize the joystick module.
-
init() -> None
-
-

This function is called automatically by pygame.init().

-

It initializes the joystick module. The module must be initialized before any -other functions will work.

-

It is safe to call this function more than once.

-
- -
-
-pygame.joystick.quit()¶
-
-
Uninitialize the joystick module.
-
quit() -> None
-
-

Uninitialize the joystick module. After you call this any existing joystick -objects will no longer work.

-

It is safe to call this function more than once.

-
- -
-
-pygame.joystick.get_init()¶
-
-
Returns True if the joystick module is initialized.
-
get_init() -> bool
-
-

Test if the pygame.joystick.init() function has been called.

-
- -
-
-pygame.joystick.get_count()¶
-
-
Returns the number of joysticks.
-
get_count() -> count
-
-

Return the number of joystick devices on the system. The count will be 0 -if there are no joysticks on the system.

-

When you create Joystick objects using Joystick(id), you pass an integer -that must be lower than this count.

-
- -
-
-pygame.joystick.Joystick¶
-
-
Create a new Joystick object.
-
Joystick(id) -> Joystick
-
Joystick.init — initialize the Joystick
Joystick.quit — uninitialize the Joystick
Joystick.get_init — check if the Joystick is initialized
Joystick.get_id — get the device index (deprecated)
Joystick.get_instance_id — get the joystick instance id
Joystick.get_guid — get the joystick GUID
Joystick.get_power_level — get the approximate power status of the device
Joystick.get_name — get the Joystick system name
Joystick.get_numaxes — get the number of axes on a Joystick
Joystick.get_axis — get the current position of an axis
Joystick.get_numballs — get the number of trackballs on a Joystick
Joystick.get_ball — get the relative position of a trackball
Joystick.get_numbuttons — get the number of buttons on a Joystick
Joystick.get_button — get the current button state
Joystick.get_numhats — get the number of hat controls on a Joystick
Joystick.get_hat — get the position of a joystick hat
Joystick.rumble — Start a rumbling effect
Joystick.stop_rumble — Stop any rumble effect playing

Create a new joystick to access a physical device. The id argument must be a -value from 0 to pygame.joystick.get_count() - 1.

-

Joysticks are initialised on creation and are shut down when deallocated. -Once the device is initialized the pygame event queue will start receiving -events about its input.

-
-

Changed in pygame 2.0.0: Joystick objects are now opened immediately on creation.

-
-
-
-init()¶
-
-
initialize the Joystick
-
init() -> None
-
-

Initialize the joystick, if it has been closed. It is safe to call this -even if the joystick is already initialized.

-
-

Deprecated since pygame 2.0.0: In future it will not be possible to reinitialise a closed Joystick -object. Will be removed in Pygame 2.1.

-
-
- -
-
-quit()¶
-
-
uninitialize the Joystick
-
quit() -> None
-
-

Close a Joystick object. After this the pygame event queue will no longer -receive events from the device.

-

It is safe to call this more than once.

-
- -
-
-get_init()¶
-
-
check if the Joystick is initialized
-
get_init() -> bool
-
-

Return True if the Joystick object is currently initialised.

-
- -
-
-get_id()¶
-
-
get the device index (deprecated)
-
get_id() -> int
-
-

Returns the original device index for this device. This is the same -value that was passed to the Joystick() constructor. This method can -safely be called while the Joystick is not initialized.

-
-

Deprecated since pygame 2.0.0: The original device index is not useful in pygame 2. Use -get_instance_id() instead. Will be removed in Pygame 2.1.

-
-
- -
-
-get_instance_id() int¶
-
-
get the joystick instance id
-
get_instance_id() -> int
-
-

Get the joystick instance ID. This matches the instance_id field -that is given in joystick events.

-
-

New in pygame 2.0.0dev11.

-
-
- -
-
-get_guid() str¶
-
-
get the joystick GUID
-
get_guid() -> str
-
-

Get the GUID string. This identifies the exact hardware of the joystick -device.

-
-

New in pygame 2.0.0dev11.

-
-
- -
-
-get_power_level() str¶
-
-
get the approximate power status of the device
-
get_power_level() -> str
-
-

Get a string giving the power status of the device.

-

One of: empty, low, medium, full, wired, max, or -unknown.

-
-

New in pygame 2.0.0dev11.

-
-
- -
-
-get_name()¶
-
-
get the Joystick system name
-
get_name() -> string
-
-

Returns the system name for this joystick device. It is unknown what name -the system will give to the Joystick, but it should be a unique name that -identifies the device. This method can safely be called while the -Joystick is not initialized.

-
- -
-
-get_numaxes()¶
-
-
get the number of axes on a Joystick
-
get_numaxes() -> int
-
-

Returns the number of input axes on a Joystick. There will usually be two for the position. Controls like rudders and throttles are treated as additional axes.

-

The pygame.JOYAXISMOTION events will be in the range from -1.0 to 1.0. A value of 0.0 means the axis is centered. Gamepad devices will usually be -1, 0, or 1 with no values in between. Older analog joystick axes will not always use the full -1 to 1 range, and the centered value will be some area around 0.

-

Analog joysticks usually have a bit of noise in their axis, which will -generate a lot of rapid small motion events.

-
- -
-
-get_axis()¶
-
-
get the current position of an axis
-
get_axis(axis_number) -> float
-
-

Returns the current position of a joystick axis. The value will range from -1 to 1 with a value of 0 being centered. You may want to take into account some tolerance to handle jitter, and joystick drift may keep the joystick from centering at 0 or using the full range of position values.

-

The axis number must be an integer from 0 to get_numaxes() - 1.

-

When using gamepads both the control sticks and the analog triggers are -usually reported as axes.
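For example, a small dead zone is a common way to ignore stick jitter and drift around the centre (the 0.15 threshold below is an arbitrary illustration):

DEAD_ZONE = 0.15

def read_axis(joystick, axis_number):
    """Return the axis value, snapping small readings to 0.0."""
    value = joystick.get_axis(axis_number)
    return 0.0 if abs(value) < DEAD_ZONE else value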

-
- -
-
-get_numballs()¶
-
-
get the number of trackballs on a Joystick
-
get_numballs() -> int
-
-

Returns the number of trackball devices on a Joystick. These devices work -similar to a mouse but they have no absolute position; they only have -relative amounts of movement.

-

The pygame.JOYBALLMOTION event will be sent when the trackball is -rolled. It will report the amount of movement on the trackball.

-
- -
-
-get_ball()¶
-
-
get the relative position of a trackball
-
get_ball(ball_number) -> x, y
-
-

Returns the relative movement of a joystick trackball. The value is an x, y pair holding the relative movement since the last call to get_ball.

-

The ball number must be an integer from 0 to get_numballs() - 1.

-
- -
-
-get_numbuttons()¶
-
-
get the number of buttons on a Joystick
-
get_numbuttons() -> int
-
-

Returns the number of pushable buttons on the joystick. These buttons -have a boolean (on or off) state.

-

Buttons generate a pygame.JOYBUTTONDOWN and pygame.JOYBUTTONUP -event when they are pressed and released.

-
- -
-
-get_button()¶
-
-
get the current button state
-
get_button(button) -> bool
-
-

Returns the current state of a joystick button.

-
- -
-
-get_numhats()¶
-
-
get the number of hat controls on a Joystick
-
get_numhats() -> int
-
-

Returns the number of joystick hats on a Joystick. Hat devices are like -miniature digital joysticks on a joystick. Each hat has two axes of -input.

-

The pygame.JOYHATMOTION event is generated when the hat changes -position. The position attribute for the event contains a pair of -values that are either -1, 0, or 1. A position of (0, 0) -means the hat is centered.

-
- -
-
-get_hat()¶
-
-
get the position of a joystick hat
-
get_hat(hat_number) -> x, y
-
-

Returns the current position of a joystick hat. The position is given as two values representing the x and y position for the hat. (0, 0) means centered. A value of -1 means left/down and a value of 1 means right/up: so (-1, 0) means left; (1, 0) means right; (0, 1) means up; (1, 1) means upper-right; etc.

-

This value is digital, i.e., each coordinate can be -1, 0 or 1 -but never in-between.

-

The hat number must be between 0 and get_numhats() - 1.
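A small sketch treating the first hat as a digital d-pad (joystick, player_rect, and speed are assumed to exist in the surrounding game code):

hat_x, hat_y = joystick.get_hat(0)
# Hat y is +1 for "up", but screen y grows downwards, so negate it.
player_rect.move_ip(hat_x * speed, -hat_y * speed)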

-
- -
-
-rumble()¶
-
-
Start a rumbling effect
-
rumble(low_frequency, high_frequency, duration) -> bool
-
-

Start a rumble effect on the joystick, with the specified strength ranging from 0 to 1. Duration is the length of the effect, in ms. Setting the duration to 0 will play the effect until another one overwrites it or Joystick.stop_rumble() is called. If an effect is already playing, then it will be overwritten.

-

Returns True if the rumble was played successfully, or False if the joystick does not support it or the SDL library version (see pygame.version.SDL) is below 2.0.9.
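For illustration, a short half-strength rumble on the first device (assuming a joystick that supports it):

joy = pygame.joystick.Joystick(0)
if not joy.rumble(0.5, 0.5, 500):   # low frequency, high frequency, duration in ms
    print("Rumble is not supported on this device")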

-
-

New in pygame 2.0.2.

-
-
- -
-
-stop_rumble()¶
-
-
Stop any rumble effect playing
-
stop_rumble() -> None
-
-

Stops any rumble effect playing on the joystick. See -Joystick.rumble() for more information.

-
-

New in pygame 2.0.2.

-
-
- -
- -
-joystick module example -
-

Example code for joystick module.¶

-
-
-
import pygame
-
-
-# Define some colors.
-BLACK = pygame.Color('black')
-WHITE = pygame.Color('white')
-
-
-# This is a simple class that will help us print to the screen.
-# It has nothing to do with the joysticks, just outputting the
-# information.
-class TextPrint(object):
-    def __init__(self):
-        self.reset()
-        self.font = pygame.font.Font(None, 20)
-
-    def tprint(self, screen, textString):
-        textBitmap = self.font.render(textString, True, BLACK)
-        screen.blit(textBitmap, (self.x, self.y))
-        self.y += self.line_height
-
-    def reset(self):
-        self.x = 10
-        self.y = 10
-        self.line_height = 15
-
-    def indent(self):
-        self.x += 10
-
-    def unindent(self):
-        self.x -= 10
-
-
-pygame.init()
-
-# Set the width and height of the screen (width, height).
-screen = pygame.display.set_mode((500, 700))
-
-pygame.display.set_caption("My Game")
-
-# Loop until the user clicks the close button.
-done = False
-
-# Used to manage how fast the screen updates.
-clock = pygame.time.Clock()
-
-# Initialize the joysticks.
-pygame.joystick.init()
-
-# Get ready to print.
-textPrint = TextPrint()
-
-# -------- Main Program Loop -----------
-while not done:
-    #
-    # EVENT PROCESSING STEP
-    #
-    # Possible joystick actions: JOYAXISMOTION, JOYBALLMOTION, JOYBUTTONDOWN,
-    # JOYBUTTONUP, JOYHATMOTION
-    for event in pygame.event.get(): # User did something.
-        if event.type == pygame.QUIT: # If user clicked close.
-            done = True # Flag that we are done so we exit this loop.
-        elif event.type == pygame.JOYBUTTONDOWN:
-            print("Joystick button pressed.")
-        elif event.type == pygame.JOYBUTTONUP:
-            print("Joystick button released.")
-
-    #
-    # DRAWING STEP
-    #
-    # First, clear the screen to white. Don't put other drawing commands
-    # above this, or they will be erased with this command.
-    screen.fill(WHITE)
-    textPrint.reset()
-
-    # Get count of joysticks.
-    joystick_count = pygame.joystick.get_count()
-
-    textPrint.tprint(screen, "Number of joysticks: {}".format(joystick_count))
-    textPrint.indent()
-
-    # For each joystick:
-    for i in range(joystick_count):
-        joystick = pygame.joystick.Joystick(i)
-        joystick.init()
-
-        try:
-            jid = joystick.get_instance_id()
-        except AttributeError:
-            # get_instance_id() is an SDL2 method
-            jid = joystick.get_id()
-        textPrint.tprint(screen, "Joystick {}".format(jid))
-        textPrint.indent()
-
-        # Get the name from the OS for the controller/joystick.
-        name = joystick.get_name()
-        textPrint.tprint(screen, "Joystick name: {}".format(name))
-
-        try:
-            guid = joystick.get_guid()
-        except AttributeError:
-            # get_guid() is an SDL2 method
-            pass
-        else:
-            textPrint.tprint(screen, "GUID: {}".format(guid))
-
-        # Usually axis run in pairs, up/down for one, and left/right for
-        # the other.
-        axes = joystick.get_numaxes()
-        textPrint.tprint(screen, "Number of axes: {}".format(axes))
-        textPrint.indent()
-
-        for i in range(axes):
-            axis = joystick.get_axis(i)
-            textPrint.tprint(screen, "Axis {} value: {:>6.3f}".format(i, axis))
-        textPrint.unindent()
-
-        buttons = joystick.get_numbuttons()
-        textPrint.tprint(screen, "Number of buttons: {}".format(buttons))
-        textPrint.indent()
-
-        for i in range(buttons):
-            button = joystick.get_button(i)
-            textPrint.tprint(screen,
-                             "Button {:>2} value: {}".format(i, button))
-        textPrint.unindent()
-
-        hats = joystick.get_numhats()
-        textPrint.tprint(screen, "Number of hats: {}".format(hats))
-        textPrint.indent()
-
-        # Hat position. All or nothing for direction, not a float like
-        # get_axis(). Position is a tuple of int values (x, y).
-        for i in range(hats):
-            hat = joystick.get_hat(i)
-            textPrint.tprint(screen, "Hat {} value: {}".format(i, str(hat)))
-        textPrint.unindent()
-
-        textPrint.unindent()
-
-    #
-    # ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT
-    #
-
-    # Go ahead and update the screen with what we've drawn.
-    pygame.display.flip()
-
-    # Limit to 20 frames per second.
-    clock.tick(20)
-
-# Close the window and quit.
-# If you forget this line, the program will 'hang'
-# on exit if running from IDLE.
-pygame.quit()
-
-
-
-

Common Controller Axis Mappings

-

Controller mappings are drawn from the underlying SDL library which pygame uses and they differ -between pygame 1 and pygame 2. Below are a couple of mappings for two popular game pads.

-

Pygame 2

-

Axis and hat mappings are listed from -1 to +1.

-

X-Box 360 Controller (name: "Xbox 360 Controller")

-

In pygame 2 the X360 controller mapping has 6 Axes, 11 buttons and 1 hat.

-
    -
  • Left Stick:

    -
    Left -> Right   - Axis 0
    -Up   -> Down    - Axis 1
    -
    -
    -
  • -
  • Right Stick:

    -
    Left -> Right   - Axis 3
    -Up   -> Down    - Axis 4
    -
    -
    -
  • -
  • Left Trigger:

    -
    Out -> In       - Axis 2
    -
    -
    -
  • -
  • Right Trigger:

    -
    Out -> In       - Axis 5
    -
    -
    -
  • -
  • Buttons:

    -
    A Button        - Button 0
    -B Button        - Button 1
    -X Button        - Button 2
    -Y Button        - Button 3
    -Left Bumper     - Button 4
    -Right Bumper    - Button 5
    -Back Button     - Button 6
    -Start Button    - Button 7
    -L. Stick In     - Button 8
    -R. Stick In     - Button 9
    -Guide Button    - Button 10
    -
    -
    -
  • -
  • Hat/D-pad:

    -
    Down -> Up      - Y Axis
    -Left -> Right   - X Axis
    -
    -
    -
  • -
-

Playstation 4 Controller (name: "PS4 Controller")

-

In pygame 2 the PS4 controller mapping has 6 Axes and 16 buttons.

-
    -
  • Left Stick:

    -
    Left -> Right   - Axis 0
    -Up   -> Down    - Axis 1
    -
    -
    -
  • -
  • Right Stick:

    -
    Left -> Right   - Axis 2
    -Up   -> Down    - Axis 3
    -
    -
    -
  • -
  • Left Trigger:

    -
    Out -> In       - Axis 4
    -
    -
    -
  • -
  • Right Trigger:

    -
    Out -> In       - Axis 5
    -
    -
    -
  • -
  • Buttons:

    -
    Cross Button    - Button 0
    -Circle Button   - Button 1
    -Square Button   - Button 2
    -Triangle Button - Button 3
    -Share Button    - Button 4
    -PS Button       - Button 5
    -Options Button  - Button 6
    -L. Stick In     - Button 7
    -R. Stick In     - Button 8
    -Left Bumper     - Button 9
    -Right Bumper    - Button 10
    -D-pad Up        - Button 11
    -D-pad Down      - Button 12
    -D-pad Left      - Button 13
    -D-pad Right     - Button 14
    -Touch Pad Click - Button 15
    -
    -
    -
  • -
-

Pygame 1

-

Axis and hat mappings are listed from -1 to +1.

-

X-Box 360 Controller (name: "Controller (XBOX 360 For Windows)")

-

In pygame 1 the X360 controller mapping has 5 Axes, 10 buttons and 1 hat.

-
    -
  • Left Stick:

    -
    Left -> Right   - Axis 0
    -Up   -> Down    - Axis 1
    -
    -
    -
  • -
  • Right Stick:

    -
    Left -> Right   - Axis 4
    -Up   -> Down    - Axis 3
    -
    -
    -
  • -
  • Left Trigger & Right Trigger:

    -
    RT -> LT        - Axis 2
    -
    -
    -
  • -
  • Buttons:

    -
    A Button        - Button 0
    -B Button        - Button 1
    -X Button        - Button 2
    -Y Button        - Button 3
    -Left Bumper     - Button 4
    -Right Bumper    - Button 5
    -Back Button     - Button 6
    -Start Button    - Button 7
    -L. Stick In     - Button 8
    -R. Stick In     - Button 9
    -
    -
    -
  • -
  • Hat/D-pad:

    -
    Down -> Up      - Y Axis
    -Left -> Right   - X Axis
    -
    -
    -
  • -
-

Playstation 4 Controller (name: "Wireless Controller")

-

In pygame 1 the PS4 controller mapping has 6 axes, 14 buttons and 1 hat.

-
    -
  • Left Stick:

    -
    Left -> Right   - Axis 0
    -Up   -> Down    - Axis 1
    -
    -
    -
  • -
  • Right Stick:

    -
    Left -> Right   - Axis 2
    -Up   -> Down    - Axis 3
    -
    -
    -
  • -
  • Left Trigger:

    -
    Out -> In       - Axis 5
    -
    -
    -
  • -
  • Right Trigger:

    -
    Out -> In       - Axis 4
    -
    -
    -
  • -
  • Buttons:

    -
    Cross Button    - Button 0
    -Circle Button   - Button 1
    -Square Button   - Button 2
    -Triangle Button - Button 3
    -Left Bumper     - Button 4
    -Right Bumper    - Button 5
    -L. Trigger(Full)- Button 6
    -R. Trigger(Full)- Button 7
    -Share Button    - Button 8
    -Options Button  - Button 9
    -L. Stick In     - Button 10
    -R. Stick In     - Button 11
    -PS Button       - Button 12
    -Touch Pad Click - Button 13
    -
    -
    -
  • -
  • Hat/D-pad:

    -
    Down -> Up      - Y Axis
    -Left -> Right   - X Axis
    -
    -
    -
  • -
-
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/key.html b/venv/Lib/site-packages/pygame/docs/generated/ref/key.html deleted file mode 100644 index c662fb2..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/key.html +++ /dev/null @@ -1,628 +0,0 @@ - - - - - - - - - pygame.key — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.key
-
-
pygame module to work with the keyboard
-
pygame.key.get_focused — true if the display is receiving keyboard input from the system
pygame.key.get_pressed — get the state of all keyboard buttons
pygame.key.get_mods — determine which modifier keys are being held
pygame.key.set_mods — temporarily set which modifier keys are pressed
pygame.key.set_repeat — control how held keys are repeated
pygame.key.get_repeat — see how held keys are repeated
pygame.key.name — get the name of a key identifier
pygame.key.key_code — get the key identifier from a key name
pygame.key.start_text_input — start handling Unicode text input events
pygame.key.stop_text_input — stop handling Unicode text input events
pygame.key.set_text_input_rect — controls the position of the candidate list

This module contains functions for dealing with the keyboard.

-

The pygame.event queue gets pygame.KEYDOWN and pygame.KEYUP events when the keyboard buttons are pressed and released. Both events have key and mod attributes.

-
-
    -
  • key: an integer ID representing every key -on the keyboard

  • -
  • mod: a bitmask of all the modifier keys -that were in a pressed state when the event occurred

  • -
-
-

The pygame.KEYDOWN event has the additional attributes unicode and -scancode.

-
-
    -
  • unicode: a single character string that is the fully translated -character entered, this takes into account the shift and composition keys

  • -
  • scancode: the platform-specific key code, which could be different from -keyboard to keyboard, but is useful for key selection of weird keys like -the multimedia keys

  • -
-
-
-

New in pygame 2.0.0: The pygame.TEXTINPUT event is preferred to the unicode attribute -of pygame.KEYDOWN. The attribute text contains the input.

-
-

The following is a list of all the constants (from pygame.locals) used to represent keyboard keys.

-

Portability note: The integers for key constants differ between pygame 1 and 2. -Always use key constants (K_a) rather than integers directly (97) so -that your key handling code works well on both pygame 1 and pygame 2.

-
pygame
-Constant      ASCII   Description
----------------------------------
-K_BACKSPACE   \b      backspace
-K_TAB         \t      tab
-K_CLEAR               clear
-K_RETURN      \r      return
-K_PAUSE               pause
-K_ESCAPE      ^[      escape
-K_SPACE               space
-K_EXCLAIM     !       exclaim
-K_QUOTEDBL    "       quotedbl
-K_HASH        #       hash
-K_DOLLAR      $       dollar
-K_AMPERSAND   &       ampersand
-K_QUOTE               quote
-K_LEFTPAREN   (       left parenthesis
-K_RIGHTPAREN  )       right parenthesis
-K_ASTERISK    *       asterisk
-K_PLUS        +       plus sign
-K_COMMA       ,       comma
-K_MINUS       -       minus sign
-K_PERIOD      .       period
-K_SLASH       /       forward slash
-K_0           0       0
-K_1           1       1
-K_2           2       2
-K_3           3       3
-K_4           4       4
-K_5           5       5
-K_6           6       6
-K_7           7       7
-K_8           8       8
-K_9           9       9
-K_COLON       :       colon
-K_SEMICOLON   ;       semicolon
-K_LESS        <       less-than sign
-K_EQUALS      =       equals sign
-K_GREATER     >       greater-than sign
-K_QUESTION    ?       question mark
-K_AT          @       at
-K_LEFTBRACKET [       left bracket
-K_BACKSLASH   \       backslash
-K_RIGHTBRACKET ]      right bracket
-K_CARET       ^       caret
-K_UNDERSCORE  _       underscore
-K_BACKQUOTE   `       grave
-K_a           a       a
-K_b           b       b
-K_c           c       c
-K_d           d       d
-K_e           e       e
-K_f           f       f
-K_g           g       g
-K_h           h       h
-K_i           i       i
-K_j           j       j
-K_k           k       k
-K_l           l       l
-K_m           m       m
-K_n           n       n
-K_o           o       o
-K_p           p       p
-K_q           q       q
-K_r           r       r
-K_s           s       s
-K_t           t       t
-K_u           u       u
-K_v           v       v
-K_w           w       w
-K_x           x       x
-K_y           y       y
-K_z           z       z
-K_DELETE              delete
-K_KP0                 keypad 0
-K_KP1                 keypad 1
-K_KP2                 keypad 2
-K_KP3                 keypad 3
-K_KP4                 keypad 4
-K_KP5                 keypad 5
-K_KP6                 keypad 6
-K_KP7                 keypad 7
-K_KP8                 keypad 8
-K_KP9                 keypad 9
-K_KP_PERIOD   .       keypad period
-K_KP_DIVIDE   /       keypad divide
-K_KP_MULTIPLY *       keypad multiply
-K_KP_MINUS    -       keypad minus
-K_KP_PLUS     +       keypad plus
-K_KP_ENTER    \r      keypad enter
-K_KP_EQUALS   =       keypad equals
-K_UP                  up arrow
-K_DOWN                down arrow
-K_RIGHT               right arrow
-K_LEFT                left arrow
-K_INSERT              insert
-K_HOME                home
-K_END                 end
-K_PAGEUP              page up
-K_PAGEDOWN            page down
-K_F1                  F1
-K_F2                  F2
-K_F3                  F3
-K_F4                  F4
-K_F5                  F5
-K_F6                  F6
-K_F7                  F7
-K_F8                  F8
-K_F9                  F9
-K_F10                 F10
-K_F11                 F11
-K_F12                 F12
-K_F13                 F13
-K_F14                 F14
-K_F15                 F15
-K_NUMLOCK             numlock
-K_CAPSLOCK            capslock
-K_SCROLLOCK           scrollock
-K_RSHIFT              right shift
-K_LSHIFT              left shift
-K_RCTRL               right control
-K_LCTRL               left control
-K_RALT                right alt
-K_LALT                left alt
-K_RMETA               right meta
-K_LMETA               left meta
-K_LSUPER              left Windows key
-K_RSUPER              right Windows key
-K_MODE                mode shift
-K_HELP                help
-K_PRINT               print screen
-K_SYSREQ              sysrq
-K_BREAK               break
-K_MENU                menu
-K_POWER               power
-K_EURO                Euro
-K_AC_BACK             Android back button
-
-
-

The keyboard also has a list of modifier states (from pygame.locals) that can be assembled by bitwise-ORing them together.

-
pygame
-Constant      Description
--------------------------
-KMOD_NONE     no modifier keys pressed
-KMOD_LSHIFT   left shift
-KMOD_RSHIFT   right shift
-KMOD_SHIFT    left shift or right shift or both
-KMOD_LCTRL    left control
-KMOD_RCTRL    right control
-KMOD_CTRL     left control or right control or both
-KMOD_LALT     left alt
-KMOD_RALT     right alt
-KMOD_ALT      left alt or right alt or both
-KMOD_LMETA    left meta
-KMOD_RMETA    right meta
-KMOD_META     left meta or right meta or both
-KMOD_CAPS     caps lock
-KMOD_NUM      num lock
-KMOD_MODE     AltGr
-
-
-

The modifier information is contained in the mod attribute of the -pygame.KEYDOWN and pygame.KEYUP events. The mod attribute is a -bitmask of all the modifier keys that were in a pressed state when the event -occurred. The modifier information can be decoded using a bitwise AND (except -for KMOD_NONE, which should be compared using equals ==). For example:

-
for event in pygame.event.get():
-    if event.type == pygame.KEYDOWN or event.type == pygame.KEYUP:
-        if event.mod == pygame.KMOD_NONE:
-            print('No modifier keys were in a pressed state when this '
-                  'event occurred.')
-        else:
-            if event.mod & pygame.KMOD_LSHIFT:
-                print('Left shift was in a pressed state when this event '
-                      'occurred.')
-            if event.mod & pygame.KMOD_RSHIFT:
-                print('Right shift was in a pressed state when this event '
-                      'occurred.')
-            if event.mod & pygame.KMOD_SHIFT:
-                print('Left shift or right shift or both were in a '
-                      'pressed state when this event occurred.')
-
-
-
-
-pygame.key.get_focused()¶
-
-
true if the display is receiving keyboard input from the system
-
get_focused() -> bool
-
-

Returns True when the display window has keyboard focus from the system. If the display needs to ensure it does not lose keyboard focus, it can use pygame.event.set_grab() to grab all input.

-
- -
-
-pygame.key.get_pressed()¶
-
-
get the state of all keyboard buttons
-
get_pressed() -> bools
-
-

Returns a sequence of boolean values representing the state of every key on -the keyboard. Use the key constant values to index the array. A True -value means that the button is pressed.

-
-

Note

-

Getting the list of pushed buttons with this function is not the proper way to handle text entry from the user. There is no way to know the order of keys pressed, and rapidly pushed keys can be completely unnoticed between two calls to pygame.key.get_pressed(). There is also no way to translate these pushed keys into a fully translated character value. See the pygame.KEYDOWN events on the pygame.event queue for this functionality.
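What get_pressed() is good for is continuous state, such as movement held over several frames. A minimal sketch (player_rect and speed are illustrative names):

keys = pygame.key.get_pressed()
dx = keys[pygame.K_RIGHT] - keys[pygame.K_LEFT]
dy = keys[pygame.K_DOWN] - keys[pygame.K_UP]
player_rect.move_ip(dx * speed, dy * speed)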

-
-
- -
-
-pygame.key.get_mods()¶
-
-
determine which modifier keys are being held
-
get_mods() -> int
-
-

Returns a single integer representing a bitmask of all the modifier keys -being held. Using bitwise operators you can test if specific -modifier keys are pressed.

-
- -
-
-pygame.key.set_mods()¶
-
-
temporarily set which modifier keys are pressed
-
set_mods(int) -> None
-
-

Create a bitmask of the modifier key constants -you want to impose on your program.

-
- -
-
-pygame.key.set_repeat()¶
-
-
control how held keys are repeated
-
set_repeat() -> None
-
set_repeat(delay) -> None
-
set_repeat(delay, interval) -> None
-
-

When the keyboard repeat is enabled, keys that are held down will generate multiple pygame.KEYDOWN events. The delay parameter is the number of milliseconds before the first repeated pygame.KEYDOWN event will be sent. After that, another pygame.KEYDOWN event will be sent every interval milliseconds. If a delay value is provided and an interval value is not provided or is 0, then the interval will be set to the same value as delay.

-

To disable key repeat call this function with no arguments or with delay -set to 0.

-

When pygame is initialized the key repeat is disabled.
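For example (the delay and interval values are illustrative):

# First repeated KEYDOWN after 300 ms, then one every 50 ms.
pygame.key.set_repeat(300, 50)

# Calling it with no arguments disables key repeat again.
pygame.key.set_repeat()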

-
-
Raises
-

ValueError -- if delay or interval is < 0

-
-
-
-

Changed in pygame 2.0.0: A ValueError is now raised (instead of a -pygame.error) if delay or interval is < 0.

-
-
- -
-
-pygame.key.get_repeat()¶
-
-
see how held keys are repeated
-
get_repeat() -> (delay, interval)
-
-

Get the delay and interval keyboard repeat values. Refer to pygame.key.set_repeat() for a description of these values.

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.key.name()¶
-
-
get the name of a key identifier
-
name(key) -> string
-
-

Get the descriptive name of the button from a keyboard button id constant.

-
- -
-
-pygame.key.key_code()¶
-
-
get the key identifier from a key name
-
key_code(name=string) -> int
-
-

Get the key identifier code from the descriptive name of the key. This -returns an integer matching one of the K_* keycodes. For example:

-
>>> pygame.key.key_code("return") == pygame.K_RETURN
-True
->>> pygame.key.key_code("0") == pygame.K_0
-True
->>> pygame.key.key_code("space") == pygame.K_SPACE
-True
-
-
-
-
Raises
-
    -
  • ValueError -- if the key name is not known.

  • -
  • NotImplementedError -- if used with SDL 1.

  • -
-
-
-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.key.start_text_input()¶
-
-
start handling Unicode text input events
-
start_text_input() -> None
-
-

Start receiving pygame.TEXTEDITING and pygame.TEXTINPUT -events. If applicable, show the on-screen keyboard or IME editor.

-

For many languages, key presses will automatically generate a -corresponding pygame.TEXTINPUT event. Special keys like -escape or function keys, and certain key combinations will not -generate pygame.TEXTINPUT events.

-

In other languages, entering a single symbol may require multiple -key presses, or a language-specific user interface. In this case, -pygame.TEXTINPUT events are preferable to pygame.KEYDOWN -events for text input.

-

A pygame.TEXTEDITING event is received when an IME composition -is started or changed. It contains the composition text, length, -and editing start position within the composition (attributes -text, length, and start, respectively). -When the composition is committed (or non-IME input is received), -a pygame.TEXTINPUT event is generated.

-

Text input events handling is on by default.
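A rough sketch of a simple text-entry loop built on these events (entering_text and text are illustrative names):

pygame.key.start_text_input()
entering_text = True
text = ""

for event in pygame.event.get():
    if not entering_text:
        continue
    if event.type == pygame.TEXTINPUT:
        text += event.text
    elif event.type == pygame.KEYDOWN:
        if event.key == pygame.K_BACKSPACE:
            text = text[:-1]
        elif event.key == pygame.K_RETURN:
            entering_text = False
            pygame.key.stop_text_input()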

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.key.stop_text_input()¶
-
-
stop handling Unicode text input events
-
stop_text_input() -> None
-
-

Stop receiving pygame.TEXTEDITING and pygame.TEXTINPUT -events. If an on-screen keyboard or IME editor was shown with -pygame.key.start_text_input(), hide it again.

-

Text input events handling is on by default.

-

To avoid triggering the IME editor or the on-screen keyboard -when the user is holding down a key during gameplay, text input -should be disabled once text entry is finished, or when the user -clicks outside of a text box.

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.key.set_text_input_rect()¶
-
-
controls the position of the candidate list
-
set_text_input_rect(Rect) -> None
-
-

This sets the rectangle used for typing with an IME. -It controls where the candidate list will open, if supported.

-
-

New in pygame 2.0.0.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/locals.html b/venv/Lib/site-packages/pygame/docs/generated/ref/locals.html deleted file mode 100644 index baa5f8e..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/locals.html +++ /dev/null @@ -1,161 +0,0 @@ - - - - - - - - - pygame.locals — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.locals
-
-
pygame constants
-
-

This module contains various constants used by pygame. Its contents are automatically placed in the pygame module namespace. However, an application can use pygame.locals to include only the pygame constants with a from pygame.locals import *.

-

Detailed descriptions of the various constants can be found throughout the -pygame documentation. Here are the locations of some of them.

-
-
-
-
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/mask.html b/venv/Lib/site-packages/pygame/docs/generated/ref/mask.html deleted file mode 100644 index 88e1219..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/mask.html +++ /dev/null @@ -1,1123 +0,0 @@ - - - - - - - - - pygame.mask — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.mask
-
-
pygame module for image masks.
-
pygame.mask.from_surface — Creates a Mask from the given surface
pygame.mask.from_threshold — Creates a mask by thresholding Surfaces
pygame.mask.Mask — pygame object for representing 2D bitmasks

Useful for fast pixel perfect collision detection. A mask uses 1 bit per-pixel -to store which parts collide.

-
-

New in pygame 1.8.

-
-
-

Changed in pygame 2.0.2: Mask functions now support keyword arguments.

-
-
-

Changed in pygame 2.0.2: Mask functions that take positions or offsets now support pygame.math.Vector2 arguments.

-
-
-
-pygame.mask.from_surface()¶
-
-
Creates a Mask from the given surface
-
from_surface(surface) -> Mask
-
from_surface(surface, threshold=127) -> Mask
-
-

Creates a Mask object from the given surface by setting all the -opaque pixels and not setting the transparent pixels.

-

If the surface uses a color-key, then it is used to decide which bits in -the resulting mask are set. All the pixels that are not equal to the -color-key are set and the pixels equal to the color-key are not set.

-

If a color-key is not used, then the alpha value of each pixel is used to -decide which bits in the resulting mask are set. All the pixels that have an -alpha value greater than the threshold parameter are set and the -pixels with an alpha value less than or equal to the threshold are -not set.

-
-
Parameters
-
    -
  • surface (Surface) -- the surface to create the mask from

  • -
  • threshold (int) -- (optional) the alpha threshold (default is 127) to -compare with each surface pixel's alpha value, if the surface is -color-keyed this parameter is ignored

  • -
-
-
Returns
-

a newly created Mask object from the given surface

-
-
Return type
-

Mask

-
-
-
-

Note

-

This function is used to create the masks for pygame.sprite.collide_mask().
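A minimal pixel-perfect collision sketch using masks directly (the sprite surfaces and rects are assumed to exist in the surrounding game code):

player_mask = pygame.mask.from_surface(player_image)
enemy_mask = pygame.mask.from_surface(enemy_image)

# Offset of the enemy's top left corner relative to the player's.
offset = (enemy_rect.x - player_rect.x, enemy_rect.y - player_rect.y)
if player_mask.overlap(enemy_mask, offset):
    print("pixel-perfect hit")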

-
-
- -
-
-pygame.mask.from_threshold()¶
-
-
Creates a mask by thresholding Surfaces
-
from_threshold(surface, color) -> Mask
-
from_threshold(surface, color, threshold=(0, 0, 0, 255), othersurface=None, palette_colors=1) -> Mask
-
-

This is a more featureful method of getting a Mask from a surface.

-

If the optional othersurface is not used, all the pixels within the -threshold of the color parameter are set in the resulting mask.

-

If the optional othersurface is used, every pixel in the first surface -that is within the threshold of the corresponding pixel in -othersurface is set in the resulting mask.

-
-
Parameters
-
    -
  • surface (Surface) -- the surface to create the mask from

  • -
  • color (Color or int or tuple(int, int, int, [int]) or list[int, int, int, [int]]) -- color used to check if the surface's pixels are within the -given threshold range, this parameter is ignored if the optional -othersurface parameter is supplied

  • -
  • threshold (Color or int or tuple(int, int, int, [int]) or list[int, int, int, [int]]) -- (optional) the threshold range used to check the difference -between two colors (default is (0, 0, 0, 255))

  • -
  • othersurface (Surface) -- (optional) used to check whether the pixels of -the first surface are within the given threshold range of the pixels -from this surface (default is None)

  • -
  • palette_colors (int) -- (optional) indicates whether to use the palette -colors or not, a nonzero value causes the palette colors to be used and a -0 causes them not to be used (default is 1)

  • -
-
-
Returns
-

a newly created Mask object from the given surface

-
-
Return type
-

Mask

-
-
-
- -
-
-pygame.mask.Mask¶
-
-
pygame object for representing 2D bitmasks
-
Mask(size=(width, height)) -> Mask
-
Mask(size=(width, height), fill=False) -> Mask
-
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-—Returns a new copy of the mask
-—Returns the size of the mask
-—Returns a Rect based on the size of the mask
-—Gets the bit at the given position
-—Sets the bit at the given position
-—Returns the point of intersection
-—Returns the number of overlapping set bits
-—Returns a mask of the overlapping set bits
-—Sets all bits to 1
-—Sets all bits to 0
-—Flips all the bits
-—Resizes a mask
-—Draws a mask onto another
-—Erases a mask from another
-—Returns the number of set bits
-—Returns the centroid of the set bits
-—Returns the orientation of the set bits
-—Returns a list of points outlining an object
-—Returns the convolution of this mask with another mask
-—Returns a mask containing a connected component
-—Returns a list of masks of connected components
-—Returns a list of bounding rects of connected components
-—Returns a surface with the mask drawn on it
-

A Mask object is used to represent a 2D bitmask. Each bit in -the mask represents a pixel. 1 is used to indicate a set bit and 0 is used -to indicate an unset bit. Set bits in a mask can be used to detect collisions -with other masks and their set bits.

-

A filled mask has all of its bits set to 1; conversely, an unfilled/cleared/empty mask has all of its bits set to 0. Masks can be created unfilled (default) or filled by using the fill parameter. Masks can also be cleared or filled using the pygame.mask.Mask.clear() and pygame.mask.Mask.fill() methods respectively.

-

A mask's coordinates start in the top left corner at (0, 0), just like pygame.Surface. Individual bits can be accessed using the pygame.mask.Mask.get_at() and pygame.mask.Mask.set_at() methods.

-

The methods overlap(), overlap_area(), overlap_mask(), -draw(), erase(), and convolve() use an offset parameter -to indicate the offset of another mask's top left corner from the calling -mask's top left corner. The calling mask's top left corner is considered to -be the origin (0, 0). Offsets are a sequence of two values -(x_offset, y_offset). Positive and negative offset values are supported.

-
           0 to x (x_offset)
-           :    :
-   0 ..... +----:---------+
-   to      |    :         |
-   y .......... +-----------+
-(y_offset) |    | othermask |
-           |    +-----------+
-           | calling_mask |
-           +--------------+
-
-
-
-
Parameters
-
    -
  • size -- the dimensions of the mask (width and height)

  • -
  • fill (bool) -- (optional) create an unfilled mask (default: False) or -filled mask (True)

  • -
-
-
Returns
-

a newly created Mask object

-
-
Return type
-

Mask

-
-
-
-

Changed in pygame 2.0.0: Shallow copy support added. The Mask class supports the special -method __copy__() and shallow copying via copy.copy(mask).

-
-
-

Changed in pygame 2.0.0: Subclassing support added. The Mask class -can be used as a base class.

-
-
-

Changed in pygame 1.9.5: Added support for keyword arguments.

-
-
-

Changed in pygame 1.9.5: Added the optional keyword parameter fill.

-
-
-

Changed in pygame 1.9.5: Added support for masks with a width and/or a -height of 0.

-
-
-
-copy()¶
-
-
Returns a new copy of the mask
-
copy() -> Mask
-
-
-
Returns
-

a new copy of this mask, the new mask will have the same width, -height, and set/unset bits as the original

-
-
Return type
-

Mask

-
-
-
-

Note

-

If a mask subclass needs to copy any instance specific attributes -then it should override the __copy__() method. The overridden -__copy__() method needs to call super().__copy__() and then -copy the required data as in the following example code.

-
class SubMask(pygame.mask.Mask):
-    def __copy__(self):
-        new_mask = super().__copy__()
-        # Do any SubMask attribute copying here.
-        return new_mask
-
-
-
-
-

New in pygame 2.0.0.

-
-
- -
-
-get_size()¶
-
-
Returns the size of the mask
-
get_size() -> (width, height)
-
-
-
Returns
-

the size of the mask, (width, height)

-
-
Return type
-

tuple(int, int)

-
-
-
- -
-
-get_rect()¶
-
-
Returns a Rect based on the size of the mask
-
get_rect(**kwargs) -> Rect
-
-

Returns a new pygame.Rect object based on the size of this mask. The rect's default position will be (0, 0) and its default width and height will be the same as this mask's. The rect's attributes can be altered via pygame.Rect attribute keyword arguments/values passed into this method. As an example, a_mask.get_rect(center=(10, 5)) would create a pygame.Rect based on the mask's size centered at the given position.

-
-
Parameters
-

kwargs (dict) -- pygame.Rect attribute keyword arguments/values that will be applied to the rect

-
-
Returns
-

a new pygame.Rect object based on the size of this mask with any pygame.Rect attribute keyword arguments/values applied to it

-
-
Return type
-

Rect

-
-
-
-

New in pygame 2.0.0.

-
-
- -
-
-get_at()¶
-
-
Gets the bit at the given position
-
get_at(pos) -> int
-
-
-
Parameters
-

pos -- the position of the bit to get (x, y)

-
-
Returns
-

1 if the bit is set, 0 if the bit is not set

-
-
Return type
-

int

-
-
Raises
-

IndexError -- if the position is outside of the mask's bounds

-
-
-
- -
-
-set_at()¶
-
-
Sets the bit at the given position
-
set_at(pos) -> None
-
set_at(pos, value=1) -> None
-
-
-
Parameters
-
    -
  • pos -- the position of the bit to set (x, y)

  • -
  • value (int) -- any nonzero int will set the bit to 1, 0 will set the -bit to 0 (default is 1)

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
Raises
-

IndexError -- if the position is outside of the mask's bounds

-
-
-
- -
-
-overlap()¶
-
-
Returns the point of intersection
-
overlap(other, offset) -> (x, y)
-
overlap(other, offset) -> None
-
-

Returns the first point of intersection encountered between this mask and -other. A point of intersection is 2 overlapping set bits.

-

The current algorithm searches the overlapping area in -sizeof(unsigned long int) * CHAR_BIT bit wide column blocks (the value -of sizeof(unsigned long int) * CHAR_BIT is platform dependent, for -clarity it will be referred to as W). Starting at the top left corner -it checks bits 0 to W - 1 of the first row ((0, 0) to -(W - 1, 0)) then continues to the next row ((0, 1) to -(W - 1, 1)). Once this entire column block is checked, it continues to -the next one (W to 2 * W - 1). This is repeated until it finds a -point of intersection or the entire overlapping area is checked.

-
-
Parameters
-
    -
  • other (Mask) -- the other mask to overlap with this mask

  • -
  • offset -- the offset of other from this mask, for more -details refer to the Mask offset notes

  • -
-
-
Returns
-

point of intersection or None if no intersection

-
-
Return type
-

tuple(int, int) or NoneType

-
-
-
- -
-
-overlap_area()¶
-
-
Returns the number of overlapping set bits
-
overlap_area(other, offset) -> numbits
-
-

Returns the number of overlapping set bits between between this mask and -other.

-

This can be useful for collision detection. An approximate collision -normal can be found by calculating the gradient of the overlapping area -through the finite difference.

-
dx = mask.overlap_area(other, (x + 1, y)) - mask.overlap_area(other, (x - 1, y))
-dy = mask.overlap_area(other, (x, y + 1)) - mask.overlap_area(other, (x, y - 1))
-
-
-
-
Parameters
-
    -
  • other (Mask) -- the other mask to overlap with this mask

  • -
  • offset -- the offset of other from this mask, for more -details refer to the Mask offset notes

  • -
-
-
Returns
-

the number of overlapping set bits

-
-
Return type
-

int

-
-
-
- -
-
-overlap_mask()¶
-
-
Returns a mask of the overlapping set bits
-
overlap_mask(other, offset) -> Mask
-
-

Returns a Mask, the same size as this mask, containing the -overlapping set bits between this mask and other.

-
-
Parameters
-
    -
  • other (Mask) -- the other mask to overlap with this mask

  • -
  • offset -- the offset of other from this mask, for more -details refer to the Mask offset notes

  • -
-
-
Returns
-

a newly created Mask with the overlapping bits set

-
-
Return type
-

Mask

-
-
-
- -
-
-fill()¶
-
-
Sets all bits to 1
-
fill() -> None
-
-

Sets all bits in the mask to 1.

-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-clear()¶
-
-
Sets all bits to 0
-
clear() -> None
-
-

Sets all bits in the mask to 0.

-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-invert()¶
-
-
Flips all the bits
-
invert() -> None
-
-

Flips all of the bits in the mask. All the set bits are cleared to 0 and -all the unset bits are set to 1.

-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-scale()¶
-
-
Resizes a mask
-
scale((width, height)) -> Mask
-
-

Creates a new Mask of the requested size with its bits scaled -from this mask.

-
-
Parameters
-

size -- the width and height (size) of the mask to create

-
-
Returns
-

a new Mask object with its bits scaled from this mask

-
-
Return type
-

Mask

-
-
Raises
-

ValueError -- if width < 0 or height < 0

-
-
-
- -
-
-draw()¶
-
-
Draws a mask onto another
-
draw(other, offset) -> None
-
-

Performs a bitwise OR, drawing other onto this mask.

-
-
Parameters
-
    -
  • other (Mask) -- the mask to draw onto this mask

  • -
  • offset -- the offset of other from this mask, for more -details refer to the Mask offset notes

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-erase()¶
-
-
Erases a mask from another
-
erase(other, offset) -> None
-
-

Erases (clears) all bits set in other from this mask.

-
-
Parameters
-
    -
  • other (Mask) -- the mask to erase from this mask

  • -
  • offset -- the offset of other from this mask, for more -details refer to the Mask offset notes

  • -
-
-
Returns
-

None

-
-
Return type
-

NoneType

-
-
-
- -
-
-count()¶
-
-
Returns the number of set bits
-
count() -> bits
-
-
-
Returns
-

the number of set bits in the mask

-
-
Return type
-

int

-
-
-
- -
-
-centroid()¶
-
-
Returns the centroid of the set bits
-
centroid() -> (x, y)
-
-

Finds the centroid (the center mass of the set bits) for this mask.

-
-
Returns
-

a coordinate tuple indicating the centroid of the mask, it will -return (0, 0) if the mask has no bits set

-
-
Return type
-

tuple(int, int)

-
-
-
- -
-
-angle()¶
-
-
Returns the orientation of the set bits
-
angle() -> theta
-
-

Finds the approximate orientation (from -90 to 90 degrees) of the set bits -in the mask. This works best if performed on a mask with only one -connected component.

-
-
Returns
-

the orientation of the set bits in the mask, it will return -0.0 if the mask has no bits set

-
-
Return type
-

float

-
-
-
-

Note

-

See connected_component() for details on how a connected -component is calculated.

-
-
- -
-
-outline()¶
-
-
Returns a list of points outlining an object
-
outline() -> [(x, y), ...]
-
outline(every=1) -> [(x, y), ...]
-
-

Returns a list of points of the outline of the first connected component -encountered in the mask. To find a connected component, the mask is -searched per row (left to right) starting in the top left corner.

-

The every optional parameter skips set bits in the outline. For -example, setting it to 10 would return a list of every 10th set bit in the -outline.

-
-
Parameters
-

every (int) -- (optional) indicates the number of bits to skip over in -the outline (default is 1)

-
-
Returns
-

a list of points outlining the first connected component -encountered, an empty list is returned if the mask has no bits set

-
-
Return type
-

list[tuple(int, int)]

-
-
-
-

Note

-

See connected_component() for details on how a connected -component is calculated.

-
-
- -
-
-convolve()¶
-
-
Returns the convolution of this mask with another mask
-
convolve(other) -> Mask
-
convolve(other, output=None, offset=(0, 0)) -> Mask
-
-

Convolve this mask with the given other Mask.

-
-
Parameters
-
    -
  • other (Mask) -- mask to convolve this mask with

  • -
  • output (Mask or NoneType) -- (optional) mask for output (default is None)

  • -
  • offset -- the offset of other from this mask, (default is -(0, 0))

  • -
-
-
Returns
-

a Mask with the (i - offset[0], j - offset[1]) bit -set, if shifting other (such that its bottom right corner is at -(i, j)) causes it to overlap with this mask

-

If an output Mask is specified, the output is drawn onto it and -it is returned. Otherwise a mask of size (MAX(0, width + other mask's -width - 1), MAX(0, height + other mask's height - 1)) is created and -returned.

-

-
-
Return type
-

Mask

-
-
-
- -
-
-connected_component()¶
-
-
Returns a mask containing a connected component
-
connected_component() -> Mask
-
connected_component(pos) -> Mask
-
-

A connected component is a group (1 or more) of connected set bits -(orthogonally and diagonally). The SAUF algorithm, which checks 8 point -connectivity, is used to find a connected component in the mask.

-

By default this method will return a Mask containing the largest -connected component in the mask. Optionally, a bit coordinate can be -specified and the connected component containing it will be returned. If -the bit at the given location is not set, the returned Mask will -be empty (no bits set).

-
-
Parameters
-

pos -- (optional) selects the connected component that contains the -bit at this position

-
-
Returns
-

a Mask object (same size as this mask) with the largest -connected component from this mask, if this mask has no bits set then -an empty mask will be returned

-

If the pos parameter is provided then the mask returned will have -the connected component that contains this position. An empty mask will -be returned if the pos parameter selects an unset bit.

-

-
-
Return type
-

Mask

-
-
Raises
-

IndexError -- if the optional pos parameter is outside of the -mask's bounds

-
-
-
- -
-
-connected_components()¶
-
-
Returns a list of masks of connected components
-
connected_components() -> [Mask, ...]
-
connected_components(minimum=0) -> [Mask, ...]
-
-

Provides a list containing a Mask object for each connected -component.

-
-
Parameters
-

minimum (int) -- (optional) indicates the minimum number of bits (to -filter out noise) per connected component (default is 0, which equates -to no minimum and is equivalent to setting it to 1, as a connected -component must have at least 1 bit set)

-
-
Returns
-

a list containing a Mask object for each connected -component, an empty list is returned if the mask has no bits set

-
-
Return type
-

list[Mask]

-
-
-
-

Note

-

See connected_component() for details on how a connected -component is calculated.

-
-
- -
-
-get_bounding_rects()¶
-
-
Returns a list of bounding rects of connected components
-
get_bounding_rects() -> [Rect, ...]
-
-

Provides a list containing a bounding rect for each connected component.

-
-
Returns
-

a list containing a bounding rect for each connected component, -an empty list is returned if the mask has no bits set

-
-
Return type
-

list[Rect]

-
-
-
-

Note

-

See connected_component() for details on how a connected -component is calculated.

-
-
- -
-
-to_surface()¶
-
-
Returns a surface with the mask drawn on it
-
to_surface() -> Surface
-
to_surface(surface=None, setsurface=None, unsetsurface=None, setcolor=(255, 255, 255, 255), unsetcolor=(0, 0, 0, 255), dest=(0, 0)) -> Surface
-
-

Draws this mask on the given surface. Set bits (bits set to 1) and unset -bits (bits set to 0) can be drawn onto a surface.

-
-
Parameters
-
    -
  • surface (Surface or None) -- (optional) Surface to draw mask onto, if no surface is -provided one will be created (default is None, which will cause a -surface with the parameters -Surface(size=mask.get_size(), flags=SRCALPHA, depth=32) to be -created, drawn on, and returned)

  • -
  • setsurface (Surface or None) -- (optional) use this surface's color values to draw -set bits (default is None), if this surface is smaller than the -mask any bits outside its bounds will use the setcolor value

  • -
  • unsetsurface (Surface or None) -- (optional) use this surface's color values to draw -unset bits (default is None), if this surface is smaller than the -mask any bits outside its bounds will use the unsetcolor value

  • -
  • setcolor (Color or str or int or tuple(int, int, int, [int]) or -list(int, int, int, [int]) or None) -- (optional) color to draw set bits (default is -(255, 255, 255, 255), white), use None to skip drawing the set -bits, the setsurface parameter (if set) will takes precedence over -this parameter

  • -
  • unsetcolor (Color or str or int or tuple(int, int, int, [int]) or -list(int, int, int, [int]) or None) -- (optional) color to draw unset bits (default is -(0, 0, 0, 255), black), use None to skip drawing the unset -bits, the unsetsurface parameter (if set) will takes precedence -over this parameter

  • -
  • dest (Rect or tuple(int, int) or list(int, int) or Vector2(int, int)) -- (optional) surface destination of where to position the -topleft corner of the mask being drawn (default is (0, 0)), if a -Rect is used as the dest parameter, its x and y attributes -will be used as the destination, NOTE1: rects with a negative width -or height value will not be normalized before using their x and -y values, NOTE2: this destination value is only used to -position the mask on the surface, it does not offset the setsurface -and unsetsurface from the mask, they are always aligned with the -mask (i.e. position (0, 0) on the mask always corresponds to -position (0, 0) on the setsurface and unsetsurface)

  • -
-
-
Returns
-

the surface parameter (or a newly created surface if no -surface parameter was provided) with this mask drawn on it

-
-
Return type
-

Surface

-
-
Raises
-

ValueError -- if the setsurface parameter or unsetsurface -parameter does not have the same format (bytesize/bitsize/alpha) as -the surface parameter

-
-
-
-

Note

-

To skip drawing the set bits, both setsurface and setcolor must -be None. The setsurface parameter defaults to None, but -setcolor defaults to a color value and therefore must be set to -None.

-
-
-

Note

-

To skip drawing the unset bits, both unsetsurface and -unsetcolor must be None. The unsetsurface parameter -defaults to None, but unsetcolor defaults to a color value and -therefore must be set to None.
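For debugging, a mask is often rendered as white set bits on a transparent background; a sketch (sprite_image, sprite_rect, and screen are assumed names):

mask = pygame.mask.from_surface(sprite_image)
# unsetcolor=None skips the unset bits, leaving them transparent
# on the SRCALPHA surface that to_surface() creates by default.
debug_surface = mask.to_surface(setcolor=(255, 255, 255, 255), unsetcolor=None)
screen.blit(debug_surface, sprite_rect)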

-
-
-

New in pygame 2.0.0.

-
-
- -
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/math.html b/venv/Lib/site-packages/pygame/docs/generated/ref/math.html deleted file mode 100644 index fe824c5..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/math.html +++ /dev/null @@ -1,1509 +0,0 @@ - - - - - - - - - pygame.math — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.math
-
-
pygame module for vector classes
-
pygame.math.Vector2 — a 2-Dimensional Vector
pygame.math.Vector3 — a 3-Dimensional Vector

The pygame math module currently provides Vector classes in two and three -dimensions, Vector2 and Vector3 respectively.

-

They support the following numerical operations: vec+vec, vec-vec, -vec*number, number*vec, vec/number, vec//number, vec+=vec, -vec-=vec, vec*=number, vec/=number, vec//=number.

-

All these operations will be performed elementwise. -In addition vec*vec will perform a scalar-product (a.k.a. dot-product). -If you want to multiply every element from vector v with every element from -vector w you can use the elementwise method: v.elementwise() * w

-

The coordinates of a vector can be retrieved or set using attributes or -subscripts

-
v = pygame.Vector3()

v.x = 5
v[1] = 2 * v.x
print(v[1]) # 10

v.x == v[0]
v.y == v[1]
v.z == v[2]
-
-
-

Multiple coordinates can be set using slices or swizzling

-
v = pygame.Vector2()
v.xy = 1, 2
v[:] = 1, 2
-
-
-
-

New in pygame 1.9.2pre.

-
-
-

Changed in pygame 1.9.4: Removed experimental notice.

-
-
-

Changed in pygame 1.9.4: Allow scalar construction like GLSL Vector2(2) == Vector2(2.0, 2.0)

-
-
-

Changed in pygame 1.9.4: pygame.math no longer requires a separate import. More convenient pygame.Vector2 and pygame.Vector3.

-
-
-
-pygame.math.Vector2¶
-
-
a 2-Dimensional Vector
-
Vector2() -> Vector2
-
Vector2(int) -> Vector2
-
Vector2(float) -> Vector2
-
Vector2(Vector2) -> Vector2
-
Vector2(x, y) -> Vector2
-
Vector2((x, y)) -> Vector2
-
dot() — calculates the dot- or scalar-product with the other vector
cross() — calculates the cross- or vector-product
magnitude() — returns the Euclidean magnitude of the vector.
magnitude_squared() — returns the squared magnitude of the vector.
length() — returns the Euclidean length of the vector.
length_squared() — returns the squared Euclidean length of the vector.
normalize() — returns a vector with the same direction but length 1.
normalize_ip() — normalizes the vector in place so that its length is 1.
is_normalized() — tests if the vector is normalized i.e. has length == 1.
scale_to_length() — scales the vector to a given length.
reflect() — returns a vector reflected of a given normal.
reflect_ip() — reflect the vector of a given normal in place.
distance_to() — calculates the Euclidean distance to a given vector.
distance_squared_to() — calculates the squared Euclidean distance to a given vector.
lerp() — returns a linear interpolation to the given vector.
slerp() — returns a spherical interpolation to the given vector.
elementwise() — The next operation will be performed elementwise.
rotate() — rotates a vector by a given angle in degrees.
rotate_rad() — rotates a vector by a given angle in radians.
rotate_ip() — rotates the vector by a given angle in degrees in place.
rotate_ip_rad() — rotates the vector by a given angle in radians in place.
rotate_rad_ip() — rotates the vector by a given angle in radians in place.
angle_to() — calculates the angle to a given vector in degrees.
as_polar() — returns a tuple with radial distance and azimuthal angle.
from_polar() — Sets x and y from a polar coordinates tuple.
project() — projects a vector onto another.
copy() — Returns a copy of itself.
update() — Sets the coordinates of the vector.
-

Some general information about the Vector2 class.

-
-
-dot()¶
-
-
calculates the dot- or scalar-product with the other vector
-
dot(Vector2) -> float
-
-
- -
-
-cross()¶
-
-
calculates the cross- or vector-product
-
cross(Vector2) -> Vector2
-
-

calculates the third component of the cross-product.

-
- -
-
-magnitude()¶
-
-
returns the Euclidean magnitude of the vector.
-
magnitude() -> float
-
-

calculates the magnitude of the vector which follows from the -theorem: vec.magnitude() == math.sqrt(vec.x**2 + vec.y**2)

-
- -
-
-magnitude_squared()¶
-
-
returns the squared magnitude of the vector.
-
magnitude_squared() -> float
-
-

calculates the magnitude of the vector which follows from the -theorem: vec.magnitude_squared() == vec.x**2 + vec.y**2. This -is faster than vec.magnitude() because it avoids the square root.

-
- -
-
-length()¶
-
-
returns the Euclidean length of the vector.
-
length() -> float
-
-

calculates the Euclidean length of the vector which follows from the -Pythagorean theorem: vec.length() == math.sqrt(vec.x**2 + vec.y**2)

-
- -
-
-length_squared()¶
-
-
returns the squared Euclidean length of the vector.
-
length_squared() -> float
-
-

calculates the Euclidean length of the vector which follows from the -Pythagorean theorem: vec.length_squared() == vec.x**2 + vec.y**2. -This is faster than vec.length() because it avoids the square root.

-
- -
-
-normalize()¶
-
-
returns a vector with the same direction but length 1.
-
normalize() -> Vector2
-
-

Returns a new vector that has length equal to 1 and the same -direction as self.

-
- -
-
-normalize_ip()¶
-
-
normalizes the vector in place so that its length is 1.
-
normalize_ip() -> None
-
-

Normalizes the vector so that it has length equal to 1. -The direction of the vector is not changed.

-
- -
-
-is_normalized()¶
-
-
tests if the vector is normalized i.e. has length == 1.
-
is_normalized() -> Bool
-
-

Returns True if the vector has length equal to 1. Otherwise -it returns False.

-
- -
-
-scale_to_length()¶
-
-
scales the vector to a given length.
-
scale_to_length(float) -> None
-
-

Scales the vector so that it has the given length. The direction of the -vector is not changed. You can also scale to length 0. If the vector -is the zero vector (i.e. has length 0 thus no direction) a -ValueError is raised.
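Example (a short illustrative sketch):

v = pygame.Vector2(3, 4)          # length 5
v.scale_to_length(10)             # v is now (6.0, 8.0)
# pygame.Vector2(0, 0).scale_to_length(1)  # would raise ValueError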

-
- -
-
-reflect()¶
-
-
returns a vector reflected of a given normal.
-
reflect(Vector2) -> Vector2
-
-

Returns a new vector that points in the direction as if self would bounce off a surface characterized by the given surface normal. The length of the new vector is the same as self's.

-
- -
-
-reflect_ip()¶
-
-
reflect the vector of a given normal in place.
-
reflect_ip(Vector2) -> None
-
-

Changes the direction of self as if it had been reflected off a surface with the given surface normal.

-
- -
-
-distance_to()¶
-
-
calculates the Euclidean distance to a given vector.
-
distance_to(Vector2) -> float
-
-
- -
-
-distance_squared_to()¶
-
-
calculates the squared Euclidean distance to a given vector.
-
distance_squared_to(Vector2) -> float
-
-
- -
-
-lerp()¶
-
-
returns a linear interpolation to the given vector.
-
lerp(Vector2, float) -> Vector2
-
-

Returns a Vector which is a linear interpolation between self and the -given Vector. The second parameter determines how far between self and -other the result is going to be. It must be a value between 0 and 1 -where 0 means self and 1 means other will be returned.
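Example:

a = pygame.Vector2(0, 0)
b = pygame.Vector2(10, 20)
a.lerp(b, 0.0)   # Vector2(0, 0), i.e. self
a.lerp(b, 0.5)   # Vector2(5, 10), the halfway point
a.lerp(b, 1.0)   # Vector2(10, 20), i.e. other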

-
- -
-
-slerp()¶
-
-
returns a spherical interpolation to the given vector.
-
slerp(Vector2, float) -> Vector2
-
-

Calculates the spherical interpolation from self to the given Vector. The -second argument - often called t - must be in the range [-1, 1]. It -parametrizes where - in between the two vectors - the result should be. -If a negative value is given the interpolation will not take the -complement of the shortest path.

-
- -
-
-elementwise()¶
-
-
The next operation will be performed elementwise.
-
elementwise() -> VectorElementwiseProxy
-
-

Applies the following operation to each element of the vector.

-
- -
-
-rotate()¶
-
-
rotates a vector by a given angle in degrees.
-
rotate(angle) -> Vector2
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise by the given angle in degrees. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).
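Example:

v = pygame.Vector2(1, 0)
v.rotate(90)            # approximately Vector2(0, 1)
v.rotate(45).length()   # approximately 1.0; the length is preserved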

-
- -
-
-rotate_rad()¶
-
-
rotates a vector by a given angle in radians.
-
rotate_rad(angle) -> Vector2
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise by the given angle in radians. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.0.0.

-
-
- -
-
-rotate_ip()¶
-
-
rotates the vector by a given angle in degrees in place.
-
rotate_ip(angle) -> None
-
-

Rotates the vector counterclockwise by the given angle in degrees. The -length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_ip_rad()¶
-
-
rotates the vector by a given angle in radians in place.
-
rotate_ip_rad(angle) -> None
-
-

DEPRECATED: Use rotate_rad_ip() instead.

-
-

New in pygame 2.0.0.

-
-
-

Deprecated since pygame 2.1.1.

-
-
- -
-
-rotate_rad_ip()¶
-
-
rotates the vector by a given angle in radians in place.
-
rotate_rad_ip(angle) -> None
-
-

Rotates the vector counterclockwise by the given angle in radians. The -length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.1.1.

-
-
- -
-
-angle_to()¶
-
-
calculates the angle to a given vector in degrees.
-
angle_to(Vector2) -> float
-
-

Returns the angle between self and the given vector.

-
- -
-
-as_polar()¶
-
-
returns a tuple with radial distance and azimuthal angle.
-
as_polar() -> (r, phi)
-
-

Returns a tuple (r, phi) where r is the radial distance, and phi -is the azimuthal angle.

-
- -
-
-from_polar()¶
-
-
Sets x and y from a polar coordinates tuple.
-
from_polar((r, phi)) -> None
-
-

Sets x and y from a tuple (r, phi) where r is the radial distance, and -phi is the azimuthal angle.
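Example (a small round-trip sketch; angles are in degrees):

v = pygame.Vector2(1, 1)
r, phi = v.as_polar()    # r is about 1.414, phi is 45.0
w = pygame.Vector2()
w.from_polar((r, phi))   # w is approximately (1, 1) again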

-
- -
-
-project()¶
-
-
projects a vector onto another.
-
project(Vector2) -> Vector2
-
-

Returns the projected vector. This is useful for collision detection in finding the components in a certain direction (e.g. in direction of the wall). -For a more detailed explanation see Wikipedia.
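For instance, to keep only the component of a velocity that runs along a wall (the variable names below are illustrative):

velocity = pygame.Vector2(3, 4)
wall_direction = pygame.Vector2(1, 0)
along_wall = velocity.project(wall_direction)   # Vector2(3, 0)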

-
-

New in pygame 2.0.2.

-
-
- -
-
-copy()¶
-
-
Returns a copy of itself.
-
copy() -> Vector2
-
-

Returns a new Vector2 having the same dimensions.

-
-

New in pygame 2.1.1.

-
-
- -
-
-update()¶
-
-
Sets the coordinates of the vector.
-
update() -> None
-
update(int) -> None
-
update(float) -> None
-
update(Vector2) -> None
-
update(x, y) -> None
-
update((x, y)) -> None
-
-

Sets coordinates x and y in place.

-
-

New in pygame 1.9.5.

-
-
- -
- -
-
-pygame.math.Vector3¶
-
-
a 3-Dimensional Vector
-
Vector3() -> Vector3
-
Vector3(int) -> Vector3
-
Vector3(float) -> Vector3
-
Vector3(Vector3) -> Vector3
-
Vector3(x, y, z) -> Vector3
-
Vector3((x, y, z)) -> Vector3
-
dot() — calculates the dot- or scalar-product with the other vector
cross() — calculates the cross- or vector-product
magnitude() — returns the Euclidean magnitude of the vector.
magnitude_squared() — returns the squared Euclidean magnitude of the vector.
length() — returns the Euclidean length of the vector.
length_squared() — returns the squared Euclidean length of the vector.
normalize() — returns a vector with the same direction but length 1.
normalize_ip() — normalizes the vector in place so that its length is 1.
is_normalized() — tests if the vector is normalized i.e. has length == 1.
scale_to_length() — scales the vector to a given length.
reflect() — returns a vector reflected of a given normal.
reflect_ip() — reflect the vector of a given normal in place.
distance_to() — calculates the Euclidean distance to a given vector.
distance_squared_to() — calculates the squared Euclidean distance to a given vector.
lerp() — returns a linear interpolation to the given vector.
slerp() — returns a spherical interpolation to the given vector.
elementwise() — The next operation will be performed elementwise.
rotate() — rotates a vector by a given angle in degrees.
rotate_rad() — rotates a vector by a given angle in radians.
rotate_ip() — rotates the vector by a given angle in degrees in place.
rotate_ip_rad() — rotates the vector by a given angle in radians in place.
rotate_rad_ip() — rotates the vector by a given angle in radians in place.
rotate_x() — rotates a vector around the x-axis by the angle in degrees.
rotate_x_rad() — rotates a vector around the x-axis by the angle in radians.
rotate_x_ip() — rotates the vector around the x-axis by the angle in degrees in place.
rotate_x_ip_rad() — rotates the vector around the x-axis by the angle in radians in place.
rotate_x_rad_ip() — rotates the vector around the x-axis by the angle in radians in place.
rotate_y() — rotates a vector around the y-axis by the angle in degrees.
rotate_y_rad() — rotates a vector around the y-axis by the angle in radians.
rotate_y_ip() — rotates the vector around the y-axis by the angle in degrees in place.
rotate_y_ip_rad() — rotates the vector around the y-axis by the angle in radians in place.
rotate_y_rad_ip() — rotates the vector around the y-axis by the angle in radians in place.
rotate_z() — rotates a vector around the z-axis by the angle in degrees.
rotate_z_rad() — rotates a vector around the z-axis by the angle in radians.
rotate_z_ip() — rotates the vector around the z-axis by the angle in degrees in place.
rotate_z_ip_rad() — rotates the vector around the z-axis by the angle in radians in place.
rotate_z_rad_ip() — rotates the vector around the z-axis by the angle in radians in place.
angle_to() — calculates the angle to a given vector in degrees.
as_spherical() — returns a tuple with radial distance, inclination and azimuthal angle.
from_spherical() — Sets x, y and z from a spherical coordinates 3-tuple.
project() — projects a vector onto another.
copy() — Returns a copy of itself.
update() — Sets the coordinates of the vector.
-

Some general information about the Vector3 class.

-
-
-dot()¶
-
-
calculates the dot- or scalar-product with the other vector
-
dot(Vector3) -> float
-
-
- -
-
-cross()¶
-
-
calculates the cross- or vector-product
-
cross(Vector3) -> Vector3
-
-

calculates the cross-product.

-
- -
-
-magnitude()¶
-
-
returns the Euclidean magnitude of the vector.
-
magnitude() -> float
-
-

calculates the magnitude of the vector which follows from the -theorem: vec.magnitude() == math.sqrt(vec.x**2 + vec.y**2 + vec.z**2)

-
- -
-
-magnitude_squared()¶
-
-
returns the squared Euclidean magnitude of the vector.
-
magnitude_squared() -> float
-
-

calculates the magnitude of the vector which follows from the -theorem: -vec.magnitude_squared() == vec.x**2 + vec.y**2 + vec.z**2. -This is faster than vec.magnitude() because it avoids the -square root.

-
- -
-
-length()¶
-
-
returns the Euclidean length of the vector.
-
length() -> float
-
-

calculates the Euclidean length of the vector which follows from the -Pythagorean theorem: -vec.length() == math.sqrt(vec.x**2 + vec.y**2 + vec.z**2)

-
- -
-
-length_squared()¶
-
-
returns the squared Euclidean length of the vector.
-
length_squared() -> float
-
-

calculates the Euclidean length of the vector which follows from the -Pythagorean theorem: -vec.length_squared() == vec.x**2 + vec.y**2 + vec.z**2. -This is faster than vec.length() because it avoids the square root.

-
- -
-
-normalize()¶
-
-
returns a vector with the same direction but length 1.
-
normalize() -> Vector3
-
-

Returns a new vector that has length equal to 1 and the same -direction as self.

-
- -
-
-normalize_ip()¶
-
-
normalizes the vector in place so that its length is 1.
-
normalize_ip() -> None
-
-

Normalizes the vector so that it has length equal to 1. The -direction of the vector is not changed.

-
- -
-
-is_normalized()¶
-
-
tests if the vector is normalized i.e. has length == 1.
-
is_normalized() -> Bool
-
-

Returns True if the vector has length equal to 1. Otherwise it -returns False.

-
- -
-
-scale_to_length()¶
-
-
scales the vector to a given length.
-
scale_to_length(float) -> None
-
-

Scales the vector so that it has the given length. The direction of the -vector is not changed. You can also scale to length 0. If the vector -is the zero vector (i.e. has length 0 thus no direction) a -ValueError is raised.

-
- -
-
-reflect()¶
-
-
returns a vector reflected of a given normal.
-
reflect(Vector3) -> Vector3
-
-

Returns a new vector that points in the direction as if self would bounce off a surface characterized by the given surface normal. The length of the new vector is the same as self's.

-
- -
-
-reflect_ip()¶
-
-
reflect the vector of a given normal in place.
-
reflect_ip(Vector3) -> None
-
-

Changes the direction of self as if it had been reflected off a surface with the given surface normal.

-
- -
-
-distance_to()¶
-
-
calculates the Euclidean distance to a given vector.
-
distance_to(Vector3) -> float
-
-
- -
-
-distance_squared_to()¶
-
-
calculates the squared Euclidean distance to a given vector.
-
distance_squared_to(Vector3) -> float
-
-
- -
-
-lerp()¶
-
-
returns a linear interpolation to the given vector.
-
lerp(Vector3, float) -> Vector3
-
-

Returns a Vector which is a linear interpolation between self and the given Vector. The second parameter determines how far between self and other the result is going to be. It must be a value between 0 and 1, where 0 means self and 1 means other will be returned.

-
- -
-
-slerp()¶
-
-
returns a spherical interpolation to the given vector.
-
slerp(Vector3, float) -> Vector3
-
-

Calculates the spherical interpolation from self to the given Vector. The -second argument - often called t - must be in the range [-1, 1]. It -parametrizes where - in between the two vectors - the result should be. -If a negative value is given the interpolation will not take the -complement of the shortest path.

-
- -
-
-elementwise()¶
-
-
The next operation will be performed elementwise.
-
elementwise() -> VectorElementwiseProxy
-
-

Applies the following operation to each element of the vector.

-
- -
-
-rotate()¶
-
-
rotates a vector by a given angle in degrees.
-
rotate(angle, Vector3) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise by the given angle in degrees around the given axis. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).
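Example (rotating around the z-axis):

v = pygame.Vector3(1, 0, 0)
axis = pygame.Vector3(0, 0, 1)
v.rotate(90, axis)   # approximately Vector3(0, 1, 0)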

-
- -
-
-rotate_rad()¶
-
-
rotates a vector by a given angle in radians.
-
rotate_rad(angle, Vector3) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise by the given angle in radians around the given axis. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.0.0.

-
-
- -
-
-rotate_ip()¶
-
-
rotates the vector by a given angle in degrees in place.
-
rotate_ip(angle, Vector3) -> None
-
-

Rotates the vector counterclockwise around the given axis by the given -angle in degrees. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_ip_rad()¶
-
-
rotates the vector by a given angle in radians in place.
-
rotate_ip_rad(angle, Vector3) -> None
-
-

DEPRECATED: Use rotate_rad_ip() instead.

-
-

New in pygame 2.0.0.

-
-
-

Deprecated since pygame 2.1.1.

-
-
- -
-
-rotate_rad_ip()¶
-
-
rotates the vector by a given angle in radians in place.
-
rotate_rad_ip(angle, Vector3) -> None
-
-

Rotates the vector counterclockwise around the given axis by the given -angle in radians. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.1.1.

-
-
- -
-
-rotate_x()¶
-
-
rotates a vector around the x-axis by the angle in degrees.
-
rotate_x(angle) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise around the x-axis by the given angle in degrees. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_x_rad()¶
-
-
rotates a vector around the x-axis by the angle in radians.
-
rotate_x_rad(angle) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise around the x-axis by the given angle in radians. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.0.0.

-
-
- -
-
-rotate_x_ip()¶
-
-
rotates the vector around the x-axis by the angle in degrees in place.
-
rotate_x_ip(angle) -> None
-
-

Rotates the vector counterclockwise around the x-axis by the given angle -in degrees. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_x_ip_rad()¶
-
-
rotates the vector around the x-axis by the angle in radians in place.
-
rotate_x_ip_rad(angle) -> None
-
-

DEPRECATED: Use rotate_x_rad_ip() instead.

-
-

New in pygame 2.0.0.

-
-
-

Deprecated since pygame 2.1.1.

-
-
- -
-
-rotate_x_rad_ip()¶
-
-
rotates the vector around the x-axis by the angle in radians in place.
-
rotate_x_rad_ip(angle) -> None
-
-

Rotates the vector counterclockwise around the x-axis by the given angle -in radians. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.1.1.

-
-
- -
-
-rotate_y()¶
-
-
rotates a vector around the y-axis by the angle in degrees.
-
rotate_y(angle) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise around the y-axis by the given angle in degrees. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_y_rad()¶
-
-
rotates a vector around the y-axis by the angle in radians.
-
rotate_y_rad(angle) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise around the y-axis by the given angle in radians. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.0.0.

-
-
- -
-
-rotate_y_ip()¶
-
-
rotates the vector around the y-axis by the angle in degrees in place.
-
rotate_y_ip(angle) -> None
-
-

Rotates the vector counterclockwise around the y-axis by the given angle -in degrees. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_y_ip_rad()¶
-
-
rotates the vector around the y-axis by the angle in radians in place.
-
rotate_y_ip_rad(angle) -> None
-
-

DEPRECATED: Use rotate_y_rad_ip() instead.

-
-

New in pygame 2.0.0.

-
-
-

Deprecated since pygame 2.1.1.

-
-
- -
-
-rotate_y_rad_ip()¶
-
-
rotates the vector around the y-axis by the angle in radians in place.
-
rotate_y_rad_ip(angle) -> None
-
-

Rotates the vector counterclockwise around the y-axis by the given angle -in radians. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.1.1.

-
-
- -
-
-rotate_z()¶
-
-
rotates a vector around the z-axis by the angle in degrees.
-
rotate_z(angle) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise around the z-axis by the given angle in degrees. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_z_rad()¶
-
-
rotates a vector around the z-axis by the angle in radians.
-
rotate_z_rad(angle) -> Vector3
-
-

Returns a vector which has the same length as self but is rotated -counterclockwise around the z-axis by the given angle in radians. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.0.0.

-
-
- -
-
-rotate_z_ip()¶
-
-
rotates the vector around the z-axis by the angle in degrees in place.
-
rotate_z_ip(angle) -> None
-
-

Rotates the vector counterclockwise around the z-axis by the given angle -in degrees. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
- -
-
-rotate_z_ip_rad()¶
-
-
rotates the vector around the z-axis by the angle in radians in place.
-
rotate_z_ip_rad(angle) -> None
-
-

DEPRECATED: Use rotate_z_rad_ip() instead.

-
-

Deprecated since pygame 2.1.1.

-
-
- -
-
-rotate_z_rad_ip()¶
-
-
rotates the vector around the z-axis by the angle in radians in place.
-
rotate_z_rad_ip(angle) -> None
-
-

Rotates the vector counterclockwise around the z-axis by the given angle -in radians. The length of the vector is not changed. -(Note that due to pygame's inverted y coordinate system, the rotation -will look clockwise if displayed).

-
-

New in pygame 2.1.1.

-
-
- -
-
-angle_to()¶
-
-
calculates the angle to a given vector in degrees.
-
angle_to(Vector3) -> float
-
-

Returns the angle between self and the given vector.

-
- -
-
-as_spherical()¶
-
-
returns a tuple with radial distance, inclination and azimuthal angle.
-
as_spherical() -> (r, theta, phi)
-
-

Returns a tuple (r, theta, phi) where r is the radial distance, theta is -the inclination angle and phi is the azimuthal angle.

-
- -
-
-from_spherical()¶
-
-
Sets x, y and z from a spherical coordinates 3-tuple.
-
from_spherical((r, theta, phi)) -> None
-
-

Sets x, y and z from a tuple (r, theta, phi) where r is the radial -distance, theta is the inclination angle and phi is the azimuthal angle.

-
- -
-
-project()¶
-
-
projects a vector onto another.
-
project(Vector3) -> Vector3
-
-

Returns the projected vector. This is useful for collision detection in finding the components in a certain direction (e.g. in direction of the wall). -For a more detailed explanation see Wikipedia.

-
-

New in pygame 2.0.2.

-
-
- -
-
-copy()¶
-
-
Returns a copy of itself.
-
copy() -> Vector3
-
-

Returns a new Vector3 having the same dimensions.

-
-

New in pygame 2.1.1.

-
-
- -
-
-update()¶
-
-
Sets the coordinates of the vector.
-
update() -> None
-
update(int) -> None
-
update(float) -> None
-
update(Vector3) -> None
-
update(x, y, z) -> None
-
update((x, y, z)) -> None
-
-

Sets coordinates x, y, and z in place.

-
-

New in pygame 1.9.5.

-
-
- -
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/midi.html b/venv/Lib/site-packages/pygame/docs/generated/ref/midi.html
deleted file mode 100644
index de8cabe..0000000
pygame.midi — pygame v2.1.2 documentation
-pygame.midi
-
-
pygame module for interacting with midi input and output.
-
pygame.midi.init() — initialize the midi module
pygame.midi.quit() — uninitialize the midi module
pygame.midi.get_init() — returns True if the midi module is currently initialized
pygame.midi.Input — Input is used to get midi input from midi devices.
pygame.midi.Output — Output is used to send midi to an output device
pygame.midi.get_count() — gets the number of devices.
pygame.midi.get_default_input_id() — gets default input device number
pygame.midi.get_default_output_id() — gets default output device number
pygame.midi.get_device_info() — returns information about a midi device
pygame.midi.midis2events() — converts midi events to pygame events
pygame.midi.time() — returns the current time in ms of the PortMidi timer
pygame.midi.frequency_to_midi() — Converts a frequency into a MIDI note. Rounds to the closest midi note.
pygame.midi.midi_to_frequency() — Converts a midi note to a frequency.
pygame.midi.midi_to_ansi_note() — Returns the Ansi Note name for a midi number.
pygame.midi.MidiException — exception that pygame.midi functions and classes can raise
-
-

New in pygame 1.9.0.

-
-

The midi module can send output to midi devices and get input from midi -devices. It can also list midi devices on the system.

-

The midi module supports real and virtual midi devices.

-

It uses the portmidi library and is portable to whichever platforms portmidi supports (currently Windows, Mac OS X, and Linux).

-

This uses pyportmidi for now, but may use its own bindings at some point in the -future. The pyportmidi bindings are included with pygame.
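As a rough sketch of basic module usage (device ids and names depend on the system):

import pygame.midi

pygame.midi.init()
for device_id in range(pygame.midi.get_count()):
    interf, name, is_input, is_output, opened = pygame.midi.get_device_info(device_id)
    print(device_id, interf, name, is_input, is_output)
print("default output id:", pygame.midi.get_default_output_id())
pygame.midi.quit()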

-
-

-
-
-

New in pygame 2.0.0.

-
-

These are pygame events (pygame.event) reserved for midi use. The MIDIIN event is used by pygame.midi.midis2events() when converting midi events to pygame events.

-
MIDIIN
-MIDIOUT
-
-
-
-

-
-
-
-pygame.midi.init()¶
-
-
initialize the midi module
-
init() -> None
-
-

Initializes the pygame.midi module. Must be called before using the pygame.midi module.

-

It is safe to call this more than once.

-
- -
-
-pygame.midi.quit()¶
-
-
uninitialize the midi module
-
quit() -> None
-
-

Uninitializes the pygame.midi module. If pygame.midi.init() was called to initialize the pygame.midi module, then this function will be called automatically when your program exits.

-

It is safe to call this function more than once.

-
- -
-
-pygame.midi.get_init()¶
-
-
returns True if the midi module is currently initialized
-
get_init() -> bool
-
-

Gets the initialization state of the pygame.midi module.

-
-
Returns
-

True if the pygame.midi module is currently initialized.

-
-
Return type
-

bool

-
-
-
-

New in pygame 1.9.5.

-
-
- -
-
-pygame.midi.Input¶
-
-
Input is used to get midi input from midi devices.
-
Input(device_id) -> None
-
Input(device_id, buffer_size) -> None
-
close() — closes a midi stream, flushing any pending buffers.
poll() — returns True if there's data, or False if not.
read() — reads num_events midi events from the buffer.
-
-
Parameters
-
    -
  • device_id (int) -- midi device id

  • -
  • buffer_size (int) -- (optional) the number of input events to be buffered

  • -
-
-
-
-
-close()¶
-
-
closes a midi stream, flushing any pending buffers.
-
close() -> None
-
-

PortMidi attempts to close open streams when the application exits.

-
-

Note

-

This is particularly difficult under Windows.

-
-
- -
-
-poll()¶
-
-
returns True if there's data, or False if not.
-
poll() -> bool
-
-

Used to indicate if any data exists.

-
-
Returns
-

True if there is data, False otherwise

-
-
Return type
-

bool

-
-
Raises
-

MidiException -- on error

-
-
-
- -
-
-read()¶
-
-
reads num_events midi events from the buffer.
-
read(num_events) -> midi_event_list
-
-

Reads from the input buffer and gives back midi events.

-
-
Parameters
-

num_events (int) -- number of input events to read

-
-
Returns
-

the format for midi_event_list is -[[[status, data1, data2, data3], timestamp], ...]

-
-
Return type
-

list
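A sketch of a typical polling loop that forwards midi input to the pygame event queue (the running flag is assumed to come from the surrounding program, which has already initialized pygame and pygame.midi):

input_id = pygame.midi.get_default_input_id()
midi_in = pygame.midi.Input(input_id)
while running:
    if midi_in.poll():
        for event in pygame.midi.midis2events(midi_in.read(10), input_id):
            pygame.event.post(event)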

-
-
-
- -
- -
-
-pygame.midi.Output¶
-
-
Output is used to send midi to an output device
-
Output(device_id) -> None
-
Output(device_id, latency=0) -> None
-
Output(device_id, buffer_size=256) -> None
-
Output(device_id, latency, buffer_size) -> None
-
abort() — terminates outgoing messages immediately
close() — closes a midi stream, flushing any pending buffers.
note_off() — turns a midi note off (note must be on)
note_on() — turns a midi note on (note must be off)
set_instrument() — select an instrument, with a value between 0 and 127
pitch_bend() — modify the pitch of a channel.
write() — writes a list of midi data to the Output
write_short() — writes up to 3 bytes of midi data to the Output
write_sys_ex() — writes a timestamped system-exclusive midi message.
-

The buffer_size specifies the number of output events to be buffered -waiting for output. In some cases (see below) PortMidi does not buffer -output at all and merely passes data to a lower-level API, in which case -buffersize is ignored.

-

latency is the delay in milliseconds applied to timestamps to determine when the output should actually occur. If latency is < 0, 0 is assumed.

-

If latency is zero, timestamps are ignored and all output is delivered -immediately. If latency is greater than zero, output is delayed until the -message timestamp plus the latency. In some cases, PortMidi can obtain -better timing than your application by passing timestamps along to the -device driver or hardware. Latency may also help you to synchronize midi -data to audio data by matching midi latency to the audio buffer latency.

-
-

Note

-

Time is measured relative to the time source indicated by time_proc. -Timestamps are absolute, not relative delays or offsets.

-
-
-
-abort()¶
-
-
terminates outgoing messages immediately
-
abort() -> None
-
-

The caller should immediately close the output port; this call may result -in transmission of a partial midi message. There is no abort for Midi -input because the user can simply ignore messages in the buffer and close -an input device at any time.

-
- -
-
-close()¶
-
-
closes a midi stream, flushing any pending buffers.
-
close() -> None
-
-

PortMidi attempts to close open streams when the application exits.

-
-

Note

-

This is particularly difficult under Windows.

-
-
- -
-
-note_off()¶
-
-
turns a midi note off (note must be on)
-
note_off(note, velocity=None, channel=0) -> None
-
-

Turn a note off in the output stream. The note must already be on for -this to work correctly.

-
- -
-
-note_on()¶
-
-
turns a midi note on (note must be off)
-
note_on(note, velocity=None, channel=0) -> None
-
-

Turn a note on in the output stream. The note must already be off for -this to work correctly.
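A small sketch that plays middle C for half a second on the default output device:

player = pygame.midi.Output(pygame.midi.get_default_output_id())
player.set_instrument(0)           # instrument 0 is usually an acoustic grand piano
player.note_on(60, velocity=100)   # note 60 is middle C
pygame.time.wait(500)
player.note_off(60, velocity=100)
player.close()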

-
- -
-
-set_instrument()¶
-
-
select an instrument, with a value between 0 and 127
-
set_instrument(instrument_id, channel=0) -> None
-
-

Select an instrument.

-
- -
-
-pitch_bend()¶
-
-
modify the pitch of a channel.
-
pitch_bend(value=0, channel=0) -> None
-
-

Adjust the pitch of a channel. The value is a signed integer -from -8192 to +8191. For example, 0 means "no change", +4096 is -typically a semitone higher, and -8192 is 1 whole tone lower (though -the musical range corresponding to the pitch bend range can also be -changed in some synthesizers).

-

If no value is given, the pitch bend is returned to "no change".

-
-

New in pygame 1.9.4.

-
-
- -
-
-write()¶
-
-
writes a list of midi data to the Output
-
write(data) -> None
-
-

Writes series of MIDI information in the form of a list.

-
-
Parameters
-

data (list) -- data to write, the expected format is -[[[status, data1=0, data2=0, ...], timestamp], ...] -with the data# fields being optional

-
-
Raises
-

IndexError -- if more than 1024 elements in the data list

-
-
-

Example:

-
# Program change at time 20000 and 500 ms later send note 60 with
# velocity 100.
write([[[0xc0, 0, 0], 20000], [[0x90, 60, 100], 20500]])
-
-
-
-

Note

-
    -
  • Timestamps will be ignored if latency = 0

  • -
  • To get a note to play immediately, send MIDI info with timestamp -read from function Time

  • -
  • Optional data fields: write([[[0xc0, 0, 0], 20000]]) is -equivalent to write([[[0xc0], 20000]])

  • -
-
-
- -
-
-write_short()¶
-
-
writes up to 3 bytes of midi data to the Output
-
write_short(status) -> None
-
write_short(status, data1=0, data2=0) -> None
-
-

Output MIDI information of 3 bytes or less. The data fields are -optional and assumed to be 0 if omitted.

-

Examples of status byte values:

-
0xc0  # program change
0x90  # note on
# etc.
-
-
-

Example:

-
# note 65 on with velocity 100
write_short(0x90, 65, 100)
-
-
-
- -
-
-write_sys_ex()¶
-
-
writes a timestamped system-exclusive midi message.
-
write_sys_ex(when, msg) -> None
-
-

Writes a timestamped system-exclusive midi message.

-
-
Parameters
-
    -
  • msg (list[int] or str) -- midi message

  • -
  • when -- timestamp in milliseconds

  • -
-
-
-

Example:

-
midi_output.write_sys_ex(0, '\xF0\x7D\x10\x11\x12\x13\xF7')

# is equivalent to

midi_output.write_sys_ex(pygame.midi.time(),
                         [0xF0, 0x7D, 0x10, 0x11, 0x12, 0x13, 0xF7])
-
-
-
- -
- -
-
-pygame.midi.get_count()¶
-
-
gets the number of devices.
-
get_count() -> num_devices
-
-

Device ids range from 0 to get_count() - 1

-
- -
-
-pygame.midi.get_default_input_id()¶
-
-
gets default input device number
-
get_default_input_id() -> default_id
-
-

The following describes the usage details for this function and the -get_default_output_id() function.

-

Return the default device ID or -1 if there are no devices. The result -can be passed to the Input/Output class.

-

On a PC the user can specify a default device by setting an environment -variable. To use device #1, for example:

-
set PM_RECOMMENDED_INPUT_DEVICE=1
or
set PM_RECOMMENDED_OUTPUT_DEVICE=1
-
-
-

The user should first determine the available device ID by using the -supplied application "testin" or "testout".

-

In general, the registry is a better place for this kind of info. With -USB devices that can come and go, using integers is not very reliable -for device identification. Under Windows, if PM_RECOMMENDED_INPUT_DEVICE -(or PM_RECOMMENDED_OUTPUT_DEVICE) is NOT found in the environment, -then the default device is obtained by looking for a string in the registry -under:

-
HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Input_Device
or
HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Output_Device
-
-
-

The number of the first device with a substring that matches the -string exactly is returned. For example, if the string in the registry is -"USB" and device 1 is named "In USB MidiSport 1x1", then that will be -the default input because it contains the string "USB".

-

In addition to the name, get_device_info() returns "interf", which is -the interface name. The "interface" is the underlying software system or -API used by PortMidi to access devices. Supported interfaces:

-
MMSystem   # the only Win32 interface currently supported
ALSA       # the only Linux interface currently supported
CoreMIDI   # the only Mac OS X interface currently supported
# DirectX - not implemented
# OSS     - not implemented
-
-
-

To specify both the interface and the device name in the registry, separate -the two with a comma and a space. The string before the comma must be a -substring of the "interf" string and the string after the space must be a -substring of the "name" name string in order to match the device. e.g.:

-
MMSystem, In USB MidiSport 1x1
-
-
-
-

Note

-

In the current release, the default is simply the first device (the -input or output device with the lowest PmDeviceID).

-
-
- -
-
-pygame.midi.get_default_output_id()¶
-
-
gets default output device number
-
get_default_output_id() -> default_id
-
-

See get_default_input_id() for usage details.

-
- -
-
-pygame.midi.get_device_info()¶
-
-
returns information about a midi device
-
get_device_info(an_id) -> (interf, name, input, output, opened)
-
get_device_info(an_id) -> None
-
-

Gets the device info for a given id.

-
-
Parameters
-

an_id (int) -- id of the midi device being queried

-
-
Returns
-

if the id is out of range None is returned, otherwise -a tuple of (interf, name, input, output, opened) is returned.

-
-
    -
  • interf: string describing the device interface (e.g. 'ALSA')

  • -
  • name: string name of the device (e.g. 'Midi Through Port-0')

  • -
  • input: 1 if the device is an input device, otherwise 0

  • -
  • output: 1 if the device is an output device, otherwise 0

  • -
  • opened: 1 if the device is opened, otherwise 0

  • -
-
-

-
-
Return type
-

tuple or None

-
-
-
- -
-
-pygame.midi.midis2events()¶
-
-
converts midi events to pygame events
-
midis2events(midi_events, device_id) -> [Event, ...]
-
-

Takes a sequence of midi events and returns list of pygame events.

-

The midi_events data is expected to be a sequence of -((status, data1, data2, data3), timestamp) midi events (all values -required).

-
-
Returns
-

a list of pygame events of event type MIDIIN

-
-
Return type
-

list

-
-
-
- -
-
-pygame.midi.time()¶
-
-
returns the current time in ms of the PortMidi timer
-
time() -> time
-
-

The time is reset to 0 when the pygame.midi module is initialized.

-
- -
-
-pygame.midi.frequency_to_midi()¶
-
-
Converts a frequency into a MIDI note. Rounds to the closest midi note.
-
frequency_to_midi(frequency) -> midi_note
-
-

example:

-
frequency_to_midi(27.5) == 21
-
-
-
-

New in pygame 1.9.5.

-
-
- -
-
-pygame.midi.midi_to_frequency()¶
-
-
Converts a midi note to a frequency.
-
midi_to_frequency(midi_note) -> frequency
-
-

example:

-
midi_to_frequency(21) == 27.5
-
-
-
-

New in pygame 1.9.5.

-
-
- -
-
-pygame.midi.midi_to_ansi_note()¶
-
-
Returns the Ansi Note name for a midi number.
-
midi_to_ansi_note(midi_note) -> ansi_note
-
-

example:

-
midi_to_ansi_note(21) == 'A0'
-
-
-
-

New in pygame 1.9.5.

-
-
- -
-
-exception pygame.midi.MidiException¶
-
-
exception that pygame.midi functions and classes can raise
-
MidiException(errno) -> None
-
-
- -
- -
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/mixer.html b/venv/Lib/site-packages/pygame/docs/generated/ref/mixer.html
deleted file mode 100644
index 76c2ca9..0000000
pygame.mixer — pygame v2.1.2 documentation
-pygame.mixer
-
-
pygame module for loading and playing sounds
-
pygame.mixer.init() — initialize the mixer module
pygame.mixer.pre_init() — preset the mixer init arguments
pygame.mixer.quit() — uninitialize the mixer
pygame.mixer.get_init() — test if the mixer is initialized
pygame.mixer.stop() — stop playback of all sound channels
pygame.mixer.pause() — temporarily stop playback of all sound channels
pygame.mixer.unpause() — resume paused playback of sound channels
pygame.mixer.fadeout() — fade out the volume on all sounds before stopping
pygame.mixer.set_num_channels() — set the total number of playback channels
pygame.mixer.get_num_channels() — get the total number of playback channels
pygame.mixer.set_reserved() — reserve channels from being automatically used
pygame.mixer.find_channel() — find an unused channel
pygame.mixer.get_busy() — test if any sound is being mixed
pygame.mixer.get_sdl_mixer_version() — get the mixer's SDL version
pygame.mixer.Sound — Create a new Sound object from a file or buffer object
pygame.mixer.Channel — Create a Channel object for controlling playback
-

This module contains classes for loading Sound objects and controlling playback. The mixer module is optional and depends on SDL_mixer. Your program should test that pygame.mixer is available and initialized before using it.

-

The mixer module has a limited number of channels for playback of sounds. -Usually programs tell pygame to start playing audio and it selects an available -channel automatically. The default is 8 simultaneous channels, but complex -programs can get more precise control over the number of channels and their -use.

-

All sound playback is mixed in background threads. When you begin to play a -Sound object, it will return immediately while the sound continues to play. A -single Sound object can also be actively played back multiple times.

-

The mixer also has a special streaming channel. This is for music playback and is accessed through the pygame.mixer.music module. Consider using this module for playing long running music. Unlike the mixer module, the music module streams the music from files without loading it into memory all at once.

-

The mixer module must be initialized like other pygame modules, but it has some -extra conditions. The pygame.mixer.init() function takes several optional -arguments to control the playback rate and sample size. Pygame will default to -reasonable values, but pygame cannot perform Sound resampling, so the mixer -should be initialized to match the values of your audio resources.

-

NOTE: For less laggy sound use a smaller buffer size. The default is set to reduce the chance of scratchy sounds on some computers. You can change the default buffer by calling pygame.mixer.pre_init() before pygame.mixer.init() or pygame.init() is called. For example: pygame.mixer.pre_init(44100, -16, 2, 1024)
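A minimal sketch of that pattern (the filename is a placeholder):

pygame.mixer.pre_init(44100, -16, 2, 1024)   # request a smaller buffer before init
pygame.init()                                # initializes the mixer with the preset values
sound = pygame.mixer.Sound("example.wav")    # placeholder path
sound.play()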

-
-
-pygame.mixer.init()¶
-
-
initialize the mixer module
-
init(frequency=44100, size=-16, channels=2, buffer=512, devicename=None, allowedchanges=AUDIO_ALLOW_FREQUENCY_CHANGE | AUDIO_ALLOW_CHANNELS_CHANGE) -> None
-
-

Initialize the mixer module for Sound loading and playback. The default -arguments can be overridden to provide specific audio mixing. Keyword -arguments are accepted. For backwards compatibility, argument values of -0 are replaced with the startup defaults, except for allowedchanges, -where -1 is used. (startup defaults may be changed by a pre_init() call).

-

The size argument represents how many bits are used for each audio sample. -If the value is negative then signed sample values will be used. Positive -values mean unsigned audio samples will be used. An invalid value raises an -exception.

-

The channels argument is used to specify whether to use mono or stereo. 1 -for mono and 2 for stereo.

-

The buffer argument controls the number of internal samples used in the -sound mixer. The default value should work for most cases. It can be lowered -to reduce latency, but sound dropout may occur. It can be raised to larger -values to ensure playback never skips, but it will impose latency on sound -playback. The buffer size must be a power of two (if not it is rounded up to -the next nearest power of 2).

-

Some platforms require the pygame.mixer module to be initialized after the display modules have initialized. The top level pygame.init() takes care of this automatically, but cannot pass any arguments to the mixer init. To solve this, mixer has a function pygame.mixer.pre_init() to set the proper defaults before the toplevel init is used.

-

When using allowedchanges=0 it will convert the samples at runtime to match -what the hardware supports. For example a sound card may not -support 16bit sound samples, so instead it will use 8bit samples internally. -If AUDIO_ALLOW_FORMAT_CHANGE is supplied, then the requested format will -change to the closest that SDL2 supports.

-

Apart from 0, allowedchanges accepts the following constants ORed together:

-
-
    -
  • AUDIO_ALLOW_FREQUENCY_CHANGE

  • -
  • AUDIO_ALLOW_FORMAT_CHANGE

  • -
  • AUDIO_ALLOW_CHANNELS_CHANGE

  • -
  • AUDIO_ALLOW_ANY_CHANGE

  • -
-
-

It is safe to call this more than once, but after the mixer is initialized -you cannot change the playback arguments without first calling -pygame.mixer.quit().

-
-

Changed in pygame 1.8: The default buffersize changed from 1024 to 3072.

-
-
-

Changed in pygame 1.9.1: The default buffersize changed from 3072 to 4096.

-
-
-

Changed in pygame 2.0.0: The default buffersize changed from 4096 to 512.

-
-
-

Changed in pygame 2.0.0: The default frequency changed from 22050 to 44100.

-
-
-

Changed in pygame 2.0.0: size can be 32 (32-bit floats).

-
-
-

Changed in pygame 2.0.0: channels can also be 4 or 6.

-
-
-

New in pygame 2.0.0: allowedchanges, devicename arguments added

-
-
- -
-
-pygame.mixer.pre_init()¶
-
-
preset the mixer init arguments
-
pre_init(frequency=44100, size=-16, channels=2, buffer=512, devicename=None, allowedchanges=AUDIO_ALLOW_FREQUENCY_CHANGE | AUDIO_ALLOW_CHANNELS_CHANGE) -> None
-
-

Call pre_init to change the defaults used when the real -pygame.mixer.init() is called. Keyword arguments are accepted. The best -way to set custom mixer playback values is to call -pygame.mixer.pre_init() before calling the top level pygame.init(). -For backwards compatibility, argument values of 0 are replaced with the -startup defaults, except for allowedchanges, where -1 is used.

-
-

Changed in pygame 1.8: The default buffersize changed from 1024 to 3072.

-
-
-

Changed in pygame 1.9.1: The default buffersize changed from 3072 to 4096.

-
-
-

Changed in pygame 2.0.0: The default buffersize changed from 4096 to 512.

-
-
-

Changed in pygame 2.0.0: The default frequency changed from 22050 to 44100.

-
-
-

New in pygame 2.0.0: allowedchanges, devicename arguments added

-
-
- -
-
-pygame.mixer.quit()¶
-
-
uninitialize the mixer
-
quit() -> None
-
-

This will uninitialize pygame.mixer. All playback will stop and any loaded Sound objects may not be compatible with the mixer if it is reinitialized later.

-
- -
-
-pygame.mixer.get_init()¶
-
-
test if the mixer is initialized
-
get_init() -> (frequency, format, channels)
-
-

If the mixer is initialized, this returns the playback arguments it is -using. If the mixer has not been initialized this returns None.

-
- -
-
-pygame.mixer.stop()¶
-
-
stop playback of all sound channels
-
stop() -> None
-
-

This will stop all playback of all active mixer channels.

-
- -
-
-pygame.mixer.pause()¶
-
-
temporarily stop playback of all sound channels
-
pause() -> None
-
-

This will temporarily stop all playback on the active mixer channels. The -playback can later be resumed with pygame.mixer.unpause()

-
- -
-
-pygame.mixer.unpause()¶
-
-
resume paused playback of sound channels
-
unpause() -> None
-
-

This will resume all active sound channels after they have been paused.

-
- -
-
-pygame.mixer.fadeout()¶
-
-
fade out the volume on all sounds before stopping
-
fadeout(time) -> None
-
-

This will fade out the volume on all active channels over the time argument -in milliseconds. After the sound is muted the playback will stop.

-
- -
-
-pygame.mixer.set_num_channels()¶
-
-
set the total number of playback channels
-
set_num_channels(count) -> None
-
-

Sets the number of available channels for the mixer. The default value is 8. -The value can be increased or decreased. If the value is decreased, sounds -playing on the truncated channels are stopped.

-
- -
-
-pygame.mixer.get_num_channels()¶
-
-
get the total number of playback channels
-
get_num_channels() -> count
-
-

Returns the number of currently active playback channels.

-
- -
-
-pygame.mixer.set_reserved()¶
-
-
reserve channels from being automatically used
-
set_reserved(count) -> count
-
-

The mixer can reserve any number of channels that will not be automatically -selected for playback by Sounds. If sounds are currently playing on the -reserved channels they will not be stopped.

-

This allows the application to reserve a specific number of channels for -important sounds that must not be dropped or have a guaranteed channel to -play on.

-

Will return number of channels actually reserved, this may be less than requested -depending on the number of channels previously allocated.

-
- -
-
-pygame.mixer.find_channel()¶
-
-
find an unused channel
-
find_channel(force=False) -> Channel
-
-

This will find and return an inactive Channel object. If there are no -inactive Channels this function will return None. If there are no -inactive channels and the force argument is True, this will find the -Channel with the longest running Sound and return it.

-
- -
-
-pygame.mixer.get_busy()¶
-
-
test if any sound is being mixed
-
get_busy() -> bool
-
-

Returns True if the mixer is busy mixing any channels. If the mixer is idle then this returns False.

-
- -
-
-pygame.mixer.get_sdl_mixer_version()¶
-
-
get the mixer's SDL version
-
get_sdl_mixer_version() -> (major, minor, patch)
-
get_sdl_mixer_version(linked=True) -> (major, minor, patch)
-
-
-
Parameters
-

linked (bool) -- if True (default) the linked version number is -returned, otherwise the compiled version number is returned

-
-
Returns
-

the mixer's SDL library version number (linked or compiled -depending on the linked parameter) as a tuple of 3 integers -(major, minor, patch)

-
-
Return type
-

tuple

-
-
-
-

Note

-

The linked and compile version numbers should be the same.

-
-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.mixer.Sound¶
-
-
Create a new Sound object from a file or buffer object
-
Sound(filename) -> Sound
-
Sound(file=filename) -> Sound
-
Sound(file=pathlib_path) -> Sound
-
Sound(buffer) -> Sound
-
Sound(buffer=buffer) -> Sound
-
Sound(object) -> Sound
-
Sound(file=object) -> Sound
-
Sound(array=object) -> Sound
-
play() — begin sound playback
stop() — stop sound playback
fadeout() — stop sound playback after fading out
set_volume() — set the playback volume for this Sound
get_volume() — get the playback volume
get_num_channels() — count how many times this Sound is playing
get_length() — get the length of the Sound
get_raw() — return a bytestring copy of the Sound samples.
-

Load a new sound buffer from a filename, a python file object or a readable -buffer object. Limited resampling will be performed to help the sample match -the initialize arguments for the mixer. A Unicode string can only be a file -pathname. A bytes object can be either a pathname or a buffer object. -Use the 'file' or 'buffer' keywords to avoid ambiguity; otherwise Sound may -guess wrong. If the array keyword is used, the object is expected to export -a new buffer interface (The object is checked for a buffer interface first.)

-

The Sound object represents actual sound sample data. Methods that change the state of the Sound object will affect all instances of the Sound playback. A Sound object also exports a new buffer interface.

-

The Sound can be loaded from an OGG audio file or from an uncompressed -WAV.

-

Note: The buffer will be copied internally, no data will be shared between -it and the Sound object.

-

For now buffer and array support is consistent with sndarray.make_sound -for Numeric arrays, in that sample sign and byte order are ignored. This -will change, either by correctly handling sign and byte order, or by raising -an exception when different. Also, source samples are truncated to fit the -audio sample size. This will not change.

-
-

New in pygame 1.8: pygame.mixer.Sound(buffer)

-
-
-

New in pygame 1.9.2: pygame.mixer.Sound keyword arguments and array interface support

-
-
-

New in pygame 2.0.1: pathlib.Path support on Python 3.

-
-
-
-play()¶
-
-
begin sound playback
-
play(loops=0, maxtime=0, fade_ms=0) -> Channel
-
-

Begin playback of the Sound (i.e., on the computer's speakers) on an -available Channel. This will forcibly select a Channel, so playback may -cut off a currently playing sound if necessary.

-

The loops argument controls how many times the sample will be repeated -after being played the first time. A value of 5 means that the sound will -be played once, then repeated five times, and so is played a total of six -times. The default value (zero) means the Sound is not repeated, and so -is only played once. If loops is set to -1 the Sound will loop -indefinitely (though you can still call stop() to stop it).

-

The maxtime argument can be used to stop playback after a given number of -milliseconds.

-

The fade_ms argument will make the sound start playing at 0 volume and -fade up to full volume over the time given. The sample may end before the -fade-in is complete.

-

This returns the Channel object for the channel that was selected.
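Example (the filename is a placeholder):

crash = pygame.mixer.Sound("crash.wav")
channel = crash.play(loops=2, fade_ms=200)   # played 3 times in total, fading in over 200 ms
if channel is not None:
    channel.set_volume(0.5)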

-
- -
-
-stop()¶
-
-
stop sound playback
-
stop() -> None
-
-

This will stop the playback of this Sound on any active Channels.

-
- -
-
-fadeout()¶
-
-
stop sound playback after fading out
-
fadeout(time) -> None
-
-

This will stop playback of the sound after fading it out over the time -argument in milliseconds. The Sound will fade and stop on all actively -playing channels.

-
- -
-
-set_volume()¶
-
-
set the playback volume for this Sound
-
set_volume(value) -> None
-
-

This will set the playback volume (loudness) for this Sound. This will -immediately affect the Sound if it is playing. It will also affect any -future playback of this Sound.

-
-
Parameters
-

value (float) --

volume in the range of 0.0 to 1.0 (inclusive)

-
-
If value < 0.0, the volume will not be changed
-
If value > 1.0, the volume will be set to 1.0
-
-

-
-
-
- -
-
-get_volume()¶
-
-
get the playback volume
-
get_volume() -> value
-
-

Return a value from 0.0 to 1.0 representing the volume for this Sound.

-
- -
-
-get_num_channels()¶
-
-
count how many times this Sound is playing
-
get_num_channels() -> count
-
-

Return the number of active channels this sound is playing on.

-
- -
-
-get_length()¶
-
-
get the length of the Sound
-
get_length() -> seconds
-
-

Return the length of this Sound in seconds.

-
- -
-
-get_raw()¶
-
-
return a bytestring copy of the Sound samples.
-
get_raw() -> bytes
-
-

Return a copy of the Sound object buffer as a bytes.

-
-

New in pygame 1.9.2.

-
-
- -
- -
-
-pygame.mixer.Channel¶
-
-
Create a Channel object for controlling playback
-
Channel(id) -> Channel
-
play() — play a Sound on a specific Channel
stop() — stop playback on a Channel
pause() — temporarily stop playback of a channel
unpause() — resume paused playback of a channel
fadeout() — stop playback after fading channel out
set_volume() — set the volume of a playing channel
get_volume() — get the volume of the playing channel
get_busy() — check if the channel is active
get_sound() — get the currently playing Sound
queue() — queue a Sound object to follow the current
get_queue() — return any Sound that is queued
set_endevent() — have the channel send an event when playback stops
get_endevent() — get the event a channel sends when playback stops
-

Return a Channel object for one of the current channels. The id must be a -value from 0 to the value of pygame.mixer.get_num_channels().

-

The Channel object can be used to get fine control over the playback of Sounds. A channel can only play back a single Sound at a time. Using channels is entirely optional since pygame can manage them by default.
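A minimal sketch of reserving a specific channel for one kind of sound (file name hypothetical):

import pygame

pygame.mixer.init()
pygame.mixer.set_num_channels(8)        # make sure at least 8 channels exist
voice = pygame.mixer.Channel(3)         # id must be < pygame.mixer.get_num_channels()
line = pygame.mixer.Sound("hello.ogg")  # hypothetical file
voice.play(line)
print(voice.get_busy())                 # True while the sound is still mixing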

-
-
-play()¶
-
-
play a Sound on a specific Channel
-
play(Sound, loops=0, maxtime=0, fade_ms=0) -> None
-
-

This will begin playback of a Sound on a specific Channel. If the Channel -is currently playing any other Sound it will be stopped.

-

The loops argument has the same meaning as in Sound.play(): it is the -number of times to repeat the sound after the first time. If it is 3, the -sound will be played 4 times (the first time, then three more). If loops -is -1 then the playback will repeat indefinitely.

-

As in Sound.play(), the maxtime argument can be used to stop playback -of the Sound after a given number of milliseconds.

-

As in Sound.play(), the fade_ms argument can be used to fade in the sound.

-
- -
-
-stop()¶
-
-
stop playback on a Channel
-
stop() -> None
-
-

Stop sound playback on a channel. After playback is stopped the channel -becomes available for new Sounds to play on it.

-
- -
-
-pause()¶
-
-
temporarily stop playback of a channel
-
pause() -> None
-
-

Temporarily stop the playback of sound on a channel. It can be resumed at -a later time with Channel.unpause()

-
- -
-
-unpause()¶
-
-
resume paused playback of a channel
-
unpause() -> None
-
-

Resume the playback on a paused channel.

-
- -
-
-fadeout()¶
-
-
stop playback after fading channel out
-
fadeout(time) -> None
-
-

Stop playback of a channel after fading out the sound over the given time -argument in milliseconds.

-
- -
-
-set_volume()¶
-
-
set the volume of a playing channel
-
set_volume(value) -> None
-
set_volume(left, right) -> None
-
-

Set the volume (loudness) of a playing sound. When a channel starts to -play its volume value is reset. This only affects the current sound. The -value argument is between 0.0 and 1.0.

-

If one argument is passed, it will be the volume of both speakers. If two -arguments are passed and the mixer is in stereo mode, the first argument -will be the volume of the left speaker and the second will be the volume -of the right speaker. (If the second argument is None, the first -argument will be the volume of both speakers.)

-

If the channel is playing a Sound on which set_volume() has also been -called, both calls are taken into account. For example:

-
sound = pygame.mixer.Sound("s.wav")
channel = sound.play()  # Sound plays at full volume by default
sound.set_volume(0.9)   # Now plays at 90% of full volume.
sound.set_volume(0.6)   # Now plays at 60% (previous value replaced).
channel.set_volume(0.5) # Now plays at 30% (0.6 * 0.5).
-
-
-
- -
-
-get_volume()¶
-
-
get the volume of the playing channel
-
get_volume() -> value
-
-

Return the volume of the channel for the current playing sound. This does -not take into account stereo separation used by -Channel.set_volume(). The Sound object also has its own volume -which is mixed with the channel.

-
- -
-
-get_busy()¶
-
-
check if the channel is active
-
get_busy() -> bool
-
-

Returns True if the channel is actively mixing sound. If the channel -is idle this returns False.

-
- -
-
-get_sound()¶
-
-
get the currently playing Sound
-
get_sound() -> Sound
-
-

Return the actual Sound object currently playing on this channel. If the -channel is idle None is returned.

-
- -
-
-queue()¶
-
-
queue a Sound object to follow the current
-
queue(Sound) -> None
-
-

When a Sound is queued on a Channel, it will begin playing immediately -after the current Sound is finished. Each channel can only have a single -Sound queued at a time. The queued Sound will only play if the current -playback finished automatically. It is cleared on any other call to -Channel.stop() or Channel.play().

-

If there is no sound actively playing on the Channel then the Sound will -begin playing immediately.
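A sketch of gapless playback with queue() (file names hypothetical):

import pygame

pygame.mixer.init()
channel = pygame.mixer.Channel(0)
intro = pygame.mixer.Sound("intro.ogg")  # hypothetical files
loop_part = pygame.mixer.Sound("loop.ogg")
channel.play(intro)
channel.queue(loop_part)    # starts as soon as intro finishes on its own
print(channel.get_queue())  # the queued Sound, until it begins playing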

-
- -
-
-get_queue()¶
-
-
return any Sound that is queued
-
get_queue() -> Sound
-
-

If a Sound is already queued on this channel it will be returned. Once -the queued sound begins playback it will no longer be on the queue.

-
- -
-
-set_endevent()¶
-
-
have the channel send an event when playback stops
-
set_endevent() -> None
-
set_endevent(type) -> None
-
-

When an endevent is set for a channel, it will send an event to the -pygame queue every time a sound finishes playing on that channel (not -just the first time). Use pygame.event.get() to retrieve the endevent -once it's sent.

-

Note that if you called Sound.play(n) or Channel.play(sound,n), -the end event is sent only once: after the sound has been played "n+1" -times (see the documentation of Sound.play).

-

If Channel.stop() or Channel.play() is called while the sound was -still playing, the event will be posted immediately.

-

The type argument will be the event id sent to the queue. This can be any -valid event type, but a good choice would be a value between -pygame.locals.USEREVENT and pygame.locals.NUMEVENTS. If no type -argument is given then the Channel will stop sending endevents.
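A sketch of reacting to the end of playback on one channel (the event id and file name are hypothetical):

import pygame

pygame.init()
screen = pygame.display.set_mode((320, 240))
channel = pygame.mixer.Channel(0)
SOUND_DONE = pygame.USEREVENT + 1             # hypothetical custom event type
channel.set_endevent(SOUND_DONE)
channel.play(pygame.mixer.Sound("beep.wav"))  # hypothetical file

running = True
while running:
    for event in pygame.event.get():
        if event.type == SOUND_DONE:
            print("channel finished playing")
            running = False
        elif event.type == pygame.QUIT:
            running = False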

-
- -
-
-get_endevent()¶
-
-
get the event a channel sends when playback stops
-
get_endevent() -> type
-
-

Returns the event type to be sent every time the Channel finishes -playback of a Sound. If there is no endevent the function returns -pygame.NOEVENT.

-
- -
- -
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/mouse.html b/venv/Lib/site-packages/pygame/docs/generated/ref/mouse.html
deleted file mode 100644
index 1af1d62..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/mouse.html
+++ /dev/null
@@ -1,406 +0,0 @@
pygame.mouse — pygame v2.1.2 documentation
-pygame.mouse
-
-
pygame module to work with the mouse
-
pygame.mouse.get_pressed() — get the state of the mouse buttons
pygame.mouse.get_pos() — get the mouse cursor position
pygame.mouse.get_rel() — get the amount of mouse movement
pygame.mouse.set_pos() — set the mouse cursor position
pygame.mouse.set_visible() — hide or show the mouse cursor
pygame.mouse.get_visible() — get the current visibility state of the mouse cursor
pygame.mouse.get_focused() — check if the display is receiving mouse input
pygame.mouse.set_cursor() — set the mouse cursor to a new cursor
pygame.mouse.get_cursor() — get the current mouse cursor
-

The mouse functions can be used to get the current state of the mouse device. -These functions can also alter the system cursor for the mouse.

-

When the display mode is set, the event queue will start receiving mouse -events. The mouse buttons generate pygame.MOUSEBUTTONDOWN and -pygame.MOUSEBUTTONUP events when they are pressed and released. These -events contain a button attribute representing which button was pressed. The -mouse wheel will generate pygame.MOUSEBUTTONDOWN and -pygame.MOUSEBUTTONUP events when rolled. The button will be set to 4 -when the wheel is rolled up, and to button 5 when the wheel is rolled down. -Whenever the mouse is moved it generates a pygame.MOUSEMOTION event. The -mouse movement is broken into small and accurate motion events. As the mouse -is moving many motion events will be placed on the queue. Mouse motion events -that are not properly cleaned from the event queue are the primary reason the -event queue fills up.

-

If the mouse cursor is hidden, and input is grabbed to the current display, the mouse will enter a virtual input mode, where the relative movements of the mouse will never be stopped by the borders of the screen. See the functions pygame.mouse.set_visible() and pygame.event.set_grab() to get this configured.
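A sketch of enabling the virtual input mode described above:

import pygame

pygame.init()
screen = pygame.display.set_mode((640, 480))
pygame.mouse.set_visible(False)  # hide the cursor ...
pygame.event.set_grab(True)      # ... and grab input: relative motion is no longer clipped

clock = pygame.time.Clock()
for _ in range(600):             # run for roughly 10 seconds at 60 fps
    pygame.event.pump()
    dx, dy = pygame.mouse.get_rel()
    if dx or dy:
        print("moved by", dx, dy)
    clock.tick(60)
pygame.quit()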

-

Mouse Wheel Behavior in pygame 2

-

There is proper functionality for mouse wheel behaviour with pygame 2 supporting pygame.MOUSEWHEEL events. The new events support horizontal and vertical scroll movements, with signed integer values representing the amount scrolled (x and y), as well as a flipped attribute (indicating that the positive and negative values for each axis are inverted). Read more about SDL2 input-related changes here https://wiki.libsdl.org/MigrationGuide#Input

-

In pygame 2, the mouse wheel functionality can be used by listening for events of type pygame.MOUSEWHEEL (bear in mind they still emit pygame.MOUSEBUTTONDOWN events like in pygame 1.x, as well). When this event is triggered, a developer can access the appropriate Event object with pygame.event.get(). The object can be used to access data about the mouse scroll, such as which (it will tell you which exact mouse device triggered the event).

-
-
Code example of mouse scroll (tested on 2.0.0.dev7)¶
-
# Taken from husano896's PR thread (slightly modified)
import pygame
from pygame.locals import *
pygame.init()
screen = pygame.display.set_mode((640, 480))
clock = pygame.time.Clock()

def main():
   while True:
      for event in pygame.event.get():
            if event.type == QUIT:
               pygame.quit()
               return
            elif event.type == MOUSEWHEEL:
               print(event)
               print(event.x, event.y)
               print(event.flipped)
               print(event.which)
               # can access properties with
               # proper notation(ex: event.y)
      clock.tick(60)

# Execute game:
main()
-
-
-
-
-
-pygame.mouse.get_pressed()¶
-
-
get the state of the mouse buttons
-
get_pressed(num_buttons=3) -> (button1, button2, button3)
-
get_pressed(num_buttons=5) -> (button1, button2, button3, button4, button5)
-
-

Returns a sequence of booleans representing the state of all the mouse -buttons. A true value means the mouse is currently being pressed at the time -of the call.

-

Note, to get all of the mouse events it is better to use either -pygame.event.wait() or pygame.event.get() and check all of those -events to see if they are MOUSEBUTTONDOWN, MOUSEBUTTONUP, or -MOUSEMOTION.

-

Note, that on X11 some X servers use middle button emulation. When you -click both buttons 1 and 3 at the same time a 2 button event -can be emitted.

-

Note, remember to call pygame.event.get() before this function. -Otherwise it will not work as expected.

-

To support five button mice, an optional parameter num_buttons has been -added in pygame 2. When this is set to 5, button4 and button5 -are added to the returned tuple. Only 3 and 5 are valid values -for this parameter.

-
-

Changed in pygame 2.0.0: num_buttons argument added
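A sketch of polling the default three buttons inside an event loop:

import pygame

pygame.init()
screen = pygame.display.set_mode((640, 480))
clock = pygame.time.Clock()
running = True
while running:
    if any(event.type == pygame.QUIT for event in pygame.event.get()):
        running = False
    left, middle, right = pygame.mouse.get_pressed()  # num_buttons=3 is the default
    if left:
        print("left button held at", pygame.mouse.get_pos())
    clock.tick(60)
pygame.quit()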

-
-
- -
-
-pygame.mouse.get_pos()¶
-
-
get the mouse cursor position
-
get_pos() -> (x, y)
-
-

Returns the x and y position of the mouse cursor. The position is -relative to the top-left corner of the display. The cursor position can be -located outside of the display window, but is always constrained to the -screen.

-
- -
-
-pygame.mouse.get_rel()¶
-
-
get the amount of mouse movement
-
get_rel() -> (x, y)
-
-

Returns the amount of movement in x and y since the previous call to -this function. The relative movement of the mouse cursor is constrained to -the edges of the screen, but see the virtual input mouse mode for a way -around this. Virtual input mode is described at the top of the page.

-
- -
-
-pygame.mouse.set_pos()¶
-
-
set the mouse cursor position
-
set_pos([x, y]) -> None
-
-

Set the current mouse position to arguments given. If the mouse cursor is -visible it will jump to the new coordinates. Moving the mouse will generate -a new pygame.MOUSEMOTION event.

-
- -
-
-pygame.mouse.set_visible()¶
-
-
hide or show the mouse cursor
-
set_visible(bool) -> bool
-
-

If the bool argument is true, the mouse cursor will be visible. This will -return the previous visible state of the cursor.

-
- -
-
-pygame.mouse.get_visible()¶
-
-
get the current visibility state of the mouse cursor
-
get_visible() -> bool
-
-

Get the current visibility state of the mouse cursor. True if the mouse is -visible, False otherwise.

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.mouse.get_focused()¶
-
-
check if the display is receiving mouse input
-
get_focused() -> bool
-
-

Returns true when pygame is receiving mouse input events (or, in windowing -terminology, is "active" or has the "focus").

-

This method is most useful when working in a window. By contrast, in -full-screen mode, this method always returns true.

-

Note: under MS Windows, the window that has the mouse focus also has the -keyboard focus. But under X-Windows, one window can receive mouse events and -another receive keyboard events. pygame.mouse.get_focused() indicates -whether the pygame window receives mouse events.

-
- -
-
-pygame.mouse.set_cursor()¶
-
-
set the mouse cursor to a new cursor
-
set_cursor(pygame.cursors.Cursor) -> None
-
set_cursor(size, hotspot, xormasks, andmasks) -> None
-
set_cursor(hotspot, surface) -> None
-
set_cursor(constant) -> None
-
-

Set the mouse cursor to something new. This function accepts either an explicit -Cursor object or arguments to create a Cursor object.

-

See pygame.cursors.Cursor for help creating cursors and for examples.
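Two ways this is commonly called, sketched under the assumption of a pygame 2 build (SYSTEM_CURSOR_HAND is a pygame 2 constant):

import pygame

pygame.init()
screen = pygame.display.set_mode((640, 480))
pygame.mouse.set_cursor(*pygame.cursors.diamond)    # (size, hotspot, xormasks, andmasks) form
pygame.mouse.set_cursor(pygame.SYSTEM_CURSOR_HAND)  # constant form, pygame 2 only
print(pygame.mouse.get_cursor())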

-
-

Changed in pygame 2.0.1.

-
-
- -
-
-pygame.mouse.get_cursor()¶
-
-
get the current mouse cursor
-
get_cursor() -> pygame.cursors.Cursor
-
-

Get the information about the mouse system cursor. The return value contains the same data as the arguments passed into pygame.mouse.set_cursor().

-
-

Note

-

Code that unpacked a get_cursor() call into -size, hotspot, xormasks, andmasks will still work, -assuming the call returns an old school type cursor.

-
-
-

Changed in pygame 2.0.1.

-
-
- -
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/music.html b/venv/Lib/site-packages/pygame/docs/generated/ref/music.html
deleted file mode 100644
index f9d86ba..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/music.html
+++ /dev/null
@@ -1,502 +0,0 @@
pygame.mixer.music — pygame v2.1.2 documentation
-
-pygame.mixer.music
-
-
pygame module for controlling streamed audio
-
pygame.mixer.music.load() — Load a music file for playback
pygame.mixer.music.unload() — Unload the currently loaded music to free up resources
pygame.mixer.music.play() — Start the playback of the music stream
pygame.mixer.music.rewind() — restart music
pygame.mixer.music.stop() — stop the music playback
pygame.mixer.music.pause() — temporarily stop music playback
pygame.mixer.music.unpause() — resume paused music
pygame.mixer.music.fadeout() — stop music playback after fading out
pygame.mixer.music.set_volume() — set the music volume
pygame.mixer.music.get_volume() — get the music volume
pygame.mixer.music.get_busy() — check if the music stream is playing
pygame.mixer.music.set_pos() — set position to play from
pygame.mixer.music.get_pos() — get the music play time
pygame.mixer.music.queue() — queue a sound file to follow the current
pygame.mixer.music.set_endevent() — have the music send an event when playback stops
pygame.mixer.music.get_endevent() — get the event a channel sends when playback stops
-

The music module is closely tied to pygame.mixer. Use the music module to control the playback of music in the sound mixer.

-

The difference between the music playback and regular Sound playback is that -the music is streamed, and never actually loaded all at once. The mixer system -only supports a single music stream at once.

-

On older pygame versions, MP3 support was limited under Mac and Linux. This -changed in pygame v2.0.2 which got improved MP3 support. Consider using -OGG file format for music as that can give slightly better compression than -MP3 in most cases.

-
-
-pygame.mixer.music.load()¶
-
-
Load a music file for playback
-
load(filename) -> None
-
load(fileobj, namehint="") -> None
-
-

This will load a music filename/file object and prepare it for playback. If -a music stream is already playing it will be stopped. This does not start -the music playing.

-

If you are loading from a file object, the namehint parameter can be used to specify -the type of music data in the object. For example: load(fileobj, "ogg").

-
-

Changed in pygame 2.0.2: Added optional namehint argument

-
-
- -
-
-pygame.mixer.music.unload()¶
-
-
Unload the currently loaded music to free up resources
-
unload() -> None
-
-

This closes resources like files for any music that may be loaded.

-
-

New in pygame 2.0.0.

-
-
- -
-
-pygame.mixer.music.play()¶
-
-
Start the playback of the music stream
-
play(loops=0, start=0.0, fade_ms=0) -> None
-
-

This will play the loaded music stream. If the music is already playing it -will be restarted.

-

loops is an optional integer argument, which is 0 by default, which -indicates how many times to repeat the music. The music repeats indefinitely if -this argument is set to -1.

-

start is an optional float argument, which is 0.0 by default, which -denotes the position in time from which the music starts playing. The starting -position depends on the format of the music played. MP3 and OGG use -the position as time in seconds. For MP3 files the start time position -selected may not be accurate as things like variable bit rate encoding and ID3 -tags can throw off the timing calculations. For MOD music it is the pattern -order number. Passing a start position will raise a NotImplementedError if -the start position cannot be set.

-

fade_ms is an optional integer argument, which is 0 by default, -which denotes the period of time (in milliseconds) over which the music -will fade up from volume level 0.0 to full volume (or the volume level -previously set by set_volume()). The sample may end before the fade-in -is complete. If the music is already streaming fade_ms is ignored.

-
-

Changed in pygame 2.0.0: Added optional fade_ms argument
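A sketch of typical background-music usage (file name hypothetical):

import pygame

pygame.mixer.init()
pygame.mixer.music.load("theme.ogg")   # hypothetical file
pygame.mixer.music.set_volume(0.7)
pygame.mixer.music.play(-1, 0.0, 2000) # loops=-1: loop forever, start=0.0, fade in over 2000 ms
# ... later, when leaving the scene:
pygame.mixer.music.fadeout(1000)       # fade out and stop over 1 second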

-
-
- -
-
-pygame.mixer.music.rewind()¶
-
-
restart music
-
rewind() -> None
-
-

Resets playback of the current music to the beginning. If pause() has previously been used to pause the music, the music will remain paused.

-
-

Note

-

rewind() supports a limited number of file types and notably -WAV files are NOT supported. For unsupported file types use play() -which will restart the music that's already playing (note that this -will start the music playing again even if previously paused).

-
-
- -
-
-pygame.mixer.music.stop()¶
-
-
stop the music playback
-
stop() -> None
-
-

Stops the music playback if it is currently playing. -endevent will be triggered, if set. -It won't unload the music.

-
- -
-
-pygame.mixer.music.pause()¶
-
-
temporarily stop music playback
-
pause() -> None
-
-

Temporarily stop playback of the music stream. It can be resumed with the -unpause() function.

-
- -
-
-pygame.mixer.music.unpause()¶
-
-
resume paused music
-
unpause() -> None
-
-

This will resume the playback of a music stream after it has been paused.

-
- -
-
-pygame.mixer.music.fadeout()¶
-
-
stop music playback after fading out
-
fadeout(time) -> None
-
-

Fade out and stop the currently playing music.

-

The time argument denotes the integer milliseconds for which the -fading effect is generated.

-

Note, that this function blocks until the music has faded out. Calls -to fadeout() and set_volume() will have no effect during -this time. If an event was set using set_endevent() it will be -called after the music has faded.

-
- -
-
-pygame.mixer.music.set_volume()¶
-
-
set the music volume
-
set_volume(volume) -> None
-
-

Set the volume of the music playback.

-

The volume argument is a float between 0.0 and 1.0 that sets -the volume level. When new music is loaded the volume is reset to full -volume. If volume is a negative value it will be ignored and the -volume will remain set at the current level. If the volume argument -is greater than 1.0, the volume will be set to 1.0.

-
- -
-
-pygame.mixer.music.get_volume()¶
-
-
get the music volume
-
get_volume() -> value
-
-

Returns the current volume for the mixer. The value will be between 0.0 -and 1.0.

-
- -
-
-pygame.mixer.music.get_busy()¶
-
-
check if the music stream is playing
-
get_busy() -> bool
-
-

Returns True when the music stream is actively playing. When the music is -idle this returns False. In pygame 2.0.1 and above this function returns -False when the music is paused. In pygame 1 it returns True when the music -is paused.

-
-

Changed in pygame 2.0.1: Returns False when music paused.

-
-
- -
-
-pygame.mixer.music.set_pos()¶
-
-
set position to play from
-
set_pos(pos) -> None
-
-

This sets the position in the music file where playback will start. -The meaning of "pos", a float (or a number that can be converted to a float), -depends on the music format.

-

For MOD files, pos is the integer pattern number in the module. -For OGG it is the absolute position, in seconds, from -the beginning of the sound. For MP3 files, it is the relative position, -in seconds, from the current position. For absolute positioning in an MP3 -file, first call rewind().

-

Other file formats are unsupported. Newer versions of SDL_mixer have -better positioning support than earlier ones. An SDLError is raised if a -particular format does not support positioning.

-

Function set_pos() calls the underlying SDL_mixer function Mix_SetMusicPosition.

-
-

New in pygame 1.9.2.

-
-
- -
-
-pygame.mixer.music.get_pos()¶
-
-
get the music play time
-
get_pos() -> time
-
-

This gets the number of milliseconds that the music has been playing for. -The returned time only represents how long the music has been playing; it -does not take into account any starting position offsets.

-
- -
-
-pygame.mixer.music.queue()¶
-
-
queue a sound file to follow the current
-
queue(filename) -> None
-
queue(fileobj, namehint="", loops=0) -> None
-
-

This will load a sound file and queue it. A queued sound file will begin as -soon as the current sound naturally ends. Only one sound can be queued at a -time. Queuing a new sound while another sound is queued will result in the -new sound becoming the queued sound. Also, if the current sound is ever -stopped or changed, the queued sound will be lost.

-

If you are loading from a file object, the namehint parameter can be used to specify -the type of music data in the object. For example: queue(fileobj, "ogg").

-

The following example will play music by Bach six times, then play music by -Mozart once:

-
pygame.mixer.music.load('bach.ogg')
pygame.mixer.music.play(5)        # Plays six times, not five!
pygame.mixer.music.queue('mozart.ogg')
-
-
-
-

Changed in pygame 2.0.2: Added optional namehint argument

-
-
- -
-
-pygame.mixer.music.set_endevent()¶
-
-
have the music send an event when playback stops
-
set_endevent() -> None
-
set_endevent(type) -> None
-
-

This causes pygame to signal (by means of the event queue) when the music is -done playing. The argument determines the type of event that will be queued.

-

The event will be queued every time the music finishes, not just the first -time. To stop the event from being queued, call this method with no -argument.
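A sketch of a simple playlist driven by the end event (event id and file names hypothetical):

import pygame

pygame.init()
screen = pygame.display.set_mode((320, 240))
MUSIC_END = pygame.USEREVENT + 2       # hypothetical custom event type
pygame.mixer.music.set_endevent(MUSIC_END)
pygame.mixer.music.load("track1.ogg")  # hypothetical files
pygame.mixer.music.play()

waiting = True
while waiting:
    for event in pygame.event.get():
        if event.type == MUSIC_END:
            pygame.mixer.music.load("track2.ogg")
            pygame.mixer.music.play()
            waiting = False
        elif event.type == pygame.QUIT:
            waiting = False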

-
- -
-
-pygame.mixer.music.get_endevent()¶
-
-
get the event a channel sends when playback stops
-
get_endevent() -> type
-
-

Returns the event type to be sent every time the music finishes playback. If -there is no endevent the function returns pygame.NOEVENT.

-
- -
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/overlay.html b/venv/Lib/site-packages/pygame/docs/generated/ref/overlay.html
deleted file mode 100644
index 00ecdca..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/overlay.html
+++ /dev/null
@@ -1,231 +0,0 @@
pygame.Overlay — pygame v2.1.2 documentation

Warning

-

This module is non functional in pygame 2.0 and above, unless you have manually compiled pygame with SDL1. -This module will not be supported in the future.

-
-
-
-pygame.Overlay¶
-
-
pygame object for video overlay graphics
-
Overlay(format, (width, height)) -> Overlay
-
Overlay.display() — set the overlay pixel data
Overlay.set_location() — control where the overlay is displayed
Overlay.get_hardware() — test if the Overlay is hardware accelerated
-

The Overlay objects provide support for accessing hardware video overlays. -Video overlays do not use standard RGB pixel formats, and can use -multiple resolutions of data to create a single image.

-

The Overlay objects represent lower level access to the display hardware. To -use the object you must understand the technical details of video overlays.

-

The Overlay format determines the type of pixel data used. Not all hardware -will support all types of overlay formats. Here is a list of available -format types:

-
YV12_OVERLAY, IYUV_OVERLAY, YUY2_OVERLAY, UYVY_OVERLAY, YVYU_OVERLAY
-
-
-

The width and height arguments control the size for the overlay image data. -The overlay image can be displayed at any size, not just the resolution of -the overlay.

-

The overlay objects are always visible, and always show above the regular -display contents.

-
-
-display()¶
-
-
set the overlay pixel data
-
display((y, u, v)) -> None
-
display() -> None
-
-

Display the YUV data in SDL's overlay planes. The y, u, and v arguments -are strings of binary data. The data must be in the correct format used -to create the Overlay.

-

If no argument is passed in, the Overlay will simply be redrawn with the -current data. This can be useful when the Overlay is not really hardware -accelerated.

-

The strings are not validated, and improperly sized strings could crash -the program.

-
- -
-
-set_location()¶
-
-
control where the overlay is displayed
-
set_location(rect) -> None
-
-

Set the location for the overlay. The overlay will always be shown -relative to the main display Surface. This does not actually redraw the -overlay, it will be updated on the next call to Overlay.display().

-
- -
-
-get_hardware()¶
-
-
test if the Overlay is hardware accelerated
-
get_hardware(rect) -> int
-
-

Returns a True value when the Overlay is hardware accelerated. If the -platform does not support acceleration, software rendering is used.

-
- -
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/pixelarray.html b/venv/Lib/site-packages/pygame/docs/generated/ref/pixelarray.html
deleted file mode 100644
index dad2d87..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/pixelarray.html
+++ /dev/null
@@ -1,486 +0,0 @@
pygame.PixelArray — pygame v2.1.2 documentation
-
-pygame.PixelArray¶
-
-
pygame object for direct pixel access of surfaces
-
PixelArray(Surface) -> PixelArray
-
PixelArray.surface — Gets the Surface the PixelArray uses.
PixelArray.itemsize — Returns the byte size of a pixel array item
PixelArray.ndim — Returns the number of dimensions.
PixelArray.shape — Returns the array size.
PixelArray.strides — Returns byte offsets for each array dimension.
PixelArray.make_surface() — Creates a new Surface from the current PixelArray.
PixelArray.replace() — Replaces the passed color in the PixelArray with another one.
PixelArray.extract() — Extracts the passed color from the PixelArray.
PixelArray.compare() — Compares the PixelArray with another one.
PixelArray.transpose() — Exchanges the x and y axis.
PixelArray.close() — Closes the PixelArray, and releases Surface lock.
-

The PixelArray wraps a Surface and provides direct access to the surface's pixels. A pixel array can be one or two dimensional. A two dimensional array, like its surface, is indexed [column, row]. Pixel arrays support slicing, both for returning a subarray or for assignment. A pixel array sliced on a single column or row returns a one dimensional pixel array. Arithmetic and other operations are not supported. A pixel array can be safely assigned to itself. Finally, pixel arrays export an array struct interface, allowing them to interact with pygame.pixelcopy and NumPy arrays.

-

A PixelArray pixel item can be assigned a raw integer value, a pygame.Color instance, or an (r, g, b[, a]) tuple.

-
pxarray[x, y] = 0xFF00FF
-pxarray[x, y] = pygame.Color(255, 0, 255)
-pxarray[x, y] = (255, 0, 255)
-
-
-

However, only a pixel's integer value is returned. So, to compare a pixel -to a particular color the color needs to be first mapped using -the Surface.map_rgb() method of the Surface object for which the -PixelArray was created.

-
pxarray = pygame.PixelArray(surface)
-# Check, if the first pixel at the topleft corner is blue
-if pxarray[0, 0] == surface.map_rgb((0, 0, 255)):
-    ...
-
-
-

When assigning to a range of pixels, a non-tuple sequence of colors or a PixelArray can be used as the value. For a sequence, the length must match the PixelArray width.

-
pxarray[a:b] = 0xFF00FF                   # set all pixels to 0xFF00FF
-pxarray[a:b] = (0xFF00FF, 0xAACCEE, ... ) # first pixel = 0xFF00FF,
-                                          # second pixel  = 0xAACCEE, ...
-pxarray[a:b] = [(255, 0, 255), (170, 204, 238), ...] # same as above
-pxarray[a:b] = [(255, 0, 255), 0xAACCEE, ...]        # same as above
-pxarray[a:b] = otherarray[x:y]            # slice sizes must match
-
-
-

For PixelArray assignment, if the right hand side array has a row length -of 1, then the column is broadcast over the target array's rows. An -array of height 1 is broadcast over the target's columns, and is equivalent -to assigning a 1D PixelArray.

-

Subscript slices can also be used to assign to a rectangular subview of -the target PixelArray.

-
# Create some new PixelArray objects providing a different view
-# of the original array/surface.
-newarray = pxarray[2:4, 3:5]
-otherarray = pxarray[::2, ::2]
-
-
-

Subscript slices can also be used to do fast rectangular pixel manipulations instead of iterating over the x or y axis, for example:

-
pxarray[::2, :] = (0, 0, 0)               # Make even columns black.
-pxarray[::2] = (0, 0, 0)                  # Same as [::2, :]
-
-
-

During its lifetime, the PixelArray locks the surface, so you have to close() it explicitly once it is no longer needed if the surface is to be used again in the same scope. It is best to use it as a context manager, using the with PixelArray(surf) as pixel_array: style, so it works on PyPy too.
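A minimal sketch of the context-manager style:

import pygame

surf = pygame.Surface((100, 100))
with pygame.PixelArray(surf) as pxarray:  # the surface is locked for the duration of the block
    pxarray[::2, :] = (255, 0, 0)         # make even columns red
print(surf.get_locked())                  # False: the lock was released on exit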

-

A simple : slice index for the column can be omitted.

-
pxarray[::2, ...] = (0, 0, 0)             # Same as pxarray[::2, :]
-pxarray[...] = (255, 0, 0)                # Same as pxarray[:]
-
-
-

A note about PixelArray to PixelArray assignment, for arrays with an -item size of 3 (created from 24 bit surfaces) pixel values are translated -from the source to the destinations format. The red, green, and blue -color elements of each pixel are shifted to match the format of the -target surface. For all other pixel sizes no such remapping occurs. -This should change in later pygame releases, where format conversions -are performed for all pixel sizes. To avoid code breakage when full mapped -copying is implemented it is suggested PixelArray to PixelArray copies be -only between surfaces of identical format.

-
-

New in pygame 1.9.4:

-
    -
  • close() method was added. For explicitly cleaning up.

  • -
  • being able to use PixelArray as a context manager for cleanup.

  • -
  • both of these are useful for when working without reference counting (pypy).

  • -
-
-
-

New in pygame 1.9.2:

-
    -
  • array struct interface

  • -
  • transpose method

  • -
  • broadcasting for a length 1 dimension

  • -
-
-
-

Changed in pygame 1.9.2:

-
    -
  • A 2D PixelArray can have a length 1 dimension. -Only an integer index on a 2D PixelArray returns a 1D array.

  • -
  • For assignment, a tuple can only be a color. Any other sequence type -is a sequence of colors.

  • -
-
-
-
-surface¶
-
-
Gets the Surface the PixelArray uses.
-
surface -> Surface
-
-

The Surface the PixelArray was created for.

-
- -
-
-itemsize¶
-
-
Returns the byte size of a pixel array item
-
itemsize -> int
-
-

This is the same as Surface.get_bytesize() for the -pixel array's surface.

-
-

New in pygame 1.9.2.

-
-
- -
-
-ndim¶
-
-
Returns the number of dimensions.
-
ndim -> int
-
-

A pixel array can be 1 or 2 dimensional.

-
-

New in pygame 1.9.2.

-
-
- -
-
-shape¶
-
-
Returns the array size.
-
shape -> tuple of int's
-
-

A tuple of length ndim giving the length of each dimension. Analogous to Surface.get_size().

-
-

New in pygame 1.9.2.

-
-
- -
-
-strides¶
-
-
Returns byte offsets for each array dimension.
-
strides -> tuple of int's
-
-

A tuple of length ndim giving byte counts. When a stride is multiplied by the corresponding index it gives the offset of that index from the start of the array. A stride is negative for an array that is inverted (has a negative step).

-
-

New in pygame 1.9.2.

-
-
- -
-
-make_surface()¶
-
-
Creates a new Surface from the current PixelArray.
-
make_surface() -> Surface
-
-

Creates a new Surface from the current PixelArray. Depending on the -current PixelArray the size, pixel order etc. will be different from the -original Surface.

-
# Create a new surface flipped around the vertical axis.
-sf = pxarray[:,::-1].make_surface ()
-
-
-
-

New in pygame 1.8.1.

-
-
- -
-
-replace()¶
-
-
Replaces the passed color in the PixelArray with another one.
-
replace(color, repcolor, distance=0, weights=(0.299, 0.587, 0.114)) -> None
-
-

Replaces the pixels with the passed color in the PixelArray by changing them to the passed replacement color.

-

It uses a simple weighted Euclidean distance formula to calculate the -distance between the colors. The distance space ranges from 0.0 to 1.0 -and is used as threshold for the color detection. This causes the -replacement to take pixels with a similar, but not exactly identical -color, into account as well.

-

This is an in place operation that directly affects the pixels of the -PixelArray.

-
-

New in pygame 1.8.1.

-
-
- -
-
-extract()¶
-
-
Extracts the passed color from the PixelArray.
-
extract(color, distance=0, weights=(0.299, 0.587, 0.114)) -> PixelArray
-
-

Extracts the passed color by changing all matching pixels to white, while -non-matching pixels are changed to black. This returns a new PixelArray -with the black/white color mask.

-

It uses a simple weighted Euclidean distance formula to calculate the -distance between the colors. The distance space ranges from 0.0 to 1.0 -and is used as threshold for the color detection. This causes the -extraction to take pixels with a similar, but not exactly identical -color, into account as well.

-
-

New in pygame 1.8.1.

-
-
- -
-
-compare()¶
-
-
Compares the PixelArray with another one.
-
compare(array, distance=0, weights=(0.299, 0.587, 0.114)) -> PixelArray
-
-

Compares the contents of the PixelArray with those from the passed in -PixelArray. It returns a new PixelArray with a black/white color mask -that indicates the differences (black) of both arrays. Both PixelArray -objects must have identical bit depths and dimensions.

-

It uses a simple weighted Euclidean distance formula to calculate the -distance between the colors. The distance space ranges from 0.0 to 1.0 -and is used as a threshold for the color detection. This causes the -comparison to mark pixels with a similar, but not exactly identical -color, as white.

-
-

New in pygame 1.8.1.

-
-
- -
-
-transpose()¶
-
-
Exchanges the x and y axis.
-
transpose() -> PixelArray
-
-

This method returns a new view of the pixel array with the rows and -columns swapped. So for a (w, h) sized array a (h, w) slice is returned. -If an array is one dimensional, then a length 1 x dimension is added, -resulting in a 2D pixel array.

-
-

New in pygame 1.9.2.

-
-
- -
-
-close()¶
-
-
Closes the PixelArray, and releases Surface lock.
-
-close() -> None
-
-

This method is for explicitly closing the PixelArray, and releasing a lock on the Surface.

-
-

New in pygame 1.9.4.

-
-
- -
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/pixelcopy.html b/venv/Lib/site-packages/pygame/docs/generated/ref/pixelcopy.html
deleted file mode 100644
index 55fa60a..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/pixelcopy.html
+++ /dev/null
@@ -1,262 +0,0 @@
pygame.pixelcopy — pygame v2.1.2 documentation
-
-pygame.pixelcopy
-
-
pygame module for general pixel array copying
-
pygame.pixelcopy.surface_to_array() — copy surface pixels to an array object
pygame.pixelcopy.array_to_surface() — copy an array object to a surface
pygame.pixelcopy.map_array() — copy an array to another array, using surface format
pygame.pixelcopy.make_surface() — Copy an array to a new surface
-

The pygame.pixelcopy module contains functions for copying between surfaces and objects exporting an array structure interface. It is a backend for pygame.surfarray, adding NumPy support. But pixelcopy is more general, and intended for direct use.

-

The array struct interface exposes an array's data in a standard way. -It was introduced in NumPy. In Python 2.7 and above it is replaced by the -new buffer protocol, though the buffer protocol is still a work in progress. -The array struct interface, on the other hand, is stable and works with earlier -Python versions. So for now the array struct interface is the predominate way -pygame handles array introspection.

-

For 2d arrays of integer pixel values, the values are mapped to the pixel format of the related surface. To get the actual color of a pixel value use pygame.Surface.unmap_rgb(). 2d arrays can only be used directly between surfaces having the same pixel layout.

-

New in pygame 1.9.2.

-
-
-pygame.pixelcopy.surface_to_array()¶
-
-
copy surface pixels to an array object
-
surface_to_array(array, surface, kind='P', opaque=255, clear=0) -> None
-
-

The surface_to_array function copies pixels from a Surface object -to a 2D or 3D array. Depending on argument kind and the target array -dimension, a copy may be raw pixel value, RGB, a color component slice, -or colorkey alpha transparency value. Recognized kind values are the -single character codes 'P', 'R', 'G', 'B', 'A', and 'C'. Kind codes are case -insensitive, so 'p' is equivalent to 'P'. The first two dimensions -of the target must be the surface size (w, h).

-

The default 'P' kind code does a direct raw integer pixel (mapped) value -copy to a 2D array and a 'RGB' pixel component (unmapped) copy to a 3D array -having shape (w, h, 3). For an 8 bit colormap surface this means the -table index is copied to a 2D array, not the table value itself. A 2D -array's item size must be at least as large as the surface's pixel -byte size. The item size of a 3D array must be at least one byte.

-

For the 'R', 'G', 'B', and 'A' copy kinds a single color component -of the unmapped surface pixels are copied to the target 2D array. -For kind 'A' and surfaces with source alpha (the surface was created with -the SRCALPHA flag), has a colorkey -(set with Surface.set_colorkey()), -or has a blanket alpha -(set with Surface.set_alpha()) -then the alpha values are those expected for a SDL surface. -If a surface has no explicit alpha value, then the target array -is filled with the value of the optional opaque surface_to_array -argument (default 255: not transparent).

-

Copy kind 'C' is a special case for alpha copy of a source surface with colorkey. Unlike the 'A' color component copy, the clear argument value is used for colorkey matches, opaque otherwise. By default, a match has alpha 0 (totally transparent), while everything else is alpha 255 (totally opaque). It is a more general implementation of pygame.surfarray.array_colorkey().

-

Specific to surface_to_array, a ValueError is raised for target arrays -with incorrect shape or item size. A TypeError is raised for an incorrect -kind code. Surface specific problems, such as locking, raise a pygame.error.
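A sketch of the 2D and 3D copy targets described above (assuming NumPy is installed):

import numpy as np
import pygame
import pygame.pixelcopy

surf = pygame.Surface((4, 3))
surf.fill((255, 0, 255))

mapped = np.zeros((4, 3), dtype=np.uint32)      # 2D target: raw mapped pixel values
pygame.pixelcopy.surface_to_array(mapped, surf)

rgb = np.zeros((4, 3, 3), dtype=np.uint8)       # 3D target: unmapped RGB components
pygame.pixelcopy.surface_to_array(rgb, surf)
print(rgb[0, 0])                                # [255   0 255]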

-
- -
-
-pygame.pixelcopy.array_to_surface()¶
-
-
copy an array object to a surface
-
array_to_surface(<surface>, <array>) -> None
-
-

See pygame.surfarray.blit_array().

-
- -
-
-pygame.pixelcopy.map_array()¶
-
-
copy an array to another array, using surface format
-
map_array(<array>, <array>, <surface>) -> None
-
-

Map an array of color element values - (w, h, ..., 3) - to an array of -pixels - (w, h) according to the format of <surface>.

-
- -
-
-pygame.pixelcopy.make_surface()¶
-
-
Copy an array to a new surface
-
pygame.pixelcopy.make_surface(array) -> Surface
-
-

Create a new Surface that best resembles the data and format of the array. -The array can be 2D or 3D with any sized integer values.

-
- -
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/pygame.html b/venv/Lib/site-packages/pygame/docs/generated/ref/pygame.html
deleted file mode 100644
index 8b48ddd..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/pygame.html
+++ /dev/null
@@ -1,693 +0,0 @@
pygame — pygame v2.1.2 documentation
-
-pygame
-
-
the top level pygame package
-
pygame.init() — initialize all imported pygame modules
pygame.quit() — uninitialize all pygame modules
pygame.get_init() — returns True if pygame is currently initialized
pygame.error — standard pygame exception
pygame.get_error() — get the current error message
pygame.set_error() — set the current error message
pygame.get_sdl_version() — get the version number of SDL
pygame.get_sdl_byteorder() — get the byte order of SDL
pygame.register_quit() — register a function to be called when pygame quits
pygame.encode_string() — Encode a Unicode or bytes object
pygame.encode_file_path() — Encode a Unicode or bytes object as a file system path
-

The pygame package represents the top-level package for others to use. Pygame -itself is broken into many submodules, but this does not affect programs that -use pygame.

-

As a convenience, most of the top-level variables in pygame have been placed inside a module named pygame.locals. This is meant to be used with from pygame.locals import *, in addition to import pygame.

-

When you import pygame all available pygame submodules are automatically -imported. Be aware that some of the pygame modules are considered optional, -and may not be available. In that case, pygame will provide a placeholder -object instead of the module, which can be used to test for availability.

-
-
-pygame.init()¶
-
-
initialize all imported pygame modules
-
init() -> (numpass, numfail)
-
-

Initialize all imported pygame modules. No exceptions will be raised if a module fails, but the total number of successful and failed inits will be returned as a tuple. You can always initialize individual modules manually, but pygame.init() is a convenient way to get everything started. The init() functions for individual modules will raise exceptions when they fail.

-

You may want to initialize the different modules separately to speed up your -program or to not use modules your game does not require.

-

It is safe to call this init() more than once as repeated calls will have -no effect. This is true even if you have pygame.quit() all the modules.
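A minimal start-up sketch:

import pygame

numpass, numfail = pygame.init()
print(numpass, "pygame modules initialised,", numfail, "failed")
# ... run the game ...
pygame.quit()  # optional: the interpreter would do this at shutdown anyway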

-
- -
-
-pygame.quit()¶
-
-
uninitialize all pygame modules
-
quit() -> None
-
-

Uninitialize all pygame modules that have previously been initialized. When -the Python interpreter shuts down, this method is called regardless, so your -program should not need it, except when it wants to terminate its pygame -resources and continue. It is safe to call this function more than once as -repeated calls have no effect.

-
-

Note

-

Calling pygame.quit() will not exit your program. Consider letting your program end in the same way a normal Python program will end.

-
-
- -
-
-pygame.get_init()¶
-
-
returns True if pygame is currently initialized
-
get_init() -> bool
-
-

Returns True if pygame is currently initialized.

-
-

New in pygame 1.9.5.

-
-
- -
-
-exception pygame.error¶
-
-
standard pygame exception
-
raise pygame.error(message)
-
-

This exception is raised whenever a pygame or SDL operation fails. You -can catch any anticipated problems and deal with the error. The exception is -always raised with a descriptive message about the problem.

-

Derived from the RuntimeError exception, which can also be used to catch -these raised errors.

-
- -
-
-pygame.get_error()¶
-
-
get the current error message
-
get_error() -> errorstr
-
-

SDL maintains an internal error message. This message will usually be given to you when pygame.error is raised, so this function will rarely be needed.

-
- -
-
-pygame.set_error()¶
-
-
set the current error message
-
set_error(error_msg) -> None
-
-

SDL maintains an internal error message. This message will usually be given to you when pygame.error is raised, so this function will rarely be needed.

-
- -
-
-pygame.get_sdl_version()¶
-
-
get the version number of SDL
-
get_sdl_version() -> major, minor, patch
-
-

Returns the three version numbers of the SDL library. This version is built -at compile time. It can be used to detect which features may or may not be -available through pygame.

-
-

New in pygame 1.7.0.

-
-
- -
-
-pygame.get_sdl_byteorder()¶
-
-
get the byte order of SDL
-
get_sdl_byteorder() -> int
-
-

Returns the byte order of the SDL library. It returns 1234 for little -endian byte order and 4321 for big endian byte order.

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.register_quit()¶
-
-
register a function to be called when pygame quits
-
register_quit(callable) -> None
-
-

When pygame.quit() is called, all registered quit functions are called. Pygame modules do this automatically when they are initializing, so this function will rarely be needed.

-
- -
-
-pygame.encode_string()¶
-
-
Encode a Unicode or bytes object
-
encode_string([obj [, encoding [, errors [, etype]]]]) -> bytes or None
-
-

obj: If Unicode, encode; if bytes, return unaltered; if anything else, -return None; if not given, raise SyntaxError.

-

encoding (string): If present, encoding to use. The default is -'unicode_escape'.

-

errors (string): If given, how to handle unencodable characters. The default -is 'backslashreplace'.

-

etype (exception type): If given, the exception type to raise for an -encoding error. The default is UnicodeEncodeError, as returned by -PyUnicode_AsEncodedString(). For the default encoding and errors values -there should be no encoding errors.

-

This function is used in encoding file paths. Keyword arguments are -supported.

-
-

New in pygame 1.9.2: (primarily for use in unit tests)

-
-
- -
-
-pygame.encode_file_path()¶
-
-
Encode a Unicode or bytes object as a file system path
-
encode_file_path([obj [, etype]]) -> bytes or None
-
-

obj: If Unicode, encode; if bytes, return unaltered; if anything else, -return None; if not given, raise SyntaxError.

-

etype (exception type): If given, the exception type to raise for an -encoding error. The default is UnicodeEncodeError, as returned by -PyUnicode_AsEncodedString().

-

This function is used to encode file paths in pygame. Encoding is to the -codec as returned by sys.getfilesystemencoding(). Keyword arguments are -supported.

-
-

New in pygame 1.9.2: (primarily for use in unit tests)

-
-
- -
- -
-
-
-
-pygame.version
-
-
small module containing version information
-
pygame.version.ver — version number as a string
pygame.version.vernum — tupled integers of the version
pygame.version.rev — repository revision of the build
pygame.version.SDL — tupled integers of the SDL library version
-

This module is automatically imported into the pygame package and can be used to -check which version of pygame has been imported.

-
-
-pygame.version.ver¶
-
-
version number as a string
-
ver = '1.2'
-
-

This is the version represented as a string. It can contain a micro release -number as well, e.g. '1.5.2'

-
- -
-
-pygame.version.vernum¶
-
-
tupled integers of the version
-
vernum = (1, 5, 3)
-
-

This version information can easily be compared with other version -numbers of the same format. An example of checking pygame version numbers -would look like this:

-
if pygame.version.vernum < (1, 5):
-    print('Warning, older version of pygame (%s)' %  pygame.version.ver)
-    disable_advanced_features = True
-
-
-
-

New in pygame 1.9.6: Attributes major, minor, and patch.

-
-
vernum.major == vernum[0]
-vernum.minor == vernum[1]
-vernum.patch == vernum[2]
-
-
-
-

Changed in pygame 1.9.6: str(pygame.version.vernum) returns a string like "2.0.0" instead -of "(2, 0, 0)".

-
-
-

Changed in pygame 1.9.6: repr(pygame.version.vernum) returns a string like -"PygameVersion(major=2, minor=0, patch=0)" instead of "(2, 0, 0)".

-
-
- -
-
-pygame.version.rev¶
-
-
repository revision of the build
-
rev = 'a6f89747b551+'
-
-

The Mercurial node identifier of the repository checkout from which this -package was built. If the identifier ends with a plus sign '+' then the -package contains uncommitted changes. Please include this revision number -in bug reports, especially for non-release pygame builds.

-

Important note: pygame development has moved to github, this variable is -obsolete now. As soon as development shifted to github, this variable started -returning an empty string "". -It has always been returning an empty string since v1.9.5.

-
-

Changed in pygame 1.9.5: Always returns an empty string "".

-
-
- -
-
-pygame.version.SDL¶
-
-
tupled integers of the SDL library version
-
SDL = '(2, 0, 12)'
-
-

This is the SDL library version represented as an extended tuple. It also has -attributes 'major', 'minor' & 'patch' that can be accessed like this:

-
>>> pygame.version.SDL.major
-2
-
-
-

printing the whole thing returns a string like this:

-
>>> pygame.version.SDL
-SDLVersion(major=2, minor=0, patch=12)
-
-
-
-

New in pygame 2.0.0.

-
-
- -

Setting Environment Variables

-

Some aspects of pygame's behaviour can be controlled by setting environment variables, they cover a wide -range of the library's functionality. Some of the variables are from pygame itself, while others come from -the underlying C SDL library that pygame uses.

-

In python, environment variables are usually set in code like this:

-
import os
-os.environ['NAME_OF_ENVIRONMENT_VARIABLE'] = 'value_to_set'
-
-
-

Or to preserve users ability to override the variable:

-
import os
-os.environ['ENV_VAR'] = os.environ.get('ENV_VAR', 'value')
-
-
-

If the variable is more useful for users of an app to set than the developer then they can set it like this:

-

Windows:

-
set NAME_OF_ENVIRONMENT_VARIABLE=value_to_set
-python my_application.py
-
-
-

Linux/Mac:

-
ENV_VAR=value python my_application.py
-
-
-

For some variables they need to be set before initialising pygame, some must be set before even importing pygame, -and others can simply be set right before the area of code they control is run.

-

Below is a list of environment variables, their settable values, and a brief description of what they do.

-
-

-
-

Pygame Environment Variables

-

These variables are defined by pygame itself.

-
-

-
-
PYGAME_DISPLAY - Experimental (subject to change)
-Set index of the display to use, "0" is the default.
-
-
-

This sets the display where pygame will open its window or screen. The value set here will be used if set before calling pygame.display.set_mode(), and as long as no 'display' parameter is passed into pygame.display.set_mode().

-
-

-
-
PYGAME_FORCE_SCALE -
-Set to "photo" or "default".
-
-
-

This forces set_mode() to use the SCALED display mode and, if "photo" is set, makes the scaling use the slowest, but highest quality anisotropic scaling algorithm, if it is available. Must be set before calling pygame.display.set_mode().

-
-

-
-
PYGAME_BLEND_ALPHA_SDL2 - New in pygame 2.0.0
-Set to "1" to enable the SDL2 blitter.
-
-
-

This makes pygame use the SDL2 blitter for all alpha blending. The SDL2 blitter is sometimes faster than the default blitter but uses a different formula so the final colours may differ. Must be set before pygame.init() is called.

-
-

-
-
PYGAME_HIDE_SUPPORT_PROMPT -
-Set to "1" to hide the prompt.
-
-
-

This stops the welcome message popping up in the -console that tells you which version of python, -pygame & SDL you are using. Must be set before -importing pygame.

-
-

-
-
PYGAME_FREETYPE -
-Set to "1" to enable.
-
-
-

This switches the pygame.font module to a pure -freetype implementation that bypasses SDL_ttf. -See the font module for why you might want to -do this. Must be set before importing pygame.

-
-

-
-
PYGAME_CAMERA -
-Set to "opencv" or "vidcapture"
-
-
-

Forces the library backend used in the camera module, overriding the platform defaults. Must be set before calling pygame.camera.init().

-

In pygame 2.0.3, backends can be set programmatically instead, and the old -OpenCV backend has been replaced with one on top of "opencv-python," rather -than the old "highgui" OpenCV port. Also, there is a new native Windows -backend available.

-
-

-

-
-

SDL Environment Variables

-

These variables are defined by SDL.

-

For documentation on the environment variables available in -pygame 1 try here. -For Pygame 2, some selected environment variables are listed below.

-
-

-
-
SDL_VIDEO_CENTERED -
-Set to "1" to enable centering the window.
-
-
-

This will make the pygame window open in the centre of the display. Must be set before calling pygame.display.set_mode().

-
-

-
-
SDL_VIDEO_WINDOW_POS -
-Set to "x,y" to position the top left corner of the window.
-
-
-

This allows control over the placement of the pygame window within the display. Must be set before calling pygame.display.set_mode().

-
-

-
-
SDL_VIDEODRIVER -
-Set to "drivername" to change the video driver used.
-
-
-

On some platforms there are multiple video drivers available and this allows users to pick between them. More information is available here. Must be set before calling pygame.init() or pygame.display.init().

-
-

-
-
SDL_AUDIODRIVER -
-Set to "drivername" to change the audio driver used.
-
-
-

On some platforms there are multiple audio drivers available and this allows users to pick between them. More information is available here. Must be set before calling pygame.init() or pygame.mixer.init().

-
-

-
-
SDL_VIDEO_ALLOW_SCREENSAVER
-Set to "1" to allow screensavers while pygame apps are running.
-
-
-

By default pygame apps disable screensavers while -they are running. Setting this environment variable allows users or -developers to change that and make screensavers run again.

-
-

-
-
SDL_VIDEO_X11_NET_WM_BYPASS_COMPOSITOR
-Set to "0" to re-enable the compositor.
-
-
-

By default SDL tries to disable the X11 compositor for all pygame -apps. This is usually a good thing as it's faster, however if you -have an app which doesn't update every frame and are using linux -you may want to disable this bypass. The bypass has reported problems -on KDE linux. This variable is only used on x11/linux platforms.

-
- -
- - -

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/rect.html b/venv/Lib/site-packages/pygame/docs/generated/ref/rect.html
deleted file mode 100644
index 11c4188..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/ref/rect.html
+++ /dev/null
@@ -1,672 +0,0 @@
pygame.Rect — pygame v2.1.2 documentation
-
-pygame.Rect¶
-
-
pygame object for storing rectangular coordinates
-
Rect(left, top, width, height) -> Rect
-
Rect((left, top), (width, height)) -> Rect
-
Rect(object) -> Rect
-
-—copy the rectangle
-—moves the rectangle
-—moves the rectangle, in place
-—grow or shrink the rectangle size
-—grow or shrink the rectangle size, in place
-—sets the position and size of the rectangle
-—moves the rectangle inside another
-—moves the rectangle inside another, in place
-—crops a rectangle inside another
-—crops a line inside a rectangle
-—joins two rectangles into one
-—joins two rectangles into one, in place
-—the union of many rectangles
-—the union of many rectangles, in place
-—resize and move a rectangle with aspect ratio
-—correct negative sizes
-—test if one rectangle is inside another
-—test if a point is inside a rectangle
-—test if two rectangles overlap
-—test if one rectangle in a list intersects
-—test if all rectangles in a list intersect
-—test if one rectangle in a dictionary intersects
-—test if all rectangles in a dictionary intersect
-

Pygame uses Rect objects to store and manipulate rectangular areas. A Rect can be created from a combination of left, top, width, and height values. Rects can also be created from python objects that are already a Rect or have an attribute named "rect".

-

Any pygame function that requires a Rect argument also accepts any of these values to construct a Rect. This makes it easier to create Rects on the fly as arguments to functions.

-

The Rect functions that change the position or size of a Rect return a new copy of the Rect with the affected changes. The original Rect is not modified. Some methods have an alternate "in-place" version that returns None but affects the original Rect. These "in-place" methods are denoted with the "ip" suffix.

-

The Rect object has several virtual attributes which can be used to move and align the Rect:

-
x,y
-top, left, bottom, right
-topleft, bottomleft, topright, bottomright
-midtop, midleft, midbottom, midright
-center, centerx, centery
-size, width, height
-w,h
-
-
-

All of these attributes can be assigned to:

-
rect1.right = 10
-rect2.center = (20,30)
-
-
-

Assigning to size, width or height changes the dimensions of the rectangle; all other assignments move the rectangle without resizing it. Notice that some attributes are integers and others are pairs of integers.

-

If a Rect has a nonzero width or height, it will return True for a nonzero test. Some methods return a Rect with 0 size to represent an invalid rectangle. A Rect with a 0 size will not collide when using collision detection methods (e.g. collidepoint(), colliderect(), etc.).

-

The coordinates for Rect objects are all integers. The size values can be programmed to have negative values, but these are considered illegal Rects for most operations.

-

There are several collision tests between other rectangles. Most python containers can be searched for collisions against a single Rect.

-

The area covered by a Rect does not include the right- and bottom-most edge of pixels. If one Rect's bottom border is another Rect's top border (i.e., rect1.bottom == rect2.top), the two meet exactly on the screen but do not overlap, and rect1.colliderect(rect2) returns False.
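A quick illustration of this edge behaviour (the coordinates are arbitrary example values):

import pygame

r1 = pygame.Rect(0, 0, 10, 10)          # bottom edge at y = 10
r2 = pygame.Rect(0, 10, 10, 10)         # top edge at y = 10
print(r1.colliderect(r2))               # False: the edges only touch
print(r1.colliderect(r2.move(0, -1)))   # True: one pixel of overlap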

-
-

New in pygame 1.9.2: The Rect class can be subclassed. Methods such as copy() and move() will recognize this and return instances of the subclass. However, the subclass's __init__() method is not called, and __new__() is assumed to take no arguments. So these methods should be overridden if any extra attributes need to be copied.

-
-
-
-copy()¶
-
-
copy the rectangle
-
copy() -> Rect
-
-

Returns a new rectangle having the same position and size as the original.

-

New in pygame 1.9

-
- -
-
-move()¶
-
-
moves the rectangle
-
move(x, y) -> Rect
-
-

Returns a new rectangle that is moved by the given offset. The x and y -arguments can be any integer value, positive or negative.

-
- -
-
-move_ip()¶
-
-
moves the rectangle, in place
-
move_ip(x, y) -> None
-
-

Same as the Rect.move() method, but operates in place.

-
- -
-
-inflate()¶
-
-
grow or shrink the rectangle size
-
inflate(x, y) -> Rect
-
-

Returns a new rectangle with the size changed by the given offset. The rectangle remains centered around its current center. Negative values will shrink the rectangle. Note that this works with integers: if the given offset is too small (greater than -2 but less than 2), the center will be off.

-
- -
-
-inflate_ip()¶
-
-
grow or shrink the rectangle size, in place
-
inflate_ip(x, y) -> None
-
-

Same as the Rect.inflate() method, but operates in place.

-
- -
-
-update()¶
-
-
sets the position and size of the rectangle
-
update(left, top, width, height) -> None
-
update((left, top), (width, height)) -> None
-
update(object) -> None
-
-

Sets the position and size of the rectangle, in place. See pygame.Rect() for the parameters of this function.

-
-

New in pygame 2.0.1.

-
-
- -
-
-clamp()¶
-
-
moves the rectangle inside another
-
clamp(Rect) -> Rect
-
-

Returns a new rectangle that is moved to be completely inside the -argument Rect. If the rectangle is too large to fit inside, it is -centered inside the argument Rect, but its size is not changed.

-
- -
-
-clamp_ip()¶
-
-
moves the rectangle inside another, in place
-
clamp_ip(Rect) -> None
-
-

Same as the Rect.clamp() method, but operates in place.

-
- -
-
-clip()¶
-
-
crops a rectangle inside another
-
clip(Rect) -> Rect
-
-

Returns a new rectangle that is cropped to be completely inside the -argument Rect. If the two rectangles do not overlap to begin with, a Rect -with 0 size is returned.

-
- -
-
-clipline()¶
-
-
crops a line inside a rectangle
-
clipline(x1, y1, x2, y2) -> ((cx1, cy1), (cx2, cy2))
-
clipline(x1, y1, x2, y2) -> ()
-
clipline((x1, y1), (x2, y2)) -> ((cx1, cy1), (cx2, cy2))
-
clipline((x1, y1), (x2, y2)) -> ()
-
clipline((x1, y1, x2, y2)) -> ((cx1, cy1), (cx2, cy2))
-
clipline((x1, y1, x2, y2)) -> ()
-
clipline(((x1, y1), (x2, y2))) -> ((cx1, cy1), (cx2, cy2))
-
clipline(((x1, y1), (x2, y2))) -> ()
-
-

Returns the coordinates of a line that is cropped to be completely inside -the rectangle. If the line does not overlap the rectangle, then an empty -tuple is returned.

-

The line to crop can be any of the following formats (floats can be used -in place of ints, but they will be truncated):

-
-
    -
  • four ints

  • -
  • 2 lists/tuples/Vector2s of 2 ints

  • -
  • a list/tuple of four ints

  • -
  • a list/tuple of 2 lists/tuples/Vector2s of 2 ints

  • -
-
-
-
Returns
-

a tuple with the coordinates of the given line cropped to be completely inside the rectangle; if the given line does not overlap the rectangle, an empty tuple is returned

-
-
Return type
-

tuple(tuple(int, int), tuple(int, int)) or ()

-
-
Raises
-

TypeError -- if the line coordinates are not given as one of the -above described line formats

-
-
-
-

Note

-

This method can be used for collision detection between a rect and a -line. See example code below.

-
-
-

Note

-

The rect.bottom and rect.right attributes of a pygame.Rect always lie one pixel outside of its actual border.

-
-
# Example using clipline().
-clipped_line = rect.clipline(line)
-
-if clipped_line:
-    # If clipped_line is not an empty tuple then the line
-    # collides/overlaps with the rect. The returned value contains
-    # the endpoints of the clipped line.
-    start, end = clipped_line
-    x1, y1 = start
-    x2, y2 = end
-else:
-    print("No clipping. The line is fully outside the rect.")
-
-
-
-

New in pygame 2.0.0.

-
-
- -
-
-union()¶
-
-
joins two rectangles into one
-
union(Rect) -> Rect
-
-

Returns a new rectangle that completely covers the area of the two -provided rectangles. There may be area inside the new Rect that is not -covered by the originals.

-
- -
-
-union_ip()¶
-
-
joins two rectangles into one, in place
-
union_ip(Rect) -> None
-
-

Same as the Rect.union() method, but operates in place.

-
- -
-
-unionall()¶
-
-
the union of many rectangles
-
unionall(Rect_sequence) -> Rect
-
-

Returns the union of one rectangle with a sequence of many rectangles.

-
- -
-
-unionall_ip()¶
-
-
the union of many rectangles, in place
-
unionall_ip(Rect_sequence) -> None
-
-

The same as the Rect.unionall() method, but operates in place.

-
- -
-
-fit()¶
-
-
resize and move a rectangle with aspect ratio
-
fit(Rect) -> Rect
-
-

Returns a new rectangle that is moved and resized to fit another. The -aspect ratio of the original Rect is preserved, so the new rectangle may -be smaller than the target in either width or height.

-
- -
-
-normalize()¶
-
-
correct negative sizes
-
normalize() -> None
-
-

This will flip the width or height of a rectangle if it has a negative -size. The rectangle will remain in the same place, with only the sides -swapped.

-
- -
-
-contains()¶
-
-
test if one rectangle is inside another
-
contains(Rect) -> bool
-
-

Returns true when the argument is completely inside the Rect.

-
- -
-
-collidepoint()¶
-
-
test if a point is inside a rectangle
-
collidepoint(x, y) -> bool
-
collidepoint((x,y)) -> bool
-
-

Returns true if the given point is inside the rectangle. A point along -the right or bottom edge is not considered to be inside the rectangle.

-
-

Note

-

For collision detection between a rect and a line the clipline() -method can be used.

-
-
- -
-
-colliderect()¶
-
-
test if two rectangles overlap
-
colliderect(Rect) -> bool
-
-

Returns true if any portion of either rectangle overlaps (except the top+bottom or left+right edges).

-
-

Note

-

For collision detection between a rect and a line the clipline() -method can be used.

-
-
- -
-
-collidelist()¶
-
-
test if one rectangle in a list intersects
-
collidelist(list) -> index
-
-

Test whether the rectangle collides with any in a sequence of rectangles. The index of the first collision found is returned. If no collisions are found, an index of -1 is returned.
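A small usage sketch (the rectangles are arbitrary example values):

import pygame

player = pygame.Rect(50, 50, 16, 16)
walls = [pygame.Rect(0, 0, 100, 10),
         pygame.Rect(40, 40, 30, 30),
         pygame.Rect(200, 200, 10, 10)]

hit = player.collidelist(walls)
if hit == -1:
    print("no collision")
else:
    print("collided with wall", hit)   # here: index 1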

-
- -
-
-collidelistall()¶
-
-
test if all rectangles in a list intersect
-
collidelistall(list) -> indices
-
-

Returns a list of all the indices that contain rectangles that collide -with the Rect. If no intersecting rectangles are found, an empty list is -returned.

-
- -
-
-collidedict()¶
-
-
test if one rectangle in a dictionary intersects
-
collidedict(dict) -> (key, value)
-
collidedict(dict) -> None
-
collidedict(dict, use_values=0) -> (key, value)
-
collidedict(dict, use_values=0) -> None
-
-

Returns the first key and value pair that intersects with the calling Rect object. If no collisions are found, None is returned. If use_values is 0 (default) then the dict's keys will be used in the collision detection, otherwise the dict's values will be used.

-
-

Note

-

Rect objects cannot be used as keys in a dictionary (they are not hashable), so they must be converted to a tuple. e.g. rect.collidedict({tuple(key_rect) : value})
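For illustration, a hedged sketch of collidedict() with rects stored as tuple keys (the names and region values are invented for the example):

import pygame

cursor = pygame.Rect(12, 12, 4, 4)
regions = {
    tuple(pygame.Rect(0, 0, 10, 10)): "menu",
    tuple(pygame.Rect(10, 10, 20, 20)): "playfield",
}

hit = cursor.collidedict(regions)       # keys are used for the collision test
if hit is not None:
    key, value = hit
    print("cursor is over", value)      # here: "playfield"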

-
-
- -
-
-collidedictall()¶
-
-
test if all rectangles in a dictionary intersect
-
collidedictall(dict) -> [(key, value), ...]
-
collidedictall(dict, use_values=0) -> [(key, value), ...]
-
-

Returns a list of all the key and value pairs that intersect with the -calling Rect object. If no collisions are found an empty list is returned. -If use_values is 0 (default) then the dict's keys will be used in the -collision detection, otherwise the dict's values will be used.

-
-

Note

-

Rect objects cannot be used as keys in a dictionary (they are not hashable), so they must be converted to a tuple. e.g. rect.collidedictall({tuple(key_rect) : value})

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/scrap.html b/venv/Lib/site-packages/pygame/docs/generated/ref/scrap.html deleted file mode 100644 index 96c7eec..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/scrap.html +++ /dev/null @@ -1,458 +0,0 @@ - - - - - - - - - pygame.scrap — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.scrap
-
-
pygame module for clipboard support.
-
-—Initializes the scrap module.
-—Returns True if the scrap module is currently initialized.
-—Gets the data for the specified type from the clipboard.
-—Gets a list of the available clipboard types.
-—Places data into the clipboard.
-—Checks whether data for a given type is available in the clipboard.
-—Indicates if the clipboard ownership has been lost by the pygame application.
-—Sets the clipboard access mode.
-

EXPERIMENTAL!: This API may change or disappear in later pygame releases. If -you use this, your code may break with the next pygame release.

-

The scrap module is for transferring data to/from the clipboard. This allows for cutting and pasting data between pygame and other applications. Some basic data (MIME) types are defined and registered:

-
 pygame         string
-constant        value        description
---------------------------------------------------
-SCRAP_TEXT   "text/plain"    plain text
-SCRAP_BMP    "image/bmp"     BMP encoded image data
-SCRAP_PBM    "image/pbm"     PBM encoded image data
-SCRAP_PPM    "image/ppm"     PPM encoded image data
-
-
-

pygame.SCRAP_PPM, pygame.SCRAP_PBM and pygame.SCRAP_BMP are -suitable for surface buffers to be shared with other applications. -pygame.SCRAP_TEXT is an alias for the plain text clipboard type.

-

Depending on the platform, additional types are automatically registered when data is placed into the clipboard to guarantee a consistent sharing behaviour with other applications. The following listed types can be used as strings to be passed to the respective pygame.scrap module functions.

-

For Windows platforms, these additional types are supported automatically -and resolve to their internal definitions:

-
"text/plain;charset=utf-8"   UTF-8 encoded text
-"audio/wav"                  WAV encoded audio
-"image/tiff"                 TIFF encoded image data
-
-
-

For X11 platforms, these additional types are supported automatically and -resolve to their internal definitions:

-
"text/plain;charset=utf-8"   UTF-8 encoded text
-"UTF8_STRING"                UTF-8 encoded text
-"COMPOUND_TEXT"              COMPOUND text
-
-
-

User defined types can be used, but the data might not be accessible by other applications unless they know what data type to look for. Example: Data placed into the clipboard by pygame.scrap.put("my_data_type", byte_data) can only be accessed by applications which query the clipboard for the "my_data_type" data type.

-

For an example of how the scrap module works refer to the examples page (pygame.examples.scrap_clipboard.main()) or the code directly in GitHub (pygame/examples/scrap_clipboard.py).

-
-

New in pygame 1.8.

-
-
-

Note

-

The scrap module is currently only supported for Windows, X11 and Mac OS X. -On Mac OS X only text works at the moment - other types may be supported in -future releases.

-
-
-
-pygame.scrap.init()¶
-
-
Initializes the scrap module.
-
init() -> None
-
-

Initialize the scrap module.

-
-
Raises
-

pygame.error -- if unable to initialize scrap module

-
-
-
-

Note

-

The scrap module requires pygame.display.set_mode() to be called before being initialized.
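A minimal sketch of the required initialization order (the window size is an arbitrary example value):

import pygame

pygame.init()
screen = pygame.display.set_mode((640, 480))   # a display is needed before scrap.init()
pygame.scrap.init()
print(pygame.scrap.get_init())                  # True once the module is initialized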

-
-
- -
-
-pygame.scrap.get_init()¶
-
-
Returns True if the scrap module is currently initialized.
-
get_init() -> bool
-
-

Gets the scrap module's initialization state.

-
-
Returns
-

True if the pygame.scrap module is currently initialized, False otherwise

-
-
Return type
-

bool

-
-
-
-

New in pygame 1.9.5.

-
-
- -
-
-pygame.scrap.get()¶
-
-
Gets the data for the specified type from the clipboard.
-
get(type) -> bytes | None
-
-

Retrieves the data for the specified type from the clipboard. The data is -returned as a byte string and might need further processing (such as -decoding to Unicode).

-
-
Parameters
-

type (string) -- data type to retrieve from the clipboard

-
-
Returns
-

data (bytes object) for the given type identifier or None if -no data for the given type is available

-
-
Return type
-

bytes | None

-
-
-
text = pygame.scrap.get(pygame.SCRAP_TEXT)
-if text:
-    print("There is text in the clipboard.")
-else:
-    print("There does not seem to be text in the clipboard.")
-
-
-
- -
-
-pygame.scrap.get_types()¶
-
-
Gets a list of the available clipboard types.
-
get_types() -> list
-
-

Gets a list of data type string identifiers for the data currently available on the clipboard. Each identifier can be used in the pygame.scrap.get() method to get the clipboard content of the specific type.

-
-
Returns
-

list of strings of the available clipboard data types, if there -is no data in the clipboard an empty list is returned

-
-
Return type
-

list

-
-
-
for t in pygame.scrap.get_types():
-    if "text" in t:
-        # There is some content with the word "text" in its type string.
-        print(pygame.scrap.get(t))
-
-
-
- -
-
-pygame.scrap.put()¶
-
-
Places data into the clipboard.
-
put(type, data) -> None
-
-

Places data for a given clipboard type into the clipboard. The data must -be a string buffer. The type is a string identifying the type of data to be -placed into the clipboard. This can be one of the predefined -pygame.SCRAP_PBM, pygame.SCRAP_PPM, pygame.SCRAP_BMP or -pygame.SCRAP_TEXT values or a user defined string identifier.

-
-
Parameters
-
    -
  • type (string) -- type identifier of the data to be placed into the -clipboard

  • -
  • data (bytes) -- data to be placed into the clipboard, a bytes object

  • -
-
-
Raises
-

pygame.error -- if unable to put the data into the clipboard

-
-
-
with open("example.bmp", "rb") as fp:
-    pygame.scrap.put(pygame.SCRAP_BMP, fp.read())
-# The image data is now on the clipboard for other applications to access
-# it.
-pygame.scrap.put(pygame.SCRAP_TEXT, b"A text to copy")
-pygame.scrap.put("Plain text", b"Data for user defined type 'Plain text'")
-
-
-
- -
-
-pygame.scrap.contains()¶
-
-
Checks whether data for a given type is available in the clipboard.
-
contains(type) -> bool
-
-

Checks whether data for the given type is currently available in the -clipboard.

-
-
Parameters
-

type (string) -- data type to check availability of

-
-
Returns
-

True if data for the passed type is available in the -clipboard, False otherwise

-
-
Return type
-

bool

-
-
-
if pygame.scrap.contains(pygame.SCRAP_TEXT):
-    print("There is text in the clipboard.")
-if pygame.scrap.contains("own_data_type"):
-    print("There is stuff in the clipboard.")
-
-
-
- -
-
-pygame.scrap.lost()¶
-
-
Indicates if the clipboard ownership has been lost by the pygame application.
-
lost() -> bool
-
-

Indicates if the clipboard ownership has been lost by the pygame -application.

-
-
Returns
-

True, if the clipboard ownership has been lost by the pygame -application, False if the pygame application still owns the clipboard

-
-
Return type
-

bool

-
-
-
if pygame.scrap.lost():
-    print("The clipboard is in use by another application.")
-
-
-
- -
-
-pygame.scrap.set_mode()¶
-
-
Sets the clipboard access mode.
-
set_mode(mode) -> None
-
-

Sets the access mode for the clipboard. This is only of interest for X11 -environments where clipboard modes pygame.SCRAP_SELECTION (for mouse -selections) and pygame.SCRAP_CLIPBOARD (for the clipboard) are -available. Setting the mode to pygame.SCRAP_SELECTION in other -environments will not change the mode from pygame.SCRAP_CLIPBOARD.

-
-
Parameters
-

mode -- access mode, supported values are pygame.SCRAP_CLIPBOARD -and pygame.SCRAP_SELECTION (pygame.SCRAP_SELECTION only has an -effect when used on X11 platforms)

-
-
Raises
-

ValueError -- if the mode parameter is not -pygame.SCRAP_CLIPBOARD or pygame.SCRAP_SELECTION

-
-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/sdl2_controller.html b/venv/Lib/site-packages/pygame/docs/generated/ref/sdl2_controller.html deleted file mode 100644 index a62ed46..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/sdl2_controller.html +++ /dev/null @@ -1,569 +0,0 @@ - - - - - - - - - pygame._sdl2.controller — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame._sdl2.controller
-
-
Pygame module to work with controllers.
-
-—initialize the controller module
-—Uninitialize the controller module.
-—Returns True if the controller module is initialized.
-—Sets the current state of events related to controllers
-—Gets the current state of events related to controllers
-—Get the number of joysticks connected
-—Check if the given joystick is supported by the game controller interface
-—Get the name of the controller
-—Create a new Controller object.
-

This module offers control over common controller types like the DualShock 4 or the Xbox 360 controllers: they have two analog sticks, two triggers, two shoulder buttons, a dpad, 4 buttons on the side, and 2 (or 3) buttons in the middle.

-

Pygame uses the Xbox controller naming conventions (like a, b, x, y for buttons) but they always refer to the same buttons. For example, CONTROLLER_BUTTON_X is always the leftmost button of the 4 buttons on the right.

-

Controllers can generate the following events:

-
CONTROLLERAXISMOTION, CONTROLLERBUTTONDOWN, CONTROLLERBUTTONUP,
-CONTROLLERDEVICEREMAPPED, CONTROLLERDEVICEADDED, CONTROLLERDEVICEREMOVED
-
-
-

Additionally, if pygame is built with SDL 2.0.14 or higher the following events can also be generated (to get the version of SDL pygame is built with, use pygame.version.SDL()):

-
CONTROLLERTOUCHPADDOWN, CONTROLLERTOUCHPADMOTION, CONTROLLERTOUCHPADUP
-
-
-

These events can be enabled/disabled by pygame._sdl2.controller.set_eventstate(). Note that controllers can generate joystick events as well; this function only toggles events related to controllers.
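As a hedged sketch of handling these events in an ordinary event loop (the event attribute names used in the print call are assumptions based on common pygame event fields, not taken from this page):

import pygame
from pygame._sdl2 import controller

pygame.init()
controller.init()
screen = pygame.display.set_mode((320, 240))

running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.CONTROLLERBUTTONDOWN:
            # 'button' and 'instance_id' are assumed event attributes.
            print("button", event.button, "on controller", event.instance_id)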

-
-

Note

-

See pygame.joystick for a more versatile but more advanced API.

-
-
-

New in pygame 2: This module requires SDL2.

-
-
-
-pygame._sdl2.controller.init()¶
-
-
initialize the controller module
-
init() -> None
-
-

Initialize the controller module.

-
- -
-
-pygame._sdl2.controller.quit()¶
-
-
Uninitialize the controller module.
-
quit() -> None
-
-

Uninitialize the controller module.

-
- -
-
-pygame._sdl2.controller.get_init()¶
-
-
Returns True if the controller module is initialized.
-
get_init() -> bool
-
-

Test if pygame._sdl2.controller.init() was called.

-
-
-
- -
-
-pygame._sdl2.controller.set_eventstate()¶
-
-
Sets the current state of events related to controllers
-
set_eventstate(state) -> None
-
-

Enable or disable events connected to controllers.

-
-

Note

-

Controllers can still generate joystick events, which will not be toggled by this function.

-
-
-

Changed in pygame 2.0.2:: Changed return type from int to None

-
-
- -
-
-pygame._sdl2.controller.get_eventstate()¶
-
-
Gets the current state of events related to controllers
-
get_eventstate() -> bool
-
-

Returns the current state of events related to controllers, True meaning -events will be posted.

-
-

New in pygame 2.0.2.

-
-
- -
-
-pygame._sdl2.controller.get_count()¶
-
-
Get the number of joysticks connected
-
get_count() -> int
-
-

Get the number of joysticks connected.

-
- -
-
-pygame._sdl2.controller.is_controller()¶
-
-
Check if the given joystick is supported by the game controller interface
-
is_controller(index) -> bool
-
-

Returns True if the index given can be used to create a controller object.

-
- -
-
-pygame._sdl2.controller.name_forindex()¶
-
-
Get the name of the controller
-
name_forindex(index) -> name or None
-
-

Returns the name of the controller, or None if there's no name or the index is invalid.

-
- -
-
-pygame._sdl2.controller.Controller¶
-
-
-
Create a new Controller object.
-
Controller(index) -> Controller
-
-

Create a new Controller object. Index should be an integer between 0 and pygame._sdl2.controller.get_count(). Controllers can also be created from a pygame.joystick.Joystick using pygame._sdl2.controller.Controller.from_joystick(). Controllers are initialized on creation.
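A short sketch of opening every connected pad that the game controller interface supports (only functions listed above are used):

import pygame
from pygame._sdl2 import controller

pygame.init()
controller.init()

pads = []
for index in range(controller.get_count()):
    if controller.is_controller(index):              # skip unmapped joysticks
        print(index, controller.name_forindex(index))
        pads.append(controller.Controller(index))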

-
-—uninitialize the Controller
-—check if the Controller is initialized
-—Create a Controller from a pygame.joystick.Joystick object
-—Check if the Controller has been opened and is currently connected.
-—Returns a pygame.joystick.Joystick() object
-—Get the current state of a joystick axis
-—Get the current state of a button
-—Get the mapping assigned to the controller
-—Assign a mapping to the controller
-—Start a rumbling effect
-—Stop any rumble effect playing
-
-
-quit()¶
-
-
uninitialize the Controller
-
quit() -> None
-
-

Close a Controller object. After this the pygame event queue will no longer -receive events from the device.

-

It is safe to call this more than once.

-
- -
-
-get_init()¶
-
-
check if the Controller is initialized
-
get_init() -> bool
-
-

Returns True if the Controller object is currently initialised.

-
- -
-
-static from_joystick()¶
-
-
Create a Controller from a pygame.joystick.Joystick object
-
from_joystick(joystick) -> Controller
-
-

Create a Controller object from a pygame.joystick.Joystick object

-
- -
-
-attached()¶
-
-
Check if the Controller has been opened and is currently connected.
-
attached() -> bool
-
-

Returns True if the Controller object is opened and connected.

-
- -
-
-as_joystick()¶
-
-
Returns a pygame.joystick.Joystick() object
-
as_joystick() -> Joystick object
-
-

Returns a pygame.joystick.Joystick() object created from this controller's index

-
- -
-
-get_axis()¶
-
-
Get the current state of a joystick axis
-
get_axis(axis) -> int
-
-

Get the current state of a trigger or joystick axis. -The axis argument must be one of the following constants:

-
CONTROLLER_AXIS_LEFTX, CONTROLLER_AXIS_LEFTY,
-CONTROLLER_AXIS_RIGHTX, CONTROLLER_AXIS_RIGHTY,
-CONTROLLER_AXIS_TRIGGERLEFT, CONTROLLER_AXIS_TRIGGERRIGHT
-
-
-

Joysticks can return a value between -32768 and 32767. Triggers however can only return a value between 0 and 32768.
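For example, a hedged helper that normalizes the raw left-stick values to floats in -1.0..1.0 (the deadzone is an arbitrary example value; `pad` is assumed to be an opened Controller):

import pygame

DEADZONE = 0.1

def left_stick(pad):
    # Divide by the maximum magnitude to map the integer range to floats.
    x = pad.get_axis(pygame.CONTROLLER_AXIS_LEFTX) / 32768.0
    y = pad.get_axis(pygame.CONTROLLER_AXIS_LEFTY) / 32768.0
    x = 0.0 if abs(x) < DEADZONE else x
    y = 0.0 if abs(y) < DEADZONE else y
    return x, y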

-
- -
-
-get_button()¶
-
-
Get the current state of a button
-
get_button(button) -> bool
-
-

Get the current state of a button, True meaning it is pressed down. -The button argument must be one of the following constants:

-
CONTROLLER_BUTTON_A, CONTROLLER_BUTTON_B,
-CONTROLLER_BUTTON_X, CONTROLLER_BUTTON_Y
-CONTROLLER_BUTTON_DPAD_UP, CONTROLLER_BUTTON_DPAD_DOWN,
-CONTROLLER_BUTTON_DPAD_LEFT, CONTROLLER_BUTTON_DPAD_RIGHT,
-CONTROLLER_BUTTON_LEFTSHOULDER, CONTROLLER_BUTTON_RIGHTSHOULDER,
-CONTROLLER_BUTTON_LEFTSTICK, CONTROLLER_BUTTON_RIGHTSTICK,
-CONTROLLER_BUTTON_BACK, CONTROLLER_BUTTON_GUIDE,
-CONTROLLER_BUTTON_START
-
-
-
- -
-
-get_mapping()¶
-
-
Get the mapping assigned to the controller
-
get_mapping() -> mapping
-
-

Returns a dict containing the mapping of the Controller. For more -information see Controller.set_mapping()

-
-

Changed in pygame 2.0.2:: Return type changed from str to dict

-
-
- -
-
-set_mapping()¶
-
-
Assign a mapping to the controller
-
set_mapping(mapping) -> int
-
-

Rebind buttons, axes, triggers and dpads. The mapping should be a -dict containing all buttons, hats and axes. The easiest way to get this -is to use the dict returned by Controller.get_mapping(). To edit -this mapping assign a value to the original button. The value of the -dictionary must be a button, hat or axis represented in the following way:

-
    -
  • For a button use: bX where X is the index of the button.

  • -
  • For a hat use: hX.Y where X is the index and the Y is the direction (up: 1, right: 2, down: 3, left: 4).

  • -
  • For an axis use: aX where x is the index of the axis.

  • -
-

An example of mapping:

-
mapping = controller.get_mapping() # Get current mapping
-mapping["a"] = "b3" # Remap button a to y
-mapping["y"] = "b0" # Remap button y to a
-controller.set_mapping(mapping) # Set the mapping
-
-
-

The function will return 1 if a new mapping is added or 0 if an existing one is updated.

-
-

Changed in pygame 2.0.2:: Renamed from add_mapping to set_mapping

-
-
-

Changed in pygame 2.0.2:: Argument type changed from str to dict

-
-
- -
-
-rumble()¶
-
-
Start a rumbling effect
-
rumble(low_frequency, high_frequency, duration) -> bool
-
-

Start a rumble effect on the controller, with the specified strength ranging from 0 to 1. Duration is the length of the effect, in ms. Setting the duration to 0 will play the effect until another one overwrites it or Controller.stop_rumble() is called. If an effect is already playing, then it will be overwritten.

-

Returns True if the rumble was played successfully or False if the controller does not support it or pygame.version.SDL() is below 2.0.9.
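A hedged usage sketch, assuming `pad` is an opened Controller object (the strengths and duration are example values):

# Rumble at 50% low-frequency and 100% high-frequency strength for 500 ms.
if pad.rumble(0.5, 1.0, 500):
    pygame.time.wait(500)     # let the effect play out
    pad.stop_rumble()
else:
    print("rumble not supported, or SDL is older than 2.0.9")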

-
-

New in pygame 2.0.2.

-
-
- -
-
-stop_rumble()¶
-
-
Stop any rumble effect playing
-
stop_rumble() -> None
-
-

Stops any rumble effect playing on the controller. See -Controller.rumble() for more information.

-
-

New in pygame 2.0.2.

-
-
- -
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/sdl2_video.html b/venv/Lib/site-packages/pygame/docs/generated/ref/sdl2_video.html deleted file mode 100644 index 7968b16..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/sdl2_video.html +++ /dev/null @@ -1,1091 +0,0 @@ - - - - - - - - - pygame.sdl2_video — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.sdl2_video
-
-

Warning

-

This module isn't ready for prime time yet, it's still in development. These docs are primarily meant to help the pygame developers and super-early adopters who are in communication with the developers. This API will change.

-
-—pygame object that represents a window
-—pygame object representing a Texture.
-—Easy way to use a portion of a Texture without worrying about srcrect all the time.
-—Create a 2D rendering context for a window.
-
-
Experimental pygame module for porting new SDL video systems
-
-
-
-pygame._sdl2.video.Window¶
-
-
pygame object that represents a window
-
Window(title="pygame", size=(640, 480), position=None, fullscreen=False, fullscreen_desktop=False, keywords) -> Window
-
-—Creates window using window created by pygame.display.set_mode().
-—Gets or sets whether the mouse is confined to the window.
-—Gets or sets the window's relative mouse motion state.
-—Enable windowed mode (exit fullscreen).
-—Enter fullscreen.
-—Gets or sets the window title.
-—Destroys the window.
-—Hide the window.
-—Show the window.
-—Raise the window above other windows and set the input focus. The "input_only" argument is only supported on X11.
-—Restore the size and position of a minimized or maximized window.
-—Maximize the window.
-—Minimize the window.
-—Gets and sets whether the window is resizable.
-—Add or remove the border from the window.
-—Set the icon for the window.
-—Get the unique window ID. *Read-only*
-—Gets and sets the window size.
-—Gets and sets the window position.
-—Gets and sets the window opacity. Between 0.0 (fully transparent) and 1.0 (fully opaque).
-—Gets and sets the brightness (gamma multiplier) for the display that owns the window.
-—Get the index of the display that owns the window. *Read-only*
-—Set the window as a modal for a parent window. This function is only supported on X11.
-
-
-classmethod from_display_module()¶
-
-
Creates window using window created by pygame.display.set_mode().
-
from_display_module() -> Window
-
-
- -
-
-grab¶
-
-
Gets or sets whether the mouse is confined to the window.
-
grab -> bool
-
-
- -
-
-relative_mouse¶
-
-
Gets or sets the window's relative mouse motion state.
-
relative_mouse -> bool
-
-
- -
-
-set_windowed()¶
-
-
Enable windowed mode (exit fullscreen).
-
set_windowed() -> None
-
-
- -
-
-set_fullscreen()¶
-
-
Enter fullscreen.
-
set_fullscreen(desktop=False) -> None
-
-
- -
-
-title¶
-
-
Gets or sets the window title.
-
title -> string
-
-
- -
-
-destroy()¶
-
-
Destroys the window.
-
destroy() -> None
-
-
- -
-
-hide()¶
-
-
Hide the window.
-
hide() -> None
-
-
- -
-
-show()¶
-
-
Show the window.
-
show() -> None
-
-
- -
-
-focus()¶
-
-
Raise the window above other windows and set the input focus. The "input_only" argument is only supported on X11.
-
focus(input_only=False) -> None
-
-
- -
-
-restore()¶
-
-
Restore the size and position of a minimized or maximized window.
-
restore() -> None
-
-
- -
-
-maximize()¶
-
-
Maximize the window.
-
maximize() -> None
-
-
- -
-
-minimize()¶
-
-
Minimize the window.
-
minimize() -> None
-
-
- -
-
-resizable¶
-
-
Gets and sets whether the window is resizable.
-
resizable -> bool
-
-
- -
-
-borderless¶
-
-
Add or remove the border from the window.
-
borderless -> bool
-
-
- -
-
-set_icon()¶
-
-
Set the icon for the window.
-
set_icon(surface) -> None
-
-
- -
-
-id¶
-
-
Get the unique window ID. *Read-only*
-
id -> int
-
-
- -
-
-size¶
-
-
Gets and sets the window size.
-
size -> (int, int)
-
-
- -
-
-position¶
-
-
Gets and sets the window position.
-
position -> (int, int) or WINDOWPOS_CENTERED or WINDOWPOS_UNDEFINED
-
-
- -
-
-opacity¶
-
-
Gets and sets the window opacity. Between 0.0 (fully transparent) and 1.0 (fully opaque).
-
opacity -> float
-
-
- -
-
-brightness¶
-
-
Gets and sets the brightness (gamma multiplier) for the display that owns the window.
-
brightness -> float
-
-
- -
-
-display_index¶
-
-
Get the index of the display that owns the window. *Read-only*
-
display_index -> int
-
-
- -
-
-set_modal_for()¶
-
-
Set the window as a modal for a parent window. This function is only supported on X11.
-
set_modal_for(Window) -> None
-
-
- -
- -
-
-pygame._sdl2.video.Texture¶
-
-
pygame object representing a Texture.
-
Texture(renderer, size, depth=0, static=False, streaming=False, target=False) -> Texture
-
-—Create a texture from an existing surface.
-—Gets the renderer associated with the Texture. *Read-only*
-—Gets the width of the Texture. *Read-only*
-—Gets the height of the Texture. *Read-only*
-—Gets and sets an additional alpha value multiplied into render copy operations.
-—Gets and sets the blend mode for the Texture.
-—Gets and sets an additional color value multiplied into render copy operations.
-—Get the rectangular area of the texture.
-—Copy a portion of the texture to the rendering target.
-—Update the texture with a Surface. WARNING: Slow operation, use sparingly.
-
-
-static from_surface()¶
-
-
Create a texture from an existing surface.
-
from_surface(renderer, surface) -> Texture
-
-
- -
-
-renderer¶
-
-
Gets the renderer associated with the Texture. *Read-only*
-
renderer -> Renderer
-
-
- -
-
-width¶
-
-
Gets the width of the Texture. *Read-only*
-
width -> int
-
-
- -
-
-height¶
-
-
Gets the height of the Texture. *Read-only*
-
height -> int
-
-
- -
-
-alpha¶
-
-
Gets and sets an additional alpha value multiplied into render copy operations.
-
alpha -> int
-
-
- -
-
-blend_mode¶
-
-
Gets and sets the blend mode for the Texture.
-
blend_mode -> int
-
-
- -
-
-color¶
-
-
Gets and sets an additional color value multiplied into render copy operations.
-
color -> color
-
-
- -
-
-get_rect()¶
-
-
Get the rectangular area of the texture.
-
get_rect(**kwargs) -> Rect
-
-
- -
-
-draw()¶
-
-
Copy a portion of the texture to the rendering target.
-
draw(srcrect=None, dstrect=None, angle=0, origin=None, flipX=False, flipY=False) -> None
-
-
- -
-
-update()¶
-
-
Update the texture with a Surface. WARNING: Slow operation, use sparingly.
-
update(surface, area=None) -> None
-
-
- -
- -
-
-pygame._sdl2.video.Image¶
-
-
Easy way to use a portion of a Texture without worrying about srcrect all the time.
-
Image(textureOrImage, srcrect=None) -> Image
-
-—Get the rectangular area of the Image.
-—Copy a portion of the Image to the rendering target.
-—Gets and sets the angle the Image draws itself with.
-—Gets and sets the origin. Origin=None means the Image will be rotated around its center.
-—Gets and sets whether the Image is flipped on the x axis.
-—Gets and sets whether the Image is flipped on the y axis.
-—Gets and sets the Image color modifier.
-—Gets and sets the Image alpha modifier.
-—Gets and sets the blend mode for the Image.
-—Gets and sets the Texture the Image is based on.
-—Gets and sets the Rect the Image is based on.
-
-
-get_rect()¶
-
-
Get the rectangular area of the Image.
-
get_rect() -> Rect
-
-
- -
-
-draw()¶
-
-
Copy a portion of the Image to the rendering target.
-
draw(srcrect=None, dstrect=None) -> None
-
-
- -
-
-angle¶
-
-
Gets and sets the angle the Image draws itself with.
-
angle -> float
-
-
- -
-
-origin¶
-
-
Gets and sets the origin. Origin=None means the Image will be rotated around its center.
-
origin -> (float, float) or None.
-
-
- -
-
-flipX¶
-
-
Gets and sets whether the Image is flipped on the x axis.
-
flipX -> bool
-
-
- -
-
-flipY¶
-
-
Gets and sets whether the Image is flipped on the y axis.
-
flipY -> bool
-
-
- -
-
-color¶
-
-
Gets and sets the Image color modifier.
-
color -> Color
-
-
- -
-
-alpha¶
-
-
Gets and sets the Image alpha modifier.
-
alpha -> float
-
-
- -
-
-blend_mode¶
-
-
Gets and sets the blend mode for the Image.
-
blend_mode -> int
-
-
- -
-
-texture¶
-
-
Gets and sets the Texture the Image is based on.
-
texture -> Texture
-
-
- -
-
-srcrect¶
-
-
Gets and sets the Rect the Image is based on.
-
srcrect -> Rect
-
-
- -
- -
-
-pygame._sdl2.video.Renderer¶
-
-
Create a 2D rendering context for a window.
-
Renderer(window, index=-1, accelerated=-1, vsync=False, target_texture=False) -> Renderer
-
-—Easy way to create a Renderer.
-—Gets and sets the blend mode used by the drawing functions.
-—Gets and sets the color used by the drawing functions.
-—Clear the current rendering target with the drawing color.
-—Updates the screen with any new rendering since previous call.
-—Returns the drawing area on the target.
-—Set the drawing area on the target. If area is None, the entire target will be used.
-—Gets and sets the logical size.
-—Gets and sets the scale.
-—Gets and sets the render target. None represents the default target (the renderer).
-—For compatibility purposes. Textures created by different Renderers cannot be shared!
-—Draws a line.
-—Draws a point.
-—Draws a rectangle.
-—Fills a rectangle.
-—Read pixels from current render target and create a pygame.Surface. WARNING: Slow operation, use sparingly.
-
-
-classmethod from_window()¶
-
-
Easy way to create a Renderer.
-
from_window(window) -> Renderer
-
-
- -
-
-draw_blend_mode¶
-
-
Gets and sets the blend mode used by the drawing functions.
-
draw_blend_mode -> int
-
-
- -
-
-draw_color¶
-
-
Gets and sets the color used by the drawing functions.
-
draw_color -> Color
-
-
- -
-
-clear()¶
-
-
Clear the current rendering target with the drawing color.
-
clear() -> None
-
-
- -
-
-present()¶
-
-
Updates the screen with any new rendering since previous call.
-
present() -> None
-
-
- -
-
-get_viewport()¶
-
-
Returns the drawing area on the target.
-
get_viewport() -> Rect
-
-
- -
-
-set_viewport()¶
-
-
Set the drawing area on the target. If area is None, the entire target will be used.
-
set_viewport(area) -> None
-
-
- -
-
-logical_size¶
-
-
Gets and sets the logical size.
-
logical_size -> (int width, int height)
-
-
- -
-
-scale¶
-
-
Gets and sets the scale.
-
scale -> (float x_scale, float y_scale)
-
-
- -
-
-target¶
-
-
Gets and sets the render target. None represents the default target (the renderer).
-
target -> Texture or None
-
-
- -
-
-blit()¶
-
-
For compatibility purposes. Textures created by different Renderers cannot be shared!
-
blit(source, dest, area=None, special_flags=0) -> Rect
-
-
- -
-
-draw_line()¶
-
-
Draws a line.
-
draw_line(p1, p2) -> None
-
-
- -
-
-draw_point()¶
-
-
Draws a point.
-
draw_point(point) -> None
-
-
- -
-
-draw_rect()¶
-
-
Draws a rectangle.
-
draw_rect(rect)-> None
-
-
- -
-
-fill_rect()¶
-
-
Fills a rectangle.
-
fill_rect(rect)-> None
-
-
- -
-
-to_surface()¶
-
-
Read pixels from current render target and create a pygame.Surface. WARNING: Slow operation, use sparingly.
-
to_surface(surface=None, area=None)-> Surface
-
-
- -
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/sndarray.html b/venv/Lib/site-packages/pygame/docs/generated/ref/sndarray.html deleted file mode 100644 index f12d6ee..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/sndarray.html +++ /dev/null @@ -1,274 +0,0 @@ - - - - - - - - - pygame.sndarray — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.sndarray
-
-
pygame module for accessing sound sample data
-
-—copy Sound samples into an array
-—reference Sound samples into an array
-—convert an array into a Sound object
-—Sets the array system to be used for sound arrays
-—Gets the currently active array type.
-—Gets the array system types currently supported.
-

Functions to convert between NumPy arrays and Sound objects. This module will only be functional when pygame can use the external NumPy package. If NumPy can't be imported, sndarray becomes a MissingModule object.

-

Sound data is made of thousands of samples per second, and each sample is the amplitude of the wave at a particular moment in time. For example, in 22-kHz format, element number 5 of the array is the amplitude of the wave after 5/22000 seconds.

-

The arrays are indexed by the X axis first, followed by the Y axis. Each sample is an 8-bit or 16-bit integer, depending on the data format. A stereo sound file has two values per sample, while a mono sound file only has one.
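As a hedged illustration, a short sine-wave beep built with NumPy and make_sound(), assuming the mixer is initialized as 16-bit mono (the frequency, amplitude and pitch are example values):

import numpy as np
import pygame

pygame.mixer.init(frequency=22050, size=-16, channels=1)

rate = 22050                                   # must match the mixer frequency above
t = np.arange(rate) / rate                     # one second of sample times
wave = (0.3 * 32767 * np.sin(2 * np.pi * 440 * t)).astype(np.int16)

beep = pygame.sndarray.make_sound(wave)
beep.play()
pygame.time.wait(1000)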

-
-
-pygame.sndarray.array()¶
-
-
copy Sound samples into an array
-
array(Sound) -> array
-
-

Creates a new array for the sound data and copies the samples. The array -will always be in the format returned from pygame.mixer.get_init().

-
- -
-
-pygame.sndarray.samples()¶
-
-
reference Sound samples into an array
-
samples(Sound) -> array
-
-

Creates a new array that directly references the samples in a Sound object. -Modifying the array will change the Sound. The array will always be in the -format returned from pygame.mixer.get_init().

-
- -
-
-pygame.sndarray.make_sound()¶
-
-
convert an array into a Sound object
-
make_sound(array) -> Sound
-
-

Create a new playable Sound object from an array. The mixer module must be -initialized and the array format must be similar to the mixer audio format.

-
- -
-
-pygame.sndarray.use_arraytype()¶
-
-
Sets the array system to be used for sound arrays
-
use_arraytype (arraytype) -> None
-
-

DEPRECATED: Uses the requested array type for the module functions. The only supported arraytype is 'numpy'. Other values will raise ValueError. Using this function will raise a DeprecationWarning.

-
- -
-
-pygame.sndarray.get_arraytype()¶
-
-
Gets the currently active array type.
-
get_arraytype () -> str
-
-

DEPRECATED: Returns the currently active array type. This will be a value of the -get_arraytypes() tuple and indicates which type of array module is used -for the array creation. Using this function will raise a DeprecationWarning.

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.sndarray.get_arraytypes()¶
-
-
Gets the array system types currently supported.
-
get_arraytypes () -> tuple
-
-

DEPRECATED: Checks which array systems are available and returns them as a tuple of strings. The values of the tuple can be used directly in the pygame.sndarray.use_arraytype() method. If no supported array system could be found, None will be returned. Using this function will raise a DeprecationWarning.

-
-

New in pygame 1.8.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/sprite.html b/venv/Lib/site-packages/pygame/docs/generated/ref/sprite.html deleted file mode 100644 index 653e292..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/sprite.html +++ /dev/null @@ -1,1381 +0,0 @@ - - - - - - - - - pygame.sprite — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.sprite
-
-
pygame module with basic game object classes
-
-—Simple base class for visible game objects.
-—A subclass of Sprite with more attributes and features.
-—A container class to hold and manage multiple Sprite objects.
-—Same as pygame.sprite.Group
-—Same as pygame.sprite.Group
-—Group sub-class that tracks dirty updates.
-—RenderUpdates sub-class that draws Sprites in order of addition.
-—LayeredUpdates is a sprite group that handles layers and draws like OrderedUpdates.
-—LayeredDirty group is for DirtySprite objects. Subclasses LayeredUpdates.
-—Group container that holds a single sprite.
-—Find sprites in a group that intersect another sprite.
-—Collision detection between two sprites, using rects.
-—Collision detection between two sprites, using rects scaled to a ratio.
-—Collision detection between two sprites, using circles.
-—Collision detection between two sprites, using circles scaled to a ratio.
-—Collision detection between two sprites, using masks.
-—Find all sprites that collide between two groups.
-—Simple test if a sprite intersects anything in a group.
-

This module contains several simple classes to be used within games. There is -the main Sprite class and several Group classes that contain Sprites. The use -of these classes is entirely optional when using pygame. The classes are fairly -lightweight and only provide a starting place for the code that is common to -most games.

-

The Sprite class is intended to be used as a base class for the different types -of objects in the game. There is also a base Group class that simply stores -sprites. A game could create new types of Group classes that operate on -specially customized Sprite instances they contain.

-

The basic Sprite class can draw the Sprites it contains to a Surface. The -Group.draw() method requires that each Sprite have a Surface.image -attribute and a Surface.rect. The Group.clear() method requires these -same attributes, and can be used to erase all the Sprites with background. -There are also more advanced Groups: pygame.sprite.RenderUpdates() and -pygame.sprite.OrderedUpdates().

-

Lastly, this module contains several collision functions. These help find -sprites inside multiple groups that have intersecting bounding rectangles. To -find the collisions, the Sprites are required to have a Surface.rect -attribute assigned.

-

The groups are designed for high efficiency in removing and adding Sprites to -them. They also allow cheap testing to see if a Sprite already exists in a -Group. A given Sprite can exist in any number of groups. A game could use some -groups to control object rendering, and a completely separate set of groups to -control interaction or player movement. Instead of adding type attributes or -bools to a derived Sprite class, consider keeping the Sprites inside organized -Groups. This will allow for easier lookup later in the game.

-

Sprites and Groups manage their relationships with the add() and -remove() methods. These methods can accept a single or multiple targets for -membership. The default initializers for these classes also takes a single or -list of targets for initial membership. It is safe to repeatedly add and remove -the same Sprite from a Group.

-

While it is possible to design sprite and group classes that don't derive from -the Sprite and AbstractGroup classes below, it is strongly recommended that you -extend those when you add a Sprite or Group class.

-

Sprites are not thread safe. So lock them yourself if using threads.

-
-
-pygame.sprite.Sprite¶
-
-
Simple base class for visible game objects.
-
Sprite(*groups) -> Sprite
-
-—method to control sprite behavior
-—add the sprite to groups
-—remove the sprite from groups
-—remove the Sprite from all Groups
-—does the sprite belong to any groups
-—list of Groups that contain this Sprite
-

The base class for visible game objects. Derived classes will want to override Sprite.update() and assign Sprite.image and Sprite.rect attributes. The initializer can accept any number of Group instances to be added to.

-

When subclassing the Sprite, be sure to call the base initializer before -adding the Sprite to Groups. For example:

-
class Block(pygame.sprite.Sprite):
-
-    # Constructor. Pass in the color of the block,
-    # and its x and y position
-    def __init__(self, color, width, height):
-       # Call the parent class (Sprite) constructor
-       pygame.sprite.Sprite.__init__(self)
-
-       # Create an image of the block, and fill it with a color.
-       # This could also be an image loaded from the disk.
-       self.image = pygame.Surface([width, height])
-       self.image.fill(color)
-
-       # Fetch the rectangle object that has the dimensions of the image
-       # Update the position of this object by setting the values of rect.x and rect.y
-       self.rect = self.image.get_rect()
-
-
-
-
-update()¶
-
-
method to control sprite behavior
-
update(*args, **kwargs) -> None
-
-

The default implementation of this method does nothing; it's just a -convenient "hook" that you can override. This method is called by -Group.update() with whatever arguments you give it.

-

There is no need to use this method if not using the convenience method -by the same name in the Group class.

-
- -
-
-add()¶
-
-
add the sprite to groups
-
add(*groups) -> None
-
-

Any number of Group instances can be passed as arguments. The Sprite will -be added to the Groups it is not already a member of.

-
- -
-
-remove()¶
-
-
remove the sprite from groups
-
remove(*groups) -> None
-
-

Any number of Group instances can be passed as arguments. The Sprite will -be removed from the Groups it is currently a member of.

-
- -
-
-kill()¶
-
-
remove the Sprite from all Groups
-
kill() -> None
-
-

The Sprite is removed from all the Groups that contain it. This won't -change anything about the state of the Sprite. It is possible to continue -to use the Sprite after this method has been called, including adding it -to Groups.

-
- -
-
-alive()¶
-
-
does the sprite belong to any groups
-
alive() -> bool
-
-

Returns True when the Sprite belongs to one or more Groups.

-
- -
-
-groups()¶
-
-
list of Groups that contain this Sprite
-
groups() -> group_list
-
-

Return a list of all the Groups that contain this Sprite.

-
- -
- -
-
-pygame.sprite.DirtySprite¶
-
-
A subclass of Sprite with more attributes and features.
-
DirtySprite(*groups) -> DirtySprite
-
-

Extra DirtySprite attributes with their default values:

-

dirty = 1

-
if set to 1, it is repainted and then set to 0 again
-if set to 2 then it is always dirty ( repainted each frame,
-flag is not reset)
-0 means that it is not dirty and therefore not repainted again
-
-
-

blendmode = 0

-
its the special_flags argument of blit, blendmodes
-
-
-

source_rect = None

-
source rect to use, remember that it is relative to
-topleft (0,0) of self.image
-
-
-

visible = 1

-
normally 1, if set to 0 it will not be repainted
-(you must set it dirty too to be erased from screen)
-
-
-

layer = 0

-
(READONLY value, it is read when adding it to the
-LayeredDirty, for details see doc of LayeredDirty)
-
-
-
- -
-
-pygame.sprite.Group¶
-
-
A container class to hold and manage multiple Sprite objects.
-
Group(*sprites) -> Group
-
-—list of the Sprites this Group contains
-—duplicate the Group
-—add Sprites to this Group
-—remove Sprites from the Group
-—test if a Group contains Sprites
-—call the update method on contained Sprites
-—blit the Sprite images
-—draw a background over the Sprites
-—remove all Sprites
-

A simple container for Sprite objects. This class can be inherited to create -containers with more specific behaviors. The constructor takes any number of -Sprite arguments to add to the Group. The group supports the following -standard Python operations:

-
in      test if a Sprite is contained
-len     the number of Sprites contained
-bool    test if any Sprites are contained
-iter    iterate through all the Sprites
-
-
-

The Sprites in the Group are ordered only on python 3.6 and higher. -Below python 3.6 drawing and iterating over the Sprites is in no particular order.

-
-
-sprites()¶
-
-
list of the Sprites this Group contains
-
sprites() -> sprite_list
-
-

Return a list of all the Sprites this group contains. You can also get an -iterator from the group, but you cannot iterate over a Group while -modifying it.

-
- -
-
-copy()¶
-
-
duplicate the Group
-
copy() -> Group
-
-

Creates a new Group with all the same Sprites as the original. If you -have subclassed Group, the new object will have the same (sub-)class as -the original. This only works if the derived class's constructor takes -the same arguments as the Group class's.

-
- -
-
-add()¶
-
-
add Sprites to this Group
-
add(*sprites) -> None
-
-

Add any number of Sprites to this Group. This will only add Sprites that -are not already members of the Group.

-

Each sprite argument can also be an iterator containing Sprites.

-
- -
-
-remove()¶
-
-
remove Sprites from the Group
-
remove(*sprites) -> None
-
-

Remove any number of Sprites from the Group. This will only remove -Sprites that are already members of the Group.

-

Each sprite argument can also be an iterator containing Sprites.

-
- -
-
-has()¶
-
-
test if a Group contains Sprites
-
has(*sprites) -> bool
-
-

Return True if the Group contains all of the given sprites. This is -similar to using the "in" operator on the Group ("if sprite in group: -..."), which tests if a single Sprite belongs to a Group.

-

Each sprite argument can also be an iterator containing Sprites.

-
- -
-
-update()¶
-
-
call the update method on contained Sprites
-
update(*args, **kwargs) -> None
-
-

Calls the update() method on all Sprites in the Group. The base -Sprite class has an update method that takes any number of arguments and -does nothing. The arguments passed to Group.update() will be passed -to each Sprite.

-

There is no way to get the return value from the Sprite.update() -methods.

-
- -
-
-draw()¶
-
-
blit the Sprite images
-
draw(Surface) -> List[Rect]
-
-

Draws the contained Sprites to the Surface argument. This uses the -Sprite.image attribute for the source surface, and Sprite.rect -for the position.

-

The Group does not keep sprites in any order, so the draw order is -arbitrary.

-
- -
-
-clear()¶
-
-
draw a background over the Sprites
-
clear(Surface_dest, background) -> None
-
-

Erases the Sprites used in the last Group.draw() call. The -destination Surface is cleared by filling the drawn Sprite positions with -the background.

-

The background is usually a Surface image the same dimensions as the -destination Surface. However, it can also be a callback function that -takes two arguments; the destination Surface and an area to clear. The -background callback function will be called several times each clear.

-

Here is an example callback that will clear the Sprites with solid red:

-
def clear_callback(surf, rect):
    color = 255, 0, 0
    surf.fill(color, rect)
-
-
-
- -
-
-empty()¶
-
-
remove all Sprites
-
empty() -> None
-
-

Removes all Sprites from this Group.

-
- -
- -
-
-pygame.sprite.RenderPlain¶
-
-
Same as pygame.sprite.Group
-
-

This class is an alias to pygame.sprite.Group(). It has no additional functionality.

-
- -
-
-pygame.sprite.RenderClear¶
-
-
Same as pygame.sprite.Group
-
-

This class is an alias to pygame.sprite.Group(). It has no additional functionality.

-
- -
-
-pygame.sprite.RenderUpdates¶
-
-
Group sub-class that tracks dirty updates.
-
RenderUpdates(*sprites) -> RenderUpdates
-
-—blit the Sprite images and track changed areas
-

This class is derived from pygame.sprite.Group(). It has an extended -draw() method that tracks the changed areas of the screen.

-
-
-draw()¶
-
-
blit the Sprite images and track changed areas
-
draw(surface) -> Rect_list
-
-

Draws all the Sprites to the surface, the same as Group.draw(). This -method also returns a list of Rectangular areas on the screen that have -been changed. The returned changes include areas of the screen that have -been affected by previous Group.clear() calls.

-

The returned Rect list should be passed to pygame.display.update(). -This will help performance on software driven display modes. This type of -updating is usually only helpful on destinations with non-animating -backgrounds.

-
- -
- -
-
-pygame.sprite.OrderedUpdates()¶
-
-
RenderUpdates sub-class that draws Sprites in order of addition.
-
OrderedUpdates(*sprites) -> OrderedUpdates
-
-

This class derives from pygame.sprite.RenderUpdates(). It maintains the -order in which the Sprites were added to the Group for rendering. This makes -adding and removing Sprites from the Group a little slower than regular -Groups.

-
- -
-
-pygame.sprite.LayeredUpdates¶
-
-
LayeredUpdates is a sprite group that handles layers and draws like OrderedUpdates.
-
LayeredUpdates(*sprites, **kwargs) -> LayeredUpdates
-
-—add a sprite or sequence of sprites to a group
-—returns an ordered list of sprites (first back, last top).
-—draw all sprites in the right order onto the passed surface.
-—returns a list with all sprites at that position.
-—returns the sprite at the index idx from the groups sprites
-—removes all sprites from a layer and returns them as a list.
-—returns a list of layers defined (unique), sorted from bottom up.
-—changes the layer of the sprite
-—returns the layer that sprite is currently in.
-—returns the top layer
-—returns the bottom layer
-—brings the sprite to front layer
-—moves the sprite to the bottom layer
-—returns the topmost sprite
-—returns all sprites from a layer, ordered by how they were added
-—switches the sprites from layer1 to layer2
-

This group is fully compatible with pygame.sprite.Sprite.

-

You can set the default layer through kwargs using 'default_layer' and an -integer for the layer. The default layer is 0.

-

If the sprite you add has an attribute _layer then that layer will be used. If **kwargs contains 'layer' then the sprites passed will be added to that layer (overriding the sprite's _layer attribute). If neither the sprite has a _layer attribute nor 'layer' is passed in **kwargs, then the default layer is used to add the sprites.
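
A minimal sketch of how the layer is resolved when adding sprites; the Enemy class and the layer numbers are illustrative assumptions, not part of the API:

import pygame

class Enemy(pygame.sprite.Sprite):
    def __init__(self):
        super().__init__()
        self._layer = 2                      # picked up automatically when added
        self.image = pygame.Surface((8, 8))
        self.rect = self.image.get_rect()

group = pygame.sprite.LayeredUpdates(default_layer=1)
group.add(Enemy())                           # uses the sprite's _layer attribute (2)
group.add(pygame.sprite.Sprite(), layer=5)   # the 'layer' kwarg overrides everything
print(group.layers())                        # e.g. [2, 5]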

-
-

New in pygame 1.8.

-
-
-
-add()¶
-
-
add a sprite or sequence of sprites to a group
-
add(*sprites, **kwargs) -> None
-
-

If the sprite(s) have an attribute layer then that is used for the -layer. If **kwargs contains 'layer' then the sprite(s) will be added -to that argument (overriding the sprite layer attribute). If neither is -passed then the sprite(s) will be added to the default layer.

-
- -
-
-sprites()¶
-
-
returns an ordered list of sprites (first back, last top).
-
sprites() -> sprites
-
-
- -
-
-draw()¶
-
-
draw all sprites in the right order onto the passed surface.
-
draw(surface) -> Rect_list
-
-
- -
-
-get_sprites_at()¶
-
-
returns a list with all sprites at that position.
-
get_sprites_at(pos) -> colliding_sprites
-
-

Bottom sprites first, top last.

-
- -
-
-get_sprite()¶
-
-
returns the sprite at the index idx from the groups sprites
-
get_sprite(idx) -> sprite
-
-

Raises IndexOutOfBounds if the idx is not within range.

-
- -
-
-remove_sprites_of_layer()¶
-
-
removes all sprites from a layer and returns them as a list.
-
remove_sprites_of_layer(layer_nr) -> sprites
-
-
- -
-
-layers()¶
-
-
returns a list of layers defined (unique), sorted from bottom up.
-
layers() -> layers
-
-
- -
-
-change_layer()¶
-
-
changes the layer of the sprite
-
change_layer(sprite, new_layer) -> None
-
-

sprite must have been added to the renderer. It is not checked.

-
- -
-
-get_layer_of_sprite()¶
-
-
returns the layer that sprite is currently in.
-
get_layer_of_sprite(sprite) -> layer
-
-

If the sprite is not found then it will return the default layer.

-
- -
-
-get_top_layer()¶
-
-
returns the top layer
-
get_top_layer() -> layer
-
-
- -
-
-get_bottom_layer()¶
-
-
returns the bottom layer
-
get_bottom_layer() -> layer
-
-
- -
-
-move_to_front()¶
-
-
brings the sprite to front layer
-
move_to_front(sprite) -> None
-
-

Brings the sprite to front, changing sprite layer to topmost layer (added -at the end of that layer).

-
- -
-
-move_to_back()¶
-
-
moves the sprite to the bottom layer
-
move_to_back(sprite) -> None
-
-

Moves the sprite to the bottom layer, moving it behind all other layers -and adding one additional layer.

-
- -
-
-get_top_sprite()¶
-
-
returns the topmost sprite
-
get_top_sprite() -> Sprite
-
-
- -
-
-get_sprites_from_layer()¶
-
-
returns all sprites from a layer, ordered by how they were added
-
get_sprites_from_layer(layer) -> sprites
-
-

Returns all sprites from a layer, ordered by how they were added. It uses linear search and the sprites are not removed from the layer.

-
- -
-
-switch_layer()¶
-
-
switches the sprites from layer1 to layer2
-
switch_layer(layer1_nr, layer2_nr) -> None
-
-

The layer numbers must exist; this is not checked.

-
- -
- -
-
-pygame.sprite.LayeredDirty¶
-
-
LayeredDirty group is for DirtySprite objects. Subclasses LayeredUpdates.
-
LayeredDirty(*sprites, **kwargs) -> LayeredDirty
-
-—draw all sprites in the right order onto the passed surface.
-—used to set background
-—repaints the given area
-—clip the area where to draw. Just pass None (default) to reset the clip
-—clip the area where to draw. Just pass None (default) to reset the clip
-—changes the layer of the sprite
-—sets the threshold in milliseconds
-—sets the threshold in milliseconds
-

This group requires pygame.sprite.DirtySprite or any sprite that has the following attributes:

-
image, rect, dirty, visible, blendmode (see doc of DirtySprite).
-
-
-

It uses the dirty flag technique and is therefore faster than pygame.sprite.RenderUpdates if you have many static sprites. It also switches automatically between dirty rect updates and full screen drawing, so you do not have to worry about which would be faster.

-

Same as for pygame.sprite.Group. You can specify some additional attributes through kwargs:

-
_use_update: True/False   default is False
_default_layer: default layer where sprites without a layer are added.
_time_threshold: threshold time for switching between dirty rect mode
    and fullscreen mode, defaults to 1000./80 == 1000./fps
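
As a hedged sketch of the keyword spellings listed above (the values shown are arbitrary, and the default layer can also be set as described for LayeredUpdates):

import pygame

dirty_group = pygame.sprite.LayeredDirty(
    _use_update=False,               # start in full-screen drawing mode
    _time_threshold=1000.0 / 80.0,   # switch point between dirty-rect and flip mode
)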
-
-
-
-

New in pygame 1.8.

-
-
-
-draw()¶
-
-
draw all sprites in the right order onto the passed surface.
-
draw(surface, bgd=None) -> Rect_list
-
-

You can pass the background too. If a background is already set, then the -bgd argument has no effect.

-
- -
-
-clear()¶
-
-
used to set background
-
clear(surface, bgd) -> None
-
-
- -
-
-repaint_rect()¶
-
-
repaints the given area
-
repaint_rect(screen_rect) -> None
-
-

screen_rect is in screen coordinates.

-
- -
-
-set_clip()¶
-
-
clip the area where to draw. Just pass None (default) to reset the clip
-
set_clip(screen_rect=None) -> None
-
-
- -
-
-get_clip()¶
-
-
clip the area where to draw. Just pass None (default) to reset the clip
-
get_clip() -> Rect
-
-
- -
-
-change_layer()¶
-
-
changes the layer of the sprite
-
change_layer(sprite, new_layer) -> None
-
-

sprite must have been added to the renderer. It is not checked.

-
- -
-
-set_timing_treshold()¶
-
-
sets the threshold in milliseconds
-
set_timing_treshold(time_ms) -> None
-
-

DEPRECATED: Use set_timing_threshold() instead.

-
-

Deprecated since pygame 2.1.1.

-
-
- -
-
-set_timing_threshold()¶
-
-
sets the threshold in milliseconds
-
set_timing_threshold(time_ms) -> None
-
-

Defaults to 1000.0 / 80.0. This means that the screen will be painted -using the flip method rather than the update method if the update -method is taking so long to update the screen that the frame rate falls -below 80 frames per second.

-
-

New in pygame 2.1.1.

-
-
-
Raises
-

TypeError -- if time_ms is not int or float

-
-
-
- -
- -
-
-pygame.sprite.GroupSingle()¶
-
-
Group container that holds a single sprite.
-
GroupSingle(sprite=None) -> GroupSingle
-
-

The GroupSingle container only holds a single Sprite. When a new Sprite is -added, the old one is removed.

-

There is a special property, GroupSingle.sprite, that accesses the -Sprite that this Group contains. It can be None when the Group is empty. The -property can also be assigned to add a Sprite into the GroupSingle -container.
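
A short sketch of the replacement behaviour and of the sprite property; the two plain Sprite objects are placeholders:

import pygame

first = pygame.sprite.Sprite()
second = pygame.sprite.Sprite()

single = pygame.sprite.GroupSingle(first)
single.add(second)              # adding a new Sprite removes the old one
print(single.sprite is second)  # True
print(first in single)          # False -- only one Sprite is ever held

single.sprite = first           # the property can also be assigned
print(len(single))              # 1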

-
- -
-
-pygame.sprite.spritecollide()¶
-
-
Find sprites in a group that intersect another sprite.
-
spritecollide(sprite, group, dokill, collided = None) -> Sprite_list
-
-

Return a list containing all Sprites in a Group that intersect with another -Sprite. Intersection is determined by comparing the Sprite.rect -attribute of each Sprite.

-

The dokill argument is a bool. If set to True, all Sprites that collide will -be removed from the Group.

-

The collided argument is a callback function used to calculate if two sprites are colliding. It should take two sprites as values, and return a bool value indicating if they are colliding. If collided is not passed, all sprites must have a "rect" value, which is a rectangle of the sprite area, which will be used to calculate the collision.

-

collided callables:

-
collide_rect, collide_rect_ratio, collide_circle,
-collide_circle_ratio, collide_mask
-
-
-

Example:

-
# See if the Sprite block has collided with anything in the Group block_list
# The True flag will remove the sprite in block_list
blocks_hit_list = pygame.sprite.spritecollide(player, block_list, True)

# Check the list of colliding sprites, and add one to the score for each one
for block in blocks_hit_list:
    score += 1
-
-
-
- -
-
-pygame.sprite.collide_rect()¶
-
-
Collision detection between two sprites, using rects.
-
collide_rect(left, right) -> bool
-
-

Tests for collision between two sprites. Uses the pygame rect colliderect function to calculate the collision. Intended to be passed as a collided callback function to the *collide functions. Sprites must have a "rect" attribute.

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.sprite.collide_rect_ratio()¶
-
-
Collision detection between two sprites, using rects scaled to a ratio.
-
collide_rect_ratio(ratio) -> collided_callable
-
-

A callable class that checks for collisions between two sprites, using a -scaled version of the sprites rects.

-

Is created with a ratio, the instance is then intended to be passed as a -collided callback function to the *collide functions.

-

A ratio is a floating point number: 1.0 is the same size, 2.0 is twice as big, and 0.5 is half the size.
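
For instance, an instance of this class can be handed to spritecollide() as the collided callback; player and enemy_group are assumed placeholder objects:

# Only count hits where the rects, shrunk to 75% of their size, overlap.
hits = pygame.sprite.spritecollide(
    player, enemy_group, False, pygame.sprite.collide_rect_ratio(0.75)
)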

-
-

New in pygame 1.8.1.

-
-
- -
-
-pygame.sprite.collide_circle()¶
-
-
Collision detection between two sprites, using circles.
-
collide_circle(left, right) -> bool
-
-

Tests for collision between two sprites, by testing to see if two circles centered on the sprites overlap. If the sprites have a "radius" attribute, that is used to create the circle, otherwise a circle is created that is big enough to completely enclose the sprite's rect as given by the "rect" attribute. Intended to be passed as a collided callback function to the *collide functions. Sprites must have a "rect" and an optional "radius" attribute.

-
-

New in pygame 1.8.1.

-
-
- -
-
-pygame.sprite.collide_circle_ratio()¶
-
-
Collision detection between two sprites, using circles scaled to a ratio.
-
collide_circle_ratio(ratio) -> collided_callable
-
-

A callable class that checks for collisions between two sprites, using a -scaled version of the sprites radius.

-

Is created with a floating point ratio, the instance is then intended to be -passed as a collided callback function to the *collide functions.

-

A ratio is a floating point number - 1.0 is the same size, 2.0 is twice as -big, and 0.5 is half the size.

-

The created callable tests for collision between two sprites, by testing to -see if two circles centered on the sprites overlap, after scaling the -circles radius by the stored ratio. If the sprites have a "radius" -attribute, that is used to create the circle, otherwise a circle is created -that is big enough to completely enclose the sprites rect as given by the -"rect" attribute. Intended to be passed as a collided callback function to -the *collide functions. Sprites must have a "rect" and an optional "radius" -attribute.

-
-

New in pygame 1.8.1.

-
-
- -
-
-pygame.sprite.collide_mask()¶
-
-
Collision detection between two sprites, using masks.
-
collide_mask(sprite1, sprite2) -> (int, int)
-
collide_mask(sprite1, sprite2) -> None
-
-

Tests for collision between two sprites, by testing if their bitmasks overlap (uses pygame.mask.Mask.overlap()). If the sprites have a mask attribute, it is used as the mask, otherwise a mask is created from the sprite's image (uses pygame.mask.from_surface()). Sprites must have a rect attribute; the mask attribute is optional.

-

The first point of collision between the masks is returned. The collision -point is offset from sprite1's mask's topleft corner (which is always -(0, 0)). The collision point is a position within the mask and is not -related to the actual screen position of sprite1.

-

This function is intended to be passed as a collided callback function -to the group collide functions (see spritecollide(), -groupcollide(), spritecollideany()).

-
-

Note

-

To increase performance, create and set a mask attribute for all sprites that will use this function to check for collisions. Otherwise, each time this function is called it will create new masks.

-
-
-

Note

-

A new mask needs to be recreated each time a sprite's image is changed -(e.g. if a new image is used or the existing image is rotated).

-
-
# Example of mask creation for a sprite.
sprite.mask = pygame.mask.from_surface(sprite.image)
-
-
-
-
Returns
-

first point of collision between the masks or None if no -collision

-
-
Return type
-

tuple(int, int) or NoneType

-
-
-
-

New in pygame 1.8.0.

-
-
- -
-
-pygame.sprite.groupcollide()¶
-
-
Find all sprites that collide between two groups.
-
groupcollide(group1, group2, dokill1, dokill2, collided = None) -> Sprite_dict
-
-

This will find collisions between all the Sprites in two groups. -Collision is determined by comparing the Sprite.rect attribute of -each Sprite or by using the collided function if it is not None.

-

Every Sprite inside group1 is added to the return dictionary. The value for -each item is the list of Sprites in group2 that intersect.

-

If either dokill argument is True, the colliding Sprites will be removed -from their respective Group.

-

The collided argument is a callback function used to calculate if two sprites are -colliding. It should take two sprites as values and return a bool value -indicating if they are colliding. If collided is not passed, then all -sprites must have a "rect" value, which is a rectangle of the sprite area, -which will be used to calculate the collision.
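
A sketch of typical usage, assuming two pre-built groups named bullets and enemies and a score counter:

# Kill both the bullet and the enemy on contact.
hits = pygame.sprite.groupcollide(bullets, enemies, True, True)
for bullet, hit_enemies in hits.items():
    # every bullet that hit something maps to the list of enemies it touched
    score += len(hit_enemies)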

-
- -
-
-pygame.sprite.spritecollideany()¶
-
-
Simple test if a sprite intersects anything in a group.
-
spritecollideany(sprite, group, collided = None) -> Sprite Collision with the returned sprite.
-
spritecollideany(sprite, group, collided = None) -> None No collision
-
-

If the sprite collides with any single sprite in the group, a single -sprite from the group is returned. On no collision None is returned.

-

If you don't need all the features of the pygame.sprite.spritecollide() function, this -function will be a bit quicker.

-

The collided argument is a callback function used to calculate if two sprites are -colliding. It should take two sprites as values and return a bool value -indicating if they are colliding. If collided is not passed, then all -sprites must have a "rect" value, which is a rectangle of the sprite area, -which will be used to calculate the collision.
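
A sketch, assuming a player sprite, a group of walls, and a saved old_rect from before the move:

if pygame.sprite.spritecollideany(player, walls) is not None:
    # at least one wall overlaps the player; which one is not specified
    player.rect = old_rect  # e.g. undo the last movement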

-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/surface.html b/venv/Lib/site-packages/pygame/docs/generated/ref/surface.html deleted file mode 100644 index 8a5da1c..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/surface.html +++ /dev/null @@ -1,1290 +0,0 @@ - - - - - - - - - pygame.Surface — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.Surface¶
-
-
pygame object for representing images
-
Surface((width, height), flags=0, depth=0, masks=None) -> Surface
-
Surface((width, height), flags=0, Surface) -> Surface
-
-—draw one image onto another
-—draw many images onto another
-—change the pixel format of an image
-—change the pixel format of an image including per pixel alphas
-—create a new copy of a Surface
-—fill Surface with a solid color
-—Shift the surface image in place
-—Set the transparent colorkey
-—Get the current transparent colorkey
-—set the alpha value for the full Surface image
-—get the current Surface transparency value
-—lock the Surface memory for pixel access
-—unlock the Surface memory from pixel access
-—test if the Surface requires locking
-—test if the Surface is current locked
-—Gets the locks for the Surface
-—get the color value at a single pixel
-—set the color value for a single pixel
-—get the mapped color value at a single pixel
-—get the color index palette for an 8-bit Surface
-—get the color for a single entry in a palette
-—set the color palette for an 8-bit Surface
-—set the color for a single index in an 8-bit Surface palette
-—convert a color into a mapped color value
-—convert a mapped integer color value into a Color
-—set the current clipping area of the Surface
-—get the current clipping area of the Surface
-—create a new surface that references its parent
-—find the parent of a subsurface
-—find the top level parent of a subsurface
-—find the position of a child subsurface inside a parent
-—find the absolute position of a child subsurface inside its top level parent
-—get the dimensions of the Surface
-—get the width of the Surface
-—get the height of the Surface
-—get the rectangular area of the Surface
-—get the bit depth of the Surface pixel format
-—get the bytes used per Surface pixel
-—get the additional flags used for the Surface
-—get the number of bytes used per Surface row
-—the bitmasks needed to convert between a color and a mapped integer
-—set the bitmasks needed to convert between a color and a mapped integer
-—the bit shifts needed to convert between a color and a mapped integer
-—sets the bit shifts needed to convert between a color and a mapped integer
-—the significant bits used to convert between a color and a mapped integer
-—find the smallest rect containing data
-—return a buffer view of the Surface's pixels.
-—acquires a buffer object for the pixels of the Surface.
-—pixel buffer address
-

A pygame Surface is used to represent any image. The Surface has a fixed -resolution and pixel format. Surfaces with 8-bit pixels use a color palette -to map to 24-bit color.

-

Call pygame.Surface() to create a new image object. The Surface will be cleared to all black. The only required argument is the size. With no additional arguments, the Surface will be created in a format that best matches the display Surface.

-

The pixel format can be controlled by passing the bit depth or an existing -Surface. The flags argument is a bitmask of additional features for the -surface. You can pass any combination of these flags:

-
HWSURFACE    (obsolete in pygame 2) creates the image in video memory
-SRCALPHA     the pixel format will include a per-pixel alpha
-
-
-

Both flags are only a request, and may not be possible for all displays and -formats.
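
For example, a brief sketch of creating a plain Surface and a per-pixel-alpha Surface (the sizes and colors are arbitrary):

import pygame

plain = pygame.Surface((64, 64))                          # format best matches the display Surface
translucent = pygame.Surface((64, 64), pygame.SRCALPHA)   # includes a per-pixel alpha channel
translucent.fill((255, 0, 0, 128))                        # half-transparent red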

-

Advanced users can combine a set of bitmasks with a depth value. The masks are a set of 4 integers representing which bits in a pixel will represent each color. Normal Surfaces should not require the masks argument.

-

Surfaces can have many extra attributes like alpha planes, colorkeys, and source rectangle clipping. These functions mainly affect how the Surface is blitted to other Surfaces. The blit routines will attempt to use hardware acceleration when possible, otherwise they will use highly optimized software blitting methods.

-

There are three types of transparency supported in pygame: colorkeys, -surface alphas, and pixel alphas. Surface alphas can be mixed with -colorkeys, but an image with per pixel alphas cannot use the other modes. -Colorkey transparency makes a single color value transparent. Any pixels -matching the colorkey will not be drawn. The surface alpha value is a single -value that changes the transparency for the entire image. A surface alpha of -255 is opaque, and a value of 0 is completely transparent.

-

Per pixel alphas are different because they store a transparency value for every pixel. This allows for the most precise transparency effects, but it is also the slowest. Per pixel alphas cannot be mixed with surface alpha and colorkeys.
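
A brief sketch contrasting the first two modes (the surfaces here are placeholders; a per-pixel-alpha surface would instead be created with SRCALPHA as shown earlier):

# Colorkey transparency: every pure-black pixel becomes invisible when blitted.
sprite_img = pygame.Surface((32, 32))
sprite_img.set_colorkey((0, 0, 0))

# Surface alpha: the whole image is blitted at roughly 50% opacity.
ghost_img = pygame.Surface((32, 32))
ghost_img.set_alpha(128)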

-

There is support for pixel access for the Surfaces. Pixel access on hardware surfaces is slow and not recommended. Pixels can be accessed using the get_at() and set_at() functions. These methods are fine for simple access, but will be considerably slower when doing a lot of pixel work with them. If you plan on doing a lot of pixel level work, it is recommended to use a pygame.PixelArray, which gives an array like view of the surface. For involved mathematical manipulations try the pygame.surfarray module (it's quite quick, but requires NumPy).

-

Any functions that directly access a surface's pixel data will need that -surface to be lock()'ed. These functions can lock() and -unlock() the surfaces themselves without assistance. But, if a -function will be called many times, there will be a lot of overhead for -multiple locking and unlocking of the surface. It is best to lock the -surface manually before making the function call many times, and then -unlocking when you are finished. All functions that need a locked surface -will say so in their docs. Remember to leave the Surface locked only while -necessary.
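
A sketch of the manual lock/unlock pattern described above; surf stands for any Surface you are working on:

surf.lock()                      # lock once before many pixel operations
try:
    for x in range(surf.get_width()):
        surf.set_at((x, 0), (255, 255, 255))  # would otherwise lock/unlock per call
finally:
    surf.unlock()                # always release the lock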

-

Surface pixels are stored internally as a single number that has all the -colors encoded into it. Use the map_rgb() and -unmap_rgb() to convert between individual red, green, and blue -values into a packed integer for that Surface.

-

Surfaces can also reference sections of other Surfaces. These are created with the subsurface() method. Any change to either Surface will affect the other.

-

Each Surface contains a clipping area. By default the clip area covers the entire Surface. If it is changed, all drawing operations will only affect the smaller area.

-
-
-blit()¶
-
-
draw one image onto another
-
blit(source, dest, area=None, special_flags=0) -> Rect
-
-

Draws a source Surface onto this Surface. The draw can be positioned with the dest argument. The dest argument can either be a pair of coordinates representing the position of the upper left corner of the blit or a Rect, where the upper left corner of the rectangle will be used as the position for the blit. The size of the destination rectangle does not affect the blit.

-

An optional area rectangle can be passed as well. This represents a -smaller portion of the source Surface to draw.
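
For example (screen and ball are assumed, pre-existing Surfaces):

# Blit the whole ball image with its top-left corner at (100, 50).
screen.blit(ball, (100, 50))

# Blit only the top-left 16x16 region of the ball image to the same spot.
screen.blit(ball, (100, 50), pygame.Rect(0, 0, 16, 16))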

-
-

New in pygame 1.8: Optional special_flags: BLEND_ADD, BLEND_SUB, -BLEND_MULT, BLEND_MIN, BLEND_MAX.

-
-
-

New in pygame 1.8.1: Optional special_flags: BLEND_RGBA_ADD, BLEND_RGBA_SUB, -BLEND_RGBA_MULT, BLEND_RGBA_MIN, BLEND_RGBA_MAX -BLEND_RGB_ADD, BLEND_RGB_SUB, BLEND_RGB_MULT, -BLEND_RGB_MIN, BLEND_RGB_MAX.

-
-
-

New in pygame 1.9.2: Optional special_flags: BLEND_PREMULTIPLIED

-
-
-

New in pygame 2.0.0: Optional special_flags: BLEND_ALPHA_SDL2 - Uses the SDL2 blitter for alpha blending, -this gives different results than the default blitter, which is modelled after SDL1, due to -different approximations used for the alpha blending formula. The SDL2 blitter also supports -RLE on alpha blended surfaces which the pygame one does not.

-
-

The return rectangle is the area of the affected pixels, excluding any -pixels outside the destination Surface, or outside the clipping area.

-

Pixel alphas will be ignored when blitting to an 8 bit Surface.

-

For a surface with colorkey or blanket alpha, a blit to self may give -slightly different colors than a non self-blit.

-
- -
-
-blits()¶
-
-
draw many images onto another
-
blits(blit_sequence=((source, dest), ...), doreturn=1) -> [Rect, ...] or None
-
blits(((source, dest, area), ...)) -> [Rect, ...]
-
blits(((source, dest, area, special_flags), ...)) -> [Rect, ...]
-
-

Draws many surfaces onto this Surface. It takes a sequence as input, -with each of the elements corresponding to the ones of blit(). -It needs at minimum a sequence of (source, dest).

-
-
Parameters
-
    -
  • blit_sequence -- a sequence of surfaces and arguments to blit them, -they correspond to the blit() arguments

  • -
  • doreturn -- if True, return a list of rects of the areas changed, -otherwise return None

  • -
-
-
Returns
-

a list of rects of the areas changed if doreturn is -True, otherwise None

-
-
Return type
-

list or None

-
-
-

New in pygame 1.9.4.

-
- -
-
-convert()¶
-
-
change the pixel format of an image
-
convert(Surface=None) -> Surface
-
convert(depth, flags=0) -> Surface
-
convert(masks, flags=0) -> Surface
-
-

Creates a new copy of the Surface with the pixel format changed. The new pixel format can be determined from another existing Surface. Otherwise depth, flags, and masks arguments can be used, similar to the pygame.Surface() call.

-

If no arguments are passed the new Surface will have the same pixel -format as the display Surface. This is always the fastest format for -blitting. It is a good idea to convert all Surfaces before they are -blitted many times.

-

The converted Surface will have no pixel alphas. They will be stripped if -the original had them. See convert_alpha() for preserving or -creating per-pixel alphas.

-

The new copy will have the same class as the copied surface. This lets a Surface subclass inherit this method without the need to override it, unless subclass specific instance attributes also need copying.

-
- -
-
-convert_alpha()¶
-
-
change the pixel format of an image including per pixel alphas
-
convert_alpha(Surface) -> Surface
-
convert_alpha() -> Surface
-
-

Creates a new copy of the surface with the desired pixel format. The new -surface will be in a format suited for quick blitting to the given format -with per pixel alpha. If no surface is given, the new surface will be -optimized for blitting to the current display.

-

Unlike the convert() method, the pixel format for the new -image will not be exactly the same as the requested source, but it will -be optimized for fast alpha blitting to the destination.

-

As with convert() the returned surface has the same class as -the converted surface.

-
- -
-
-copy()¶
-
-
create a new copy of a Surface
-
copy() -> Surface
-
-

Makes a duplicate copy of a Surface. The new surface will have the same -pixel formats, color palettes, transparency settings, and class as the -original. If a Surface subclass also needs to copy any instance specific -attributes then it should override copy().

-
- -
-
-fill()¶
-
-
fill Surface with a solid color
-
fill(color, rect=None, special_flags=0) -> Rect
-
-

Fill the Surface with a solid color. If no rect argument is given the -entire Surface will be filled. The rect argument will limit the fill to a -specific area. The fill will also be contained by the Surface clip area.

-

The color argument can be either an RGB sequence, an RGBA sequence or a mapped color index. If using RGBA, the Alpha (the A part of RGBA) is ignored unless the surface uses per pixel alpha (the Surface has the SRCALPHA flag).
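
For example (screen is an assumed, pre-existing Surface):

screen.fill((0, 0, 0))                                           # clear the whole Surface to black
changed = screen.fill((0, 200, 0), pygame.Rect(10, 10, 50, 50))  # fill only a 50x50 region
# 'changed' is the Rect of the area that was actually modified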

-
-

New in pygame 1.8: Optional special_flags: BLEND_ADD, BLEND_SUB, -BLEND_MULT, BLEND_MIN, BLEND_MAX.

-
-
-

New in pygame 1.8.1: Optional special_flags: BLEND_RGBA_ADD, BLEND_RGBA_SUB, -BLEND_RGBA_MULT, BLEND_RGBA_MIN, BLEND_RGBA_MAX -BLEND_RGB_ADD, BLEND_RGB_SUB, BLEND_RGB_MULT, -BLEND_RGB_MIN, BLEND_RGB_MAX.

-
-

This will return the affected Surface area.

-
- -
-
-scroll()¶
-
-
Shift the surface image in place
-
scroll(dx=0, dy=0) -> None
-
-

Move the image by dx pixels right and dy pixels down. dx and dy may be -negative for left and up scrolls respectively. Areas of the surface that -are not overwritten retain their original pixel values. Scrolling is -contained by the Surface clip area. It is safe to have dx and dy values -that exceed the surface size.

-
-

New in pygame 1.9.

-
-
- -
-
-set_colorkey()¶
-
-
Set the transparent colorkey
-
set_colorkey(Color, flags=0) -> None
-
set_colorkey(None) -> None
-
-

Set the current color key for the Surface. When blitting this Surface -onto a destination, any pixels that have the same color as the colorkey -will be transparent. The color can be an RGB color or a mapped color -integer. If None is passed, the colorkey will be unset.

-

The colorkey will be ignored if the Surface is formatted to use per pixel -alpha values. The colorkey can be mixed with the full Surface alpha -value.

-

The optional flags argument can be set to pygame.RLEACCEL to provide -better performance on non accelerated displays. An RLEACCEL Surface -will be slower to modify, but quicker to blit as a source.

-
- -
-
-get_colorkey()¶
-
-
Get the current transparent colorkey
-
get_colorkey() -> RGB or None
-
-

Return the current colorkey value for the Surface. If the colorkey is not -set then None is returned.

-
- -
-
-set_alpha()¶
-
-
set the alpha value for the full Surface image
-
set_alpha(value, flags=0) -> None
-
set_alpha(None) -> None
-
-

Set the current alpha value for the Surface. When blitting this Surface -onto a destination, the pixels will be drawn slightly transparent. The -alpha value is an integer from 0 to 255, 0 is fully transparent and 255 -is fully opaque. If None is passed for the alpha value, then alpha -blending will be disabled, including per-pixel alpha.

-

This value is different than the per pixel Surface alpha. For a surface -with per pixel alpha, blanket alpha is ignored and None is returned.

-
-

Changed in pygame 2.0: per-surface alpha can be combined with per-pixel -alpha.

-
-

The optional flags argument can be set to pygame.RLEACCEL to provide -better performance on non accelerated displays. An RLEACCEL Surface -will be slower to modify, but quicker to blit as a source.

-
- -
-
-get_alpha()¶
-
-
get the current Surface transparency value
-
get_alpha() -> int_value
-
-

Return the current alpha value for the Surface.

-
- -
-
-lock()¶
-
-
lock the Surface memory for pixel access
-
lock() -> None
-
-

Lock the pixel data of a Surface for access. On accelerated Surfaces, the -pixel data may be stored in volatile video memory or nonlinear compressed -forms. When a Surface is locked the pixel memory becomes available to -access by regular software. Code that reads or writes pixel values will -need the Surface to be locked.

-

Surfaces should not remain locked for more than necessary. A locked -Surface can often not be displayed or managed by pygame.

-

Not all Surfaces require locking. The mustlock() method can -determine if it is actually required. There is no performance penalty for -locking and unlocking a Surface that does not need it.

-

All pygame functions will automatically lock and unlock the Surface data -as needed. If a section of code is going to make calls that will -repeatedly lock and unlock the Surface many times, it can be helpful to -wrap the block inside a lock and unlock pair.

-

It is safe to nest locking and unlocking calls. The surface will only be -unlocked after the final lock is released.

-
- -
-
-unlock()¶
-
-
unlock the Surface memory from pixel access
-
unlock() -> None
-
-

Unlock the Surface pixel data after it has been locked. The unlocked -Surface can once again be drawn and managed by pygame. See the -lock() documentation for more details.

-

All pygame functions will automatically lock and unlock the Surface data -as needed. If a section of code is going to make calls that will -repeatedly lock and unlock the Surface many times, it can be helpful to -wrap the block inside a lock and unlock pair.

-

It is safe to nest locking and unlocking calls. The surface will only be -unlocked after the final lock is released.

-
- -
-
-mustlock()¶
-
-
test if the Surface requires locking
-
mustlock() -> bool
-
-

Returns True if the Surface is required to be locked to access pixel -data. Usually pure software Surfaces do not require locking. This method -is rarely needed, since it is safe and quickest to just lock all Surfaces -as needed.

-

All pygame functions will automatically lock and unlock the Surface data -as needed. If a section of code is going to make calls that will -repeatedly lock and unlock the Surface many times, it can be helpful to -wrap the block inside a lock and unlock pair.

-
- -
-
-get_locked()¶
-
-
test if the Surface is current locked
-
get_locked() -> bool
-
-

Returns True when the Surface is locked. It doesn't matter how many -times the Surface is locked.

-
- -
-
-get_locks()¶
-
-
Gets the locks for the Surface
-
get_locks() -> tuple
-
-

Returns the currently existing locks for the Surface.

-
- -
-
-get_at()¶
-
-
get the color value at a single pixel
-
get_at((x, y)) -> Color
-
-

Return a copy of the RGBA Color value at the given pixel. If the -Surface has no per pixel alpha, then the alpha value will always be 255 -(opaque). If the pixel position is outside the area of the Surface an -IndexError exception will be raised.

-

Getting and setting pixels one at a time is generally too slow to be used in a game or realtime situation. It is better to use methods which operate on many pixels at a time, like the blit, fill and draw methods, or by using pygame.surfarray / pygame.PixelArray.

-

This function will temporarily lock and unlock the Surface as needed.

-
-

New in pygame 1.9: Returning a Color instead of tuple. Use tuple(surf.get_at((x,y))) -if you want a tuple, and not a Color. This should only matter if -you want to use the color as a key in a dict.

-
-
- -
-
-set_at()¶
-
-
set the color value for a single pixel
-
set_at((x, y), Color) -> None
-
-

Set the RGBA or mapped integer color value for a single pixel. If the -Surface does not have per pixel alphas, the alpha value is ignored. -Setting pixels outside the Surface area or outside the Surface clipping -will have no effect.

-

Getting and setting pixels one at a time is generally too slow to be used -in a game or realtime situation.

-

This function will temporarily lock and unlock the Surface as needed.

-
- -
-
-get_at_mapped()¶
-
-
get the mapped color value at a single pixel
-
get_at_mapped((x, y)) -> Color
-
-

Return the integer value of the given pixel. If the pixel position is -outside the area of the Surface an IndexError exception will be -raised.

-

This method is intended for pygame unit testing. It is unlikely to have any use in an application.

-

This function will temporarily lock and unlock the Surface as needed.

-
-

New in pygame 1.9.2.

-
-
- -
-
-get_palette()¶
-
-
get the color index palette for an 8-bit Surface
-
get_palette() -> [RGB, RGB, RGB, ...]
-
-

Return a list of up to 256 color elements that represent the indexed -colors used in an 8-bit Surface. The returned list is a copy of the -palette, and changes will have no effect on the Surface.

-

Returning a list of Color(with length 3) instances instead of tuples.

-
-

New in pygame 1.9.

-
-
- -
-
-get_palette_at()¶
-
-
get the color for a single entry in a palette
-
get_palette_at(index) -> RGB
-
-

Returns the red, green, and blue color values for a single index in a -Surface palette. The index should be a value from 0 to 255.

-
-

New in pygame 1.9: Returning Color(with length 3) instance instead of a tuple.

-
-
- -
-
-set_palette()¶
-
-
set the color palette for an 8-bit Surface
-
set_palette([RGB, RGB, RGB, ...]) -> None
-
-

Set the full palette for an 8-bit Surface. This will replace the colors in -the existing palette. A partial palette can be passed and only the first -colors in the original palette will be changed.

-

This function has no effect on a Surface with more than 8-bits per pixel.

-
- -
-
-set_palette_at()¶
-
-
set the color for a single index in an 8-bit Surface palette
-
set_palette_at(index, RGB) -> None
-
-

Set the palette value for a single entry in a Surface palette. The index -should be a value from 0 to 255.

-

This function has no effect on a Surface with more than 8-bits per pixel.

-
- -
-
-map_rgb()¶
-
-
convert a color into a mapped color value
-
map_rgb(Color) -> mapped_int
-
-

Convert an RGBA color into the mapped integer value for this Surface. -The returned integer will contain no more bits than the bit depth of the -Surface. Mapped color values are not often used inside pygame, but can be -passed to most functions that require a Surface and a color.

-

See the Surface object documentation for more information about colors -and pixel formats.

-
- -
-
-unmap_rgb()¶
-
-
convert a mapped integer color value into a Color
-
unmap_rgb(mapped_int) -> Color
-
-

Convert an mapped integer color into the RGB color components for -this Surface. Mapped color values are not often used inside pygame, but -can be passed to most functions that require a Surface and a color.

-

See the Surface object documentation for more information about colors -and pixel formats.

-
- -
-
-set_clip()¶
-
-
set the current clipping area of the Surface
-
set_clip(rect) -> None
-
set_clip(None) -> None
-
-

Each Surface has an active clipping area. This is a rectangle that -represents the only pixels on the Surface that can be modified. If -None is passed for the rectangle the full Surface will be available -for changes.

-

The clipping area is always restricted to the area of the Surface itself. -If the clip rectangle is too large it will be shrunk to fit inside the -Surface.

-
- -
-
-get_clip()¶
-
-
get the current clipping area of the Surface
-
get_clip() -> Rect
-
-

Return a rectangle of the current clipping area. The Surface will always -return a valid rectangle that will never be outside the bounds of the -image. If the Surface has had None set for the clipping area, the -Surface will return a rectangle with the full area of the Surface.

-
- -
-
-subsurface()¶
-
-
create a new surface that references its parent
-
subsurface(Rect) -> Surface
-
-

Returns a new Surface that shares its pixels with its new parent. The new Surface is considered a child of the original. Modifications to the pixels of either Surface will affect the other. Surface information like clipping area and color keys are unique to each Surface.

-

The new Surface will inherit the palette, color key, and alpha settings -from its parent.

-

It is possible to have any number of subsurfaces and subsubsurfaces on -the parent. It is also possible to subsurface the display Surface if the -display mode is not hardware accelerated.

-

See get_offset() and get_parent() to learn more -about the state of a subsurface.

-

A subsurface will have the same class as the parent surface.
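
A sketch of pulling one frame out of a sprite sheet with a subsurface (sheet is an assumed, already-loaded Surface):

# Each change to 'frame' also changes the corresponding pixels of 'sheet'.
frame = sheet.subsurface(pygame.Rect(0, 0, 32, 32))
print(frame.get_offset())           # (0, 0) -- position inside the parent
print(frame.get_parent() is sheet)  # True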

-
- -
-
-get_parent()¶
-
-
find the parent of a subsurface
-
get_parent() -> Surface
-
-

Returns the parent Surface of a subsurface. If this is not a subsurface -then None will be returned.

-
- -
-
-get_abs_parent()¶
-
-
find the top level parent of a subsurface
-
get_abs_parent() -> Surface
-
-

Returns the parent Surface of a subsurface. If this is not a subsurface -then this surface will be returned.

-
- -
-
-get_offset()¶
-
-
find the position of a child subsurface inside a parent
-
get_offset() -> (x, y)
-
-

Get the offset position of a child subsurface inside of a parent. If the -Surface is not a subsurface this will return (0, 0).

-
- -
-
-get_abs_offset()¶
-
-
find the absolute position of a child subsurface inside its top level parent
-
get_abs_offset() -> (x, y)
-
-

Get the offset position of a child subsurface inside of its top level -parent Surface. If the Surface is not a subsurface this will return (0, -0).

-
- -
-
-get_size()¶
-
-
get the dimensions of the Surface
-
get_size() -> (width, height)
-
-

Return the width and height of the Surface in pixels.

-
- -
-
-get_width()¶
-
-
get the width of the Surface
-
get_width() -> width
-
-

Return the width of the Surface in pixels.

-
- -
-
-get_height()¶
-
-
get the height of the Surface
-
get_height() -> height
-
-

Return the height of the Surface in pixels.

-
- -
-
-get_rect()¶
-
-
get the rectangular area of the Surface
-
get_rect(**kwargs) -> Rect
-
-

Returns a new rectangle covering the entire surface. This rectangle will -always start at (0, 0) with a width and height the same size as the image.

-

You can pass keyword argument values to this function. These named values -will be applied to the attributes of the Rect before it is returned. An -example would be mysurf.get_rect(center=(100, 100)) to create a -rectangle for the Surface centered at a given position.

-
- -
-
-get_bitsize()¶
-
-
get the bit depth of the Surface pixel format
-
get_bitsize() -> int
-
-

Returns the number of bits used to represent each pixel. This value may -not exactly fill the number of bytes used per pixel. For example a 15 bit -Surface still requires a full 2 bytes.

-
- -
-
-get_bytesize()¶
-
-
get the bytes used per Surface pixel
-
get_bytesize() -> int
-
-

Return the number of bytes used per pixel.

-
- -
-
-get_flags()¶
-
-
get the additional flags used for the Surface
-
get_flags() -> int
-
-

Returns a set of current Surface features. Each feature is a bit in the -flags bitmask. Typical flags are RLEACCEL, SRCALPHA, and -SRCCOLORKEY.

-

Here is a more complete list of flags. A full list can be found in -SDL_video.h

-
SWSURFACE      0x00000000    # Surface is in system memory
-HWSURFACE      0x00000001    # (obsolete in pygame 2) Surface is in video memory
-ASYNCBLIT      0x00000004    # (obsolete in pygame 2) Use asynchronous blits if possible
-
-
-

See pygame.display.set_mode() for flags exclusive to the display surface.

-

Used internally (read-only)

-
HWACCEL        0x00000100    # Blit uses hardware acceleration
-SRCCOLORKEY    0x00001000    # Blit uses a source color key
-RLEACCELOK     0x00002000    # Private flag
-RLEACCEL       0x00004000    # Surface is RLE encoded
-SRCALPHA       0x00010000    # Blit uses source alpha blending
-PREALLOC       0x01000000    # Surface uses preallocated memory
-
-
-
- -
-
-get_pitch()¶
-
-
get the number of bytes used per Surface row
-
get_pitch() -> int
-
-

Return the number of bytes separating each row in the Surface. Surfaces -in video memory are not always linearly packed. Subsurfaces will also -have a larger pitch than their real width.

-

This value is not needed for normal pygame usage.

-
- -
-
-get_masks()¶
-
-
the bitmasks needed to convert between a color and a mapped integer
-
get_masks() -> (R, G, B, A)
-
-

Returns the bitmasks used to isolate each color in a mapped integer.

-

This value is not needed for normal pygame usage.

-
- -
-
-set_masks()¶
-
-
set the bitmasks needed to convert between a color and a mapped integer
-
set_masks((r,g,b,a)) -> None
-
-

This is not needed for normal pygame usage.

-
-

Note

-

In SDL2, the masks are read-only and accordingly this method will raise -an AttributeError if called.

-
-
-

New in pygame 1.8.1.

-
-
- -
-
-get_shifts()¶
-
-
the bit shifts needed to convert between a color and a mapped integer
-
get_shifts() -> (R, G, B, A)
-
-

Returns the pixel shifts needed to convert between each color and a mapped integer.

-

This value is not needed for normal pygame usage.

-
- -
-
-set_shifts()¶
-
-
sets the bit shifts needed to convert between a color and a mapped integer
-
set_shifts((r,g,b,a)) -> None
-
-

This is not needed for normal pygame usage.

-
-

Note

-

In SDL2, the shifts are read-only and accordingly this method will raise -an AttributeError if called.

-
-
-

New in pygame 1.8.1.

-
-
- -
-
-get_losses()¶
-
-
the significant bits used to convert between a color and a mapped integer
-
get_losses() -> (R, G, B, A)
-
-

Return the least significant number of bits stripped from each color in a -mapped integer.

-

This value is not needed for normal pygame usage.

-
- -
-
-get_bounding_rect()¶
-
-
find the smallest rect containing data
-
get_bounding_rect(min_alpha = 1) -> Rect
-
-

Returns the smallest rectangular region that contains all the pixels in -the surface that have an alpha value greater than or equal to the minimum -alpha value.

-

This function will temporarily lock and unlock the Surface as needed.

-
-

New in pygame 1.8.

-
-
- -
-
-get_view()¶
-
-
return a buffer view of the Surface's pixels.
-
get_view(<kind>='2') -> BufferProxy
-
-

Return an object which exports a surface's internal pixel buffer as -a C level array struct, Python level array interface or a C level -buffer interface. The new buffer protocol is supported.

-

The kind argument is the length 1 string '0', '1', '2', '3', -'r', 'g', 'b', or 'a'. The letters are case insensitive; -'A' will work as well. The argument can be either a Unicode or byte (char) -string. The default is '2'.

-

'0' returns a contiguous unstructured bytes view. No surface shape -information is given. A ValueError is raised if the surface's pixels -are discontinuous.

-

'1' returns a (surface-width * surface-height) array of continuous -pixels. A ValueError is raised if the surface pixels are -discontinuous.

-

'2' returns a (surface-width, surface-height) array of raw pixels. -The pixels are surface-bytesize-d unsigned integers. The pixel format is -surface specific. The 3 byte unsigned integers of 24 bit surfaces are -unlikely accepted by anything other than other pygame functions.

-

'3' returns a (surface-width, surface-height, 3) array of RGB color -components. Each of the red, green, and blue components are unsigned -bytes. Only 24-bit and 32-bit surfaces are supported. The color -components must be in either RGB or BGR order within the pixel.

-

'r' for red, 'g' for green, 'b' for blue, and 'a' for alpha return a -(surface-width, surface-height) view of a single color component within a -surface: a color plane. Color components are unsigned bytes. Both 24-bit -and 32-bit surfaces support 'r', 'g', and 'b'. Only 32-bit surfaces with -SRCALPHA support 'a'.

-

The surface is locked only when an exposed interface is accessed. -For new buffer interface accesses, the surface is unlocked once the -last buffer view is released. For array interface and old buffer -interface accesses, the surface remains locked until the BufferProxy -object is released.

-
-

New in pygame 1.9.2.

-
-
- -
-
-get_buffer()¶
-
-
acquires a buffer object for the pixels of the Surface.
-
get_buffer() -> BufferProxy
-
-

Return a buffer object for the pixels of the Surface. The buffer can be -used for direct pixel access and manipulation. Surface pixel data is -represented as an unstructured block of memory, with a start address -and length in bytes. The data need not be contiguous. Any gaps are -included in the length, but otherwise ignored.

-

This method implicitly locks the Surface. The lock will be released when the returned pygame.BufferProxy object is garbage collected.

-
-

New in pygame 1.8.

-
-
- -
-
-_pixels_address¶
-
-
pixel buffer address
-
_pixels_address -> int
-
-

The starting address of the surface's raw pixel bytes.

-
-

New in pygame 1.9.2.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/surfarray.html b/venv/Lib/site-packages/pygame/docs/generated/ref/surfarray.html deleted file mode 100644 index fb22728..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/surfarray.html +++ /dev/null @@ -1,571 +0,0 @@ - - - - - - - - - pygame.surfarray — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.surfarray
-
-
pygame module for accessing surface pixel data using array interfaces
-
-—Copy pixels into a 2d array
-—Reference pixels into a 2d array
-—Copy pixels into a 3d array
-—Reference pixels into a 3d array
-—Copy pixel alphas into a 2d array
-—Reference pixel alphas into a 2d array
-—Copy red pixels into a 2d array
-—Reference pixel red into a 2d array.
-—Copy green pixels into a 2d array
-—Reference pixel green into a 2d array.
-—Copy blue pixels into a 2d array
-—Reference pixel blue into a 2d array.
-—Copy the colorkey values into a 2d array
-—Copy an array to a new surface
-—Blit directly from a array values
-—Map a 3d array into a 2d array
-—Sets the array system to be used for surface arrays
-—Gets the currently active array type.
-—Gets the array system types currently supported.
-

Functions to convert between NumPy arrays and Surface objects. This module -will only be functional when pygame can use the external NumPy package. -If NumPy can't be imported, surfarray becomes a MissingModule object.

-

Every pixel is stored as a single integer value to represent the red, green, -and blue colors. The 8-bit images use a value that looks into a colormap. Pixels -with higher depth use a bit packing process to place three or four values into -a single number.

-

The arrays are indexed by the X axis first, followed by the Y axis. -Arrays that treat the pixels as a single integer are referred to as 2D arrays. -This module can also separate the red, green, and blue color values into -separate indices. These types of arrays are referred to as 3D arrays, and the -last index is 0 for red, 1 for green, and 2 for blue.

-

The pixels of a 2D array as returned by array2d() and pixels2d() are mapped to the specific surface. Use pygame.Surface.unmap_rgb() to convert to a color, and pygame.Surface.map_rgb() to get the surface specific pixel value of a color. Integer pixel values can only be used directly between surfaces with matching pixel layouts (see pygame.Surface).

-

All functions that refer to "array" will copy the surface information to a new numpy array. All functions that refer to "pixels" will directly reference the pixels from the surface, and any changes performed to the array will make changes in the surface. Since these "pixels" functions share memory with the surface, the surface will be locked for the lifetime of the array.
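
A brief sketch of the copy/reference distinction (surf is an assumed 32-bit Surface; NumPy must be installed):

import pygame.surfarray as surfarray

copied = surfarray.array3d(surf)     # independent copy; editing it leaves surf alone
view = surfarray.pixels3d(surf)      # shared memory; surf stays locked while it exists
view[..., 0] = 255                   # pushes the red channel of every pixel to full
del view                             # release the reference so surf unlocks again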

-
-
-pygame.surfarray.array2d()¶
-
-
Copy pixels into a 2d array
-
array2d(Surface) -> array
-
-

Copy the mapped (raw) pixels from a Surface -into a 2D array. -The bit depth of the surface will control the size of the integer values, -and will work for any type of pixel format.

-

This function will temporarily lock the Surface as pixels are copied (see the pygame.Surface.lock() method).

-
- -
-
-pygame.surfarray.pixels2d()¶
-
-
Reference pixels into a 2d array
-
pixels2d(Surface) -> array
-
-

Create a new 2D array that directly references the pixel values in a -Surface. Any changes to the array will affect the pixels in the Surface. -This is a fast operation since no data is copied.

-

Pixels from a 24-bit Surface cannot be referenced, but all other Surface bit -depths can.

-

The Surface this references will remain locked for the lifetime of the array, since the array generated by this function shares memory with the surface. See the pygame.Surface.lock() method.

-
- -
-
-pygame.surfarray.array3d()¶
-
-
Copy pixels into a 3d array
-
array3d(Surface) -> array
-
-

Copy the pixels from a Surface into a 3D array. The bit depth of the surface -will control the size of the integer values, and will work for any type of -pixel format.

-

This function will temporarily lock the Surface as pixels are copied (see the pygame.Surface.lock() method).

-
- -
-
-pygame.surfarray.pixels3d()¶
-
-
Reference pixels into a 3d array
-
pixels3d(Surface) -> array
-
-

Create a new 3D array that directly references the pixel values in a -Surface. Any changes to the array will affect the pixels in the Surface. -This is a fast operation since no data is copied.

-

This will only work on Surfaces that have 24-bit or 32-bit formats. Lower -pixel formats cannot be referenced.

-

The Surface this references will remain locked for the lifetime of the array, since the array generated by this function shares memory with the surface. See the pygame.Surface.lock() method.

-
- -
-
-pygame.surfarray.array_alpha()¶
-
-
Copy pixel alphas into a 2d array
-
array_alpha(Surface) -> array
-
-

Copy the pixel alpha values (degree of transparency) from a Surface into a -2D array. This will work for any type of Surface format. Surfaces without a -pixel alpha will return an array with all opaque values.

-

This function will temporarily lock the Surface as pixels are copied (see the pygame.Surface.lock() method).

-
- -
-
-pygame.surfarray.pixels_alpha()¶
-
-
Reference pixel alphas into a 2d array
-
pixels_alpha(Surface) -> array
-
-

Create a new 2D array that directly references the alpha values (degree of -transparency) in a Surface. Any changes to the array will affect the pixels -in the Surface. This is a fast operation since no data is copied.

-

This can only work on 32-bit Surfaces with a per-pixel alpha value.

-

The Surface this references will remain locked for the lifetime of the array, since the array generated by this function shares memory with the surface. See the pygame.Surface.lock() method.

-
- -
-
-pygame.surfarray.array_red()¶
-
-
Copy red pixels into a 2d array
-
array_red(Surface) -> array
-
-

Copy the pixel red values from a Surface into a 2D array. This will work -for any type of Surface format.

-

This function will temporarily lock the Surface as pixels are copied (see the pygame.Surface.lock() method).

-
-

New in pygame 2.0.2.

-
-
- -
-
-pygame.surfarray.pixels_red()¶
-
-
Reference pixel red into a 2d array.
-
pixels_red (Surface) -> array
-
-

Create a new 2D array that directly references the red values in a Surface. -Any changes to the array will affect the pixels in the Surface. This is a -fast operation since no data is copied.

-

This can only work on 24-bit or 32-bit Surfaces.

-

The Surface this references will remain locked for the lifetime of the array, since the array generated by this function shares memory with the surface. See the pygame.Surface.lock() method.

-
- -
-
-pygame.surfarray.array_green()¶
-
-
Copy green pixels into a 2d array
-
array_green(Surface) -> array
-
-

Copy the pixel green values from a Surface into a 2D array. This will work -for any type of Surface format.

-

This function will temporarily lock the Surface as pixels are copied (see the pygame.Surface.lock() method).

-
-

New in pygame 2.0.2.

-
-
- -
-
-pygame.surfarray.pixels_green()¶
-
-
Reference pixel green into a 2d array.
-
pixels_green (Surface) -> array
-
-

Create a new 2D array that directly references the green values in a -Surface. Any changes to the array will affect the pixels in the Surface. -This is a fast operation since no data is copied.

-

This can only work on 24-bit or 32-bit Surfaces.

-

The Surface this references will remain locked for the lifetime of the array, since the array generated by this function shares memory with the surface. See the pygame.Surface.lock() method.

-
- -
-
-pygame.surfarray.array_blue()¶
-
-
Copy blue pixels into a 2d array
-
array_blue(Surface) -> array
-
-

Copy the pixel blue values from a Surface into a 2D array. This will work -for any type of Surface format.

-

This function will temporarily lock the Surface as pixels are copied (see the pygame.Surface.lock() method).

-
-

New in pygame 2.0.2.

-
-
- -
-
-pygame.surfarray.pixels_blue()¶
-
-
Reference pixel blue into a 2d array.
-
pixels_blue (Surface) -> array
-
-

Create a new 2D array that directly references the blue values in a Surface. -Any changes to the array will affect the pixels in the Surface. This is a -fast operation since no data is copied.

-

This can only work on 24-bit or 32-bit Surfaces.

-

The Surface this references will remain locked for the lifetime of the array, since the array generated by this function shares memory with the surface. See the pygame.Surface.lock() method.

-
- -
-
-pygame.surfarray.array_colorkey()¶
-
-
Copy the colorkey values into a 2d array
-
array_colorkey(Surface) -> array
-
-

Create a new array with the colorkey transparency value from each pixel. If -the pixel matches the colorkey it will be fully transparent; otherwise it -will be fully opaque.

-

This will work on any type of Surface format. If the image has no colorkey a -solid opaque array will be returned.

-

This function will temporarily lock the Surface as pixels are copied.

-
- -
-
-pygame.surfarray.make_surface()¶
-
-
Copy an array to a new surface
-
make_surface(array) -> Surface
-
-

Create a new Surface that best resembles the data and format of the array. The array can be 2D or 3D with any sized integer values. Function make_surface uses the array struct interface to acquire array properties, so it is not limited to just NumPy arrays. See the pygame.pixelcopy module for general pixel array copying.

-

New in pygame 1.9.2: array struct interface support.

-
- -
-
-pygame.surfarray.blit_array()¶
-
-
Blit directly from array values
-
blit_array(Surface, array) -> None
-
-

Directly copy values from an array into a Surface. This is faster than -converting the array into a Surface and blitting. The array must be the same -dimensions as the Surface and will completely replace all pixel values. Only -integer, ASCII character and record arrays are accepted.

-

This function will temporarily lock the Surface as the new values are -copied.
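A rough sketch combining both calls; the gradient array is only an illustration, and any integer array of a matching shape would do:

    import numpy as np
    import pygame
    import pygame.surfarray as surfarray

    # A (width, height, 3) uint8 array fading from black to red along x.
    arr = np.zeros((320, 240, 3), dtype=np.uint8)
    arr[..., 0] = np.linspace(0, 255, 320, dtype=np.uint8)[:, None]

    gradient = surfarray.make_surface(arr)       # allocate a new Surface from the array

    target = pygame.Surface((320, 240))          # or reuse an existing, same-sized Surface
    surfarray.blit_array(target, arr)            # and copy the array values into it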

-
- -
-
-pygame.surfarray.map_array()¶
-
-
Map a 3d array into a 2d array
-
map_array(Surface, array3d) -> array2d
-
-

Convert a 3D array into a 2D array. This will use the given Surface format -to control the conversion. Palette surface formats are supported for NumPy -arrays.
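A minimal sketch (the 4x4 size and the green fill are arbitrary):

    import numpy as np
    import pygame
    import pygame.surfarray as surfarray

    surf = pygame.Surface((4, 4), depth=32)     # its pixel format drives the mapping

    rgb = np.zeros((4, 4, 3), dtype=np.uint8)
    rgb[..., 1] = 255                           # pure green everywhere

    mapped = surfarray.map_array(surf, rgb)     # 2D array of mapped pixel integers
    print(hex(int(mapped[0, 0])))               # e.g. 0xff00 on a common 32-bit format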

-
- -
-
-pygame.surfarray.use_arraytype()¶
-
-
Sets the array system to be used for surface arrays
-
use_arraytype (arraytype) -> None
-
-

DEPRECATED: Uses the requested array type for the module functions. -The only supported arraytype is 'numpy'. Other values will raise -ValueError. Using this function will raise a DeprecationWarning.

-
- -
-
-pygame.surfarray.get_arraytype()¶
-
-
Gets the currently active array type.
-
get_arraytype () -> str
-
-

DEPRECATED: Returns the currently active array type. This will be a value of the -get_arraytypes() tuple and indicates which type of array module is used -for the array creation. Using this function will raise a DeprecationWarning.

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.surfarray.get_arraytypes()¶
-
-
Gets the array system types currently supported.
-
get_arraytypes () -> tuple
-
-

DEPRECATED: Checks which array systems are available and returns them as a tuple of strings. The values of the tuple can be used directly in the pygame.surfarray.use_arraytype() method. If no supported array system could be found, None will be returned. Using this function will raise a DeprecationWarning.

-
-

New in pygame 1.8.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/tests.html b/venv/Lib/site-packages/pygame/docs/generated/ref/tests.html deleted file mode 100644 index ad9bc67..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/tests.html +++ /dev/null @@ -1,241 +0,0 @@ - - - - - - - - - pygame.tests — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.tests
-
-
Pygame unit test suite package
-
pygame.tests.run() — Run the pygame unit test suite
-

A quick way to run the test suite package from the command line is to import -the go submodule with the Python -m option:

-
python -m pygame.tests [<test options>]
-
-
-

Command line option --help displays a usage message. Available options correspond to the pygame.tests.run() arguments.

-

The xxxx_test submodules of the tests package are unit test suites for -individual parts of pygame. Each can also be run as a main program. This is -useful if the test, such as cdrom_test, is interactive.

-

For pygame development the test suite can be run from a pygame distribution -root directory. Program run_tests.py is provided for convenience, though -test/go.py can be run directly.

-

Module level tags control which modules are included in a unit test run. Tags -are assigned to a unit test module with a corresponding <name>_tags.py module. -The tags module has the global __tags__, a list of tag names. For example, -cdrom_test.py has a tag file cdrom_tags.py containing a tags list that -has the 'interactive' string. The 'interactive' tag indicates cdrom_test.py -expects user input. It is excluded from a run_tests.py or -pygame.tests.go run. Two other tags that are excluded are 'ignore' and -'subprocess_ignore'. These two tags indicate unit tests that will not run on a -particular platform, or for which no corresponding pygame module is available. -The test runner will list each excluded module along with the tag responsible.

-
-
-pygame.tests.run()¶
-
-
Run the pygame unit test suite
-
run(*args, **kwds) -> tuple
-
-

Positional arguments (optional):

-
The names of tests to include. If omitted then all tests are run. Test names
-need not include the trailing '_test'.
-
-
-

Keyword arguments:

-
incomplete - fail incomplete tests (default False)
-nosubprocess - run all test suites in the current process
-               (default False, use separate subprocesses)
-dump - dump failures/errors as dict ready to eval (default False)
-file - if provided, the name of a file into which to dump failures/errors
-timings - if provided, the number of times to run each individual test to
-          get an average run time (default is run each test once)
-exclude - A list of TAG names to exclude from the run
-show_output - show silenced stderr/stdout on errors (default False)
-all - dump all results, not just errors (default False)
-randomize - randomize order of tests (default False)
-seed - if provided, a seed randomizer integer
-multi_thread - if provided, the number of THREADS in which to run
-               subprocessed tests
-time_out - if subprocess is True then the time limit in seconds before
-           killing a test (default 30)
-fake - if provided, the name of the fake tests package in the
-       run_tests__tests subpackage to run instead of the normal
-       pygame tests
-python - the path to a python executable to run subprocessed tests
-         (default sys.executable)
-
-
-

Return value:

-
A tuple of total number of tests run, dictionary of error information.
-The dictionary is empty if no errors were recorded.
-
-
-

By default individual test modules are run in separate subprocesses. This recreates normal pygame usage where pygame.init() and pygame.quit() are called only once per program execution, and avoids unfortunate interactions between test modules. Also, a time limit is placed on test execution, so frozen tests are killed when their time allotment has expired. Use the single process option if threading is not working properly or if tests are taking too long. It is not guaranteed that all tests will pass in single process mode.
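A hypothetical invocation from Python using the keyword arguments listed above (the chosen suite names are only examples):

    import pygame.tests

    # Run just the rect and surface suites in the current process, in random order.
    total, errors = pygame.tests.run("rect", "surface", nosubprocess=True, randomize=True)
    print(total, "tests run,", len(errors), "problems")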

-

Tests are run in a randomized order if the randomize argument is True or a -seed argument is provided. If no seed integer is provided then the system -time is used.

-

Individual test modules may have a __tags__ attribute, a list of tag strings -used to selectively omit modules from a run. By default only 'interactive' -modules such as cdrom_test are ignored. An interactive module must be run -from the console as a Python program.

-

This function can only be called once per Python session. It is not -reentrant.

-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/time.html b/venv/Lib/site-packages/pygame/docs/generated/ref/time.html deleted file mode 100644 index 112d5f7..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/time.html +++ /dev/null @@ -1,370 +0,0 @@ - - - - - - - - - pygame.time — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.time
-
-
pygame module for monitoring time
-
pygame.time.get_ticks() — get the time in milliseconds
pygame.time.wait() — pause the program for an amount of time
pygame.time.delay() — pause the program for an amount of time
pygame.time.set_timer() — repeatedly create an event on the event queue
pygame.time.Clock — create an object to help track time
-

Times in pygame are represented in milliseconds (1/1000 seconds). Most -platforms have a limited time resolution of around 10 milliseconds. This -resolution, in milliseconds, is given in the TIMER_RESOLUTION constant.

-
-
-pygame.time.get_ticks()¶
-
-
get the time in milliseconds
-
get_ticks() -> milliseconds
-
-

Return the number of milliseconds since pygame.init() was called. Before -pygame is initialized this will always be 0.

-
- -
-
-pygame.time.wait()¶
-
-
pause the program for an amount of time
-
wait(milliseconds) -> time
-
-

Will pause for a given number of milliseconds. This function sleeps the -process to share the processor with other programs. A program that waits for -even a few milliseconds will consume very little processor time. It is -slightly less accurate than the pygame.time.delay() function.

-

This returns the actual number of milliseconds used.

-
- -
-
-pygame.time.delay()¶
-
-
pause the program for an amount of time
-
delay(milliseconds) -> time
-
-

Will pause for a given number of milliseconds. This function will use the -processor (rather than sleeping) in order to make the delay more accurate -than pygame.time.wait().

-

This returns the actual number of milliseconds used.

-
- -
-
-pygame.time.set_timer()¶
-
-
repeatedly create an event on the event queue
-
set_timer(event, millis) -> None
-
set_timer(event, millis, loops=0) -> None
-
-

Set an event to appear on the event queue every given number of milliseconds. -The first event will not appear until the amount of time has passed.

-

The event attribute can be a pygame.event.Event object or an integer -type that denotes an event.

-

loops is an integer that denotes the number of events posted. If 0 (default) -then the events will keep getting posted, unless explicitly stopped.

-

To disable the timer for such an event, call the function again with the same -event argument with millis argument set to 0.

-

It is also worth mentioning that a particular event type can only be put on a -timer once. In other words, there cannot be two timers for the same event type. -Setting an event timer for a particular event discards the old one for that -event type.
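A minimal event-loop sketch; the event name and the one-second interval are arbitrary:

    import pygame

    pygame.init()
    pygame.display.set_mode((200, 200))

    SPAWN = pygame.event.custom_type()
    pygame.time.set_timer(SPAWN, 1000)            # post SPAWN roughly once a second
    # pygame.time.set_timer(SPAWN, 1000, 5)       # or: post it only five times (loops, pygame 2.0.1+)

    running = True
    while running:
        for event in pygame.event.get():
            if event.type == SPAWN:
                print("tick")
            elif event.type == pygame.QUIT:
                running = False

    pygame.time.set_timer(SPAWN, 0)               # disable the timer again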

-

loops replaces the once argument, and this does not break backward compatibility.

-
-

New in pygame 2.0.0.dev3: once argument added.

-
-
-

Changed in pygame 2.0.1: event argument supports pygame.event.Event object

-
-
-

New in pygame 2.0.1: added loops argument to replace once argument

-
-
- -
-
-pygame.time.Clock¶
-
-
create an object to help track time
-
Clock() -> Clock
-
Clock.tick() — update the clock
Clock.tick_busy_loop() — update the clock
Clock.get_time() — time used in the previous tick
Clock.get_rawtime() — actual time used in the previous tick
Clock.get_fps() — compute the clock framerate
-

Creates a new Clock object that can be used to track an amount of time. The -clock also provides several functions to help control a game's framerate.

-
-
-tick()¶
-
-
update the clock
-
tick(framerate=0) -> milliseconds
-
-

This method should be called once per frame. It will compute how many -milliseconds have passed since the previous call.

-

If you pass the optional framerate argument the function will delay to -keep the game running slower than the given ticks per second. This can be -used to help limit the runtime speed of a game. By calling -Clock.tick(40) once per frame, the program will never run at more -than 40 frames per second.

-

Note that this function uses the SDL_Delay function, which is not accurate on every platform but does not use much CPU. Use tick_busy_loop if you want an accurate timer and don't mind chewing CPU.
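A sketch of the usual frame-limiting pattern:

    import pygame

    pygame.init()
    screen = pygame.display.set_mode((320, 240))
    clock = pygame.time.Clock()

    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False

        dt = clock.tick(40)        # cap at 40 fps; dt is the milliseconds since the last frame
        screen.fill((0, 0, 0))
        pygame.display.flip()

    pygame.quit()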

-
- -
-
-tick_busy_loop()¶
-
-
update the clock
-
tick_busy_loop(framerate=0) -> milliseconds
-
-

This method should be called once per frame. It will compute how many -milliseconds have passed since the previous call.

-

If you pass the optional framerate argument the function will delay to -keep the game running slower than the given ticks per second. This can be -used to help limit the runtime speed of a game. By calling -Clock.tick_busy_loop(40) once per frame, the program will never run at -more than 40 frames per second.

-

Note that this function uses pygame.time.delay(), which uses lots of CPU in a busy loop to make sure that timing is more accurate.

-
-

New in pygame 1.8.

-
-
- -
-
-get_time()¶
-
-
time used in the previous tick
-
get_time() -> milliseconds
-
-

The number of milliseconds that passed between the previous two calls to -Clock.tick().

-
- -
-
-get_rawtime()¶
-
-
actual time used in the previous tick
-
get_rawtime() -> milliseconds
-
-

Similar to Clock.get_time(), but does not include any time used -while Clock.tick() was delaying to limit the framerate.

-
- -
-
-get_fps()¶
-
-
compute the clock framerate
-
get_fps() -> float
-
-

Compute your game's framerate (in frames per second). It is computed by -averaging the last ten calls to Clock.tick().

-
- -
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/touch.html b/venv/Lib/site-packages/pygame/docs/generated/ref/touch.html deleted file mode 100644 index 198a5ff..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/touch.html +++ /dev/null @@ -1,240 +0,0 @@ - - - - - - - - - pygame._sdl2.touch — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame._sdl2.touch
-
-
pygame module to work with touch input
-
pygame._sdl2.touch.get_num_devices() — get the number of touch devices
pygame._sdl2.touch.get_device() — get the touch device id for a given index
pygame._sdl2.touch.get_num_fingers() — the number of active fingers for a given touch device
pygame._sdl2.touch.get_finger() — get information about an active finger
-
-

New in pygame 2: This module requires SDL2.

-
-
-
-pygame._sdl2.touch.get_num_devices()¶
-
-
get the number of touch devices
-
get_num_devices() -> int
-
-

Return the number of available touch devices.

-
- -
-
-pygame._sdl2.touch.get_device()¶
-
-
get the touch device id for a given index
-
get_device(index) -> touchid
-
-
-
Parameters
-

index (int) -- This number is at least 0 and less than the -number of devices.

-
-
-

Return an integer id associated with the given index.

-
- -
-
-pygame._sdl2.touch.get_num_fingers()¶
-
-
the number of active fingers for a given touch device
-
get_num_fingers(touchid) -> int
-
-

Return the number of fingers active for the touch device -whose id is touchid.

-
- -
-
-pygame._sdl2.touch.get_finger()¶
-
-
get information about an active finger
-
get_finger(touchid, index) -> dict
-
-
-
Parameters
-
    -
  • touchid (int) -- The touch device id.
  • index (int) -- The index of the finger to return information about, between 0 and the number of active fingers.
-
-
-

Return a dict for the finger index active on touchid. -The dict contains these keys:

-
id         the id of the finger (an integer).
-x          the normalized x position of the finger, between 0 and 1.
-y          the normalized y position of the finger, between 0 and 1.
-pressure   the amount of pressure applied by the finger, between 0 and 1.
-
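A small sketch that prints every active finger on every touch device (it assumes a touch-capable device and an initialized display):

    import pygame
    from pygame._sdl2 import touch

    pygame.init()
    pygame.display.set_mode((320, 240))     # touch reporting needs an initialized display

    for device_index in range(touch.get_num_devices()):
        device_id = touch.get_device(device_index)
        for finger_index in range(touch.get_num_fingers(device_id)):
            finger = touch.get_finger(device_id, finger_index)
            # x/y are normalized to 0..1; multiply by the window size for pixel coordinates
            print(finger["id"], finger["x"], finger["y"], finger["pressure"])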
-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/ref/transform.html b/venv/Lib/site-packages/pygame/docs/generated/ref/transform.html deleted file mode 100644 index da5a1a5..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/ref/transform.html +++ /dev/null @@ -1,535 +0,0 @@ - - - - - - - - - pygame.transform — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-
-pygame.transform
-
-
pygame module to transform surfaces
-
pygame.transform.flip() — flip vertically and horizontally
pygame.transform.scale() — resize to new resolution
pygame.transform.rotate() — rotate an image
pygame.transform.rotozoom() — filtered scale and rotation
pygame.transform.scale2x() — specialized image doubler
pygame.transform.smoothscale() — scale a surface to an arbitrary size smoothly
pygame.transform.get_smoothscale_backend() — return smoothscale filter version in use: 'GENERIC', 'MMX', or 'SSE'
pygame.transform.set_smoothscale_backend() — set smoothscale filter version to one of: 'GENERIC', 'MMX', or 'SSE'
pygame.transform.chop() — gets a copy of an image with an interior area removed
pygame.transform.laplacian() — find edges in a surface
pygame.transform.average_surfaces() — find the average surface from many surfaces
pygame.transform.average_color() — finds the average color of a surface
pygame.transform.threshold() — finds which, and how many, pixels in a surface are within a threshold of a 'search_color' or a 'search_surf'
-

A Surface transform is an operation that moves or resizes the pixels. All these -functions take a Surface to operate on and return a new Surface with the -results.

-

Some of the transforms are considered destructive. This means every time they are performed they lose pixel data. Common examples of this are resizing and rotating. For this reason, it is better to re-transform the original surface than to keep transforming an image multiple times. (For example, suppose you are animating a bouncing spring which expands and contracts. If you applied the size changes incrementally to the previous images, you would lose detail. Instead, always begin with the original image and scale to the desired size.)
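A sketch of the recommended pattern, with a plain Surface standing in for a loaded image:

    import pygame

    original = pygame.Surface((100, 100))        # stand-in for a loaded image

    # Lossy: each step rescales the previous, already-degraded result.
    frame = original
    for size in ((90, 90), (80, 80), (70, 70)):
        frame = pygame.transform.scale(frame, size)

    # Better: always scale the untouched original to the size you need.
    frames = [pygame.transform.scale(original, size)
              for size in ((90, 90), (80, 80), (70, 70))]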

-
-

Changed in pygame 2.0.2: transform functions now support keyword arguments.

-
-
-
-pygame.transform.flip()¶
-
-
flip vertically and horizontally
-
flip(surface, flip_x, flip_y) -> Surface
-
-

This can flip a Surface either vertically, horizontally, or both. -The arguments flip_x and flip_y are booleans that control whether -to flip each axis. Flipping a Surface is non-destructive and returns a new -Surface with the same dimensions.

-
- -
-
-pygame.transform.scale()¶
-
-
resize to new resolution
-
scale(surface, size, dest_surface=None) -> Surface
-
-

Resizes the Surface to a new size, given as (width, height). -This is a fast scale operation that does not sample the results.

-

An optional destination surface can be used, rather than have it create a -new one. This is quicker if you want to repeatedly scale something. However -the destination must be the same size as the size (width, height) passed in. Also -the destination surface must be the same format.
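A small sketch of the reuse pattern (the sizes are arbitrary):

    import pygame

    src = pygame.Surface((64, 64))
    dest = pygame.Surface((128, 128))               # must match the requested size and format

    # Reusing dest avoids allocating a fresh Surface on every call.
    pygame.transform.scale(src, (128, 128), dest)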

-
- -
-
-pygame.transform.rotate()¶
-
-
rotate an image
-
rotate(surface, angle) -> Surface
-
-

Unfiltered counterclockwise rotation. The angle argument represents degrees -and can be any floating point value. Negative angle amounts will rotate -clockwise.

-

Unless rotating by 90 degree increments, the image will be padded larger to -hold the new size. If the image has pixel alphas, the padded area will be -transparent. Otherwise pygame will pick a color that matches the Surface -colorkey or the topleft pixel value.
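A common sketch for rotating around a fixed center, since the result is padded larger:

    import pygame

    image = pygame.Surface((80, 40), pygame.SRCALPHA)
    rect = image.get_rect(center=(160, 120))

    rotated = pygame.transform.rotate(image, 30)          # 30 degrees counterclockwise
    rotated_rect = rotated.get_rect(center=rect.center)   # re-center the padded result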

-
- -
-
-pygame.transform.rotozoom()¶
-
-
filtered scale and rotation
-
rotozoom(surface, angle, scale) -> Surface
-
-

This is a combined scale and rotation transform. The resulting Surface will -be a filtered 32-bit Surface. The scale argument is a floating point value -that will be multiplied by the current resolution. The angle argument is a -floating point value that represents the counterclockwise degrees to rotate. -A negative rotation angle will rotate clockwise.

-
- -
-
-pygame.transform.scale2x()¶
-
-
specialized image doubler
-
scale2x(surface, dest_surface=None) -> Surface
-
-

This will return a new image that is double the size of the original. It -uses the AdvanceMAME Scale2X algorithm which does a 'jaggie-less' scale of -bitmap graphics.

-

This really only has an effect on simple images with solid colors. On -photographic and antialiased images it will look like a regular unfiltered -scale.

-

An optional destination surface can be used, rather than have it create a -new one. This is quicker if you want to repeatedly scale something. However -the destination must be twice the size of the source surface passed in. Also -the destination surface must be the same format.

-
- -
-
-pygame.transform.smoothscale()¶
-
-
scale a surface to an arbitrary size smoothly
-
smoothscale(surface, size, dest_surface=None) -> Surface
-
-

Uses one of two different algorithms for scaling each dimension of the input -surface as required. For shrinkage, the output pixels are area averages of -the colors they cover. For expansion, a bilinear filter is used. For the -x86-64 and i686 architectures, optimized MMX routines are included and -will run much faster than other machine types. The size is a 2 number -sequence for (width, height). This function only works for 24-bit or 32-bit -surfaces. An exception will be thrown if the input surface bit depth is less -than 24.
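For illustration (the surfaces here stand in for loaded images):

    import pygame

    photo = pygame.Surface((640, 480), depth=32)                 # stand-in for a loaded photo
    thumb = pygame.transform.smoothscale(photo, (160, 120))      # area-averaged shrink
    large = pygame.transform.smoothscale(photo, (1280, 960))     # bilinear-filtered enlargement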

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.transform.get_smoothscale_backend()¶
-
-
return smoothscale filter version in use: 'GENERIC', 'MMX', or 'SSE'
-
get_smoothscale_backend() -> string
-
-

Shows whether or not smoothscale is using MMX or SSE acceleration. If no acceleration is available then "GENERIC" is returned. For an x86 processor the level of acceleration to use is determined at runtime.

-

This function is provided for pygame testing and debugging.

-
- -
-
-pygame.transform.set_smoothscale_backend()¶
-
-
set smoothscale filter version to one of: 'GENERIC', 'MMX', or 'SSE'
-
set_smoothscale_backend(backend) -> None
-
-

Sets smoothscale acceleration. Takes a string argument. A value of 'GENERIC' -turns off acceleration. 'MMX' uses MMX instructions only. 'SSE' allows -SSE extensions as well. A value error is raised if type is not -recognized or not supported by the current processor.

-

This function is provided for pygame testing and debugging. If smoothscale -causes an invalid instruction error then it is a pygame/SDL bug that should -be reported. Use this function as a temporary fix only.

-
- -
-
-pygame.transform.chop()¶
-
-
gets a copy of an image with an interior area removed
-
chop(surface, rect) -> Surface
-
-

Extracts a portion of an image. All vertical and horizontal pixels -surrounding the given rectangle area are removed. The corner areas (diagonal -to the rect) are then brought together. (The original image is not altered -by this operation.)

-

NOTE: If you want a "crop" that returns the part of an image within a -rect, you can blit with a rect to a new surface or copy a subsurface.

-
- -
-
-pygame.transform.laplacian()¶
-
-
find edges in a surface
-
laplacian(surface, dest_surface=None) -> Surface
-
-

Finds the edges in a surface using the laplacian algorithm.

-
-

New in pygame 1.8.

-
-
- -
-
-pygame.transform.average_surfaces()¶
-
-
find the average surface from many surfaces.
-
average_surfaces(surfaces, dest_surface=None, palette_colors=1) -> Surface
-
-

Takes a sequence of surfaces and returns a surface with average colors from -each of the surfaces.

-

palette_colors - if true we average the colors in palette, otherwise we -average the pixel values. This is useful if the surface is actually -greyscale colors, and not palette colors.

-

Note, this function currently does not handle palette-using surfaces correctly.

-
-

New in pygame 1.8.

-
-
-

New in pygame 1.9: palette_colors argument

-
-
- -
-
-pygame.transform.average_color()¶
-
-
finds the average color of a surface
-
average_color(surface, rect=None) -> Color
-
-

Finds the average color of a Surface or a region of a surface specified by a -Rect, and returns it as a Color.
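A minimal sketch (the fill color and region are arbitrary):

    import pygame

    surf = pygame.Surface((10, 10))
    surf.fill((100, 150, 200))

    print(pygame.transform.average_color(surf))                            # roughly the fill color
    print(pygame.transform.average_color(surf, pygame.Rect(0, 0, 5, 5)))   # just a region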

-
- -
-
-pygame.transform.threshold()¶
-
-
finds which, and how many pixels in a surface are within a threshold of a 'search_color' or a 'search_surf'.
-
threshold(dest_surface, surface, search_color, threshold=(0,0,0,0), set_color=(0,0,0,0), set_behavior=1, search_surf=None, inverse_set=False) -> num_threshold_pixels
-
-

This versatile function can be used to find colors in a 'surf' close to a 'search_color', or close to colors in a separate 'search_surf'.

-

It can also be used to transfer pixels into a 'dest_surf' that match or don't match.

-

By default it sets pixels in the 'dest_surf' where all of the pixels NOT within the -threshold are changed to set_color. If inverse_set is optionally set to True, -the pixels that ARE within the threshold are changed to set_color.

-

If the optional 'search_surf' surface is given, it is used to threshold against -rather than the specified 'set_color'. That is, it will find each pixel in the -'surf' that is within the 'threshold' of the pixel at the same coordinates -of the 'search_surf'.

-
-
Parameters
-
-
-
Return type
-

int

-
-
Returns
-

The number of pixels that are within the 'threshold' in 'surf' -compared to either 'search_color' or search_surf.

-
-
Examples
-

-
-

See the threshold tests for a full set of examples: https://github.com/pygame/pygame/blob/master/test/transform_test.py

-
    def test_threshold_dest_surf_not_change(self):
-        """the pixels within the threshold.
-
-        All pixels not within threshold are changed to set_color.
-        So there should be none changed in this test.
-        """
-        (w, h) = size = (32, 32)
-        threshold = (20, 20, 20, 20)
-        original_color = (25, 25, 25, 25)
-        original_dest_color = (65, 65, 65, 55)
-        threshold_color = (10, 10, 10, 10)
-        set_color = (255, 10, 10, 10)
-
-        surf = pygame.Surface(size, pygame.SRCALPHA, 32)
-        dest_surf = pygame.Surface(size, pygame.SRCALPHA, 32)
-        search_surf = pygame.Surface(size, pygame.SRCALPHA, 32)
-
-        surf.fill(original_color)
-        search_surf.fill(threshold_color)
-        dest_surf.fill(original_dest_color)
-
-        # set_behavior=1, set dest_surface from set_color.
-        # all within threshold of third_surface, so no color is set.
-
-        THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR = 1
-        pixels_within_threshold = pygame.transform.threshold(
-            dest_surface=dest_surf,
-            surface=surf,
-            search_color=None,
-            threshold=threshold,
-            set_color=set_color,
-            set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR,
-            search_surf=search_surf,
-        )
-
-        # # Return, of pixels within threshold is correct
-        self.assertEqual(w * h, pixels_within_threshold)
-
-        # # Size of dest surface is correct
-        dest_rect = dest_surf.get_rect()
-        dest_size = dest_rect.size
-        self.assertEqual(size, dest_size)
-
-        # The color is not the change_color specified for every pixel As all
-        # pixels are within threshold
-
-        for pt in test_utils.rect_area_pts(dest_rect):
-            self.assertNotEqual(dest_surf.get_at(pt), set_color)
-            self.assertEqual(dest_surf.get_at(pt), original_dest_color)
-
-
-
-

New in pygame 1.8.

-
-
-

Changed in pygame 1.9.4: Fixed a lot of bugs and added keyword arguments. Test your code.

-
-
- -
- -
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/search.html b/venv/Lib/site-packages/pygame/docs/generated/search.html deleted file mode 100644 index 1f1e1b3..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/search.html +++ /dev/null @@ -1,96 +0,0 @@ - - - - - - - - Search — pygame v2.1.2 documentation - - - - - - - - - - - - - - - - - - -
-
-
- -

Search

- - - - -


- - -
- - - -
- - - -
- -
- - -
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/searchindex.js b/venv/Lib/site-packages/pygame/docs/generated/searchindex.js deleted file mode 100644 index 16b2a61..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/searchindex.js +++ /dev/null @@ -1 +0,0 @@ -Search.setIndex({docnames:["c_api","c_api/base","c_api/bufferproxy","c_api/cdrom","c_api/color","c_api/display","c_api/event","c_api/freetype","c_api/mixer","c_api/rect","c_api/rwobject","c_api/slots","c_api/surface","c_api/surflock","c_api/version","filepaths","index","ref/bufferproxy","ref/camera","ref/cdrom","ref/color","ref/color_list","ref/cursors","ref/display","ref/draw","ref/event","ref/examples","ref/fastevent","ref/font","ref/freetype","ref/gfxdraw","ref/image","ref/joystick","ref/key","ref/locals","ref/mask","ref/math","ref/midi","ref/mixer","ref/mouse","ref/music","ref/overlay","ref/pixelarray","ref/pixelcopy","ref/pygame","ref/rect","ref/scrap","ref/sdl2_controller","ref/sdl2_video","ref/sndarray","ref/sprite","ref/surface","ref/surfarray","ref/tests","ref/time","ref/touch","ref/transform","tut/CameraIntro","tut/ChimpLineByLine","tut/DisplayModes","tut/ImportInit","tut/MakeGames","tut/MoveIt","tut/PygameIntro","tut/SpriteIntro","tut/SurfarrayIntro","tut/chimp.py","tut/en/Red_or_Black/1.Prolog/introduction","tut/en/Red_or_Black/2.Print_text/Basic TEMPLATE and OUTPUT","tut/en/Red_or_Black/3.Move_text/Basic PROCESS","tut/en/Red_or_Black/4.Control_text/Basic INPUT","tut/en/Red_or_Black/5.HP_bar/Advanced OUTPUT with Advanced PROCESS","tut/en/Red_or_Black/6.Buttons/Advanced INPUT with Advanced OUTPUT","tut/en/Red_or_Black/7.Game_board/Advanced OUTPUT and plus alpha","tut/en/Red_or_Black/8.Epilog/Epilog","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/1.\ud504\ub864\ub85c\uadf8/\uc18c\uac1c","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/2.\ud14d\uc2a4\ud2b8 \ucd9c\ub825/\uae30\ucd08 \ud15c\ud50c\ub9bf\uacfc \ucd9c\ub825","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/3.\ud14d\uc2a4\ud2b8 \uc774\ub3d9/\uae30\ucd08 \ucc98\ub9ac","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/4.\ud14d\uc2a4\ud2b8 \uc870\uc885/\uae30\ucd08 \uc785\ub825","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/5.HP\ubc14/\uc2ec\ud654 \ucd9c\ub825 \uadf8\ub9ac\uace0 \uc2ec\ud654 \ucc98\ub9ac","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/6.\ubc84\ud2bc\ub4e4/\uc2ec\ud654 \uc785\ub825 \uadf8\ub9ac\uace0 \uc2ec\ud654 \ucd9c\ub825","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/7.\uac8c\uc784\ud310/\uc2ec\ud654 \ucd9c\ub825 \uadf8\ub9ac\uace0 \uc870\uae08 \ub354","tut/ko/\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d/8.\uc5d0\ud544\ub85c\uadf8/\uc5d0\ud544\ub85c\uadf8","tut/ko/\ube68\uac04\ube14\ub85d 
\uac80\uc740\ube14\ub85d/overview","tut/newbieguide","tut/tom_games2","tut/tom_games3","tut/tom_games4","tut/tom_games5","tut/tom_games6"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":4,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":3,"sphinx.domains.rst":2,"sphinx.domains.std":2,sphinx:56},filenames:["c_api.rst","c_api\\base.rst","c_api\\bufferproxy.rst","c_api\\cdrom.rst","c_api\\color.rst","c_api\\display.rst","c_api\\event.rst","c_api\\freetype.rst","c_api\\mixer.rst","c_api\\rect.rst","c_api\\rwobject.rst","c_api\\slots.rst","c_api\\surface.rst","c_api\\surflock.rst","c_api\\version.rst","filepaths.rst","index.rst","ref\\bufferproxy.rst","ref\\camera.rst","ref\\cdrom.rst","ref\\color.rst","ref\\color_list.rst","ref\\cursors.rst","ref\\display.rst","ref\\draw.rst","ref\\event.rst","ref\\examples.rst","ref\\fastevent.rst","ref\\font.rst","ref\\freetype.rst","ref\\gfxdraw.rst","ref\\image.rst","ref\\joystick.rst","ref\\key.rst","ref\\locals.rst","ref\\mask.rst","ref\\math.rst","ref\\midi.rst","ref\\mixer.rst","ref\\mouse.rst","ref\\music.rst","ref\\overlay.rst","ref\\pixelarray.rst","ref\\pixelcopy.rst","ref\\pygame.rst","ref\\rect.rst","ref\\scrap.rst","ref\\sdl2_controller.rst","ref\\sdl2_video.rst","ref\\sndarray.rst","ref\\sprite.rst","ref\\surface.rst","ref\\surfarray.rst","ref\\tests.rst","ref\\time.rst","ref\\touch.rst","ref\\transform.rst","tut\\CameraIntro.rst","tut\\ChimpLineByLine.rst","tut\\DisplayModes.rst","tut\\ImportInit.rst","tut\\MakeGames.rst","tut\\MoveIt.rst","tut\\PygameIntro.rst","tut\\SpriteIntro.rst","tut\\SurfarrayIntro.rst","tut\\chimp.py.rst","tut\\en\\Red_or_Black\\1.Prolog\\introduction.rst","tut\\en\\Red_or_Black\\2.Print_text\\Basic TEMPLATE and OUTPUT.rst","tut\\en\\Red_or_Black\\3.Move_text\\Basic PROCESS.rst","tut\\en\\Red_or_Black\\4.Control_text\\Basic INPUT.rst","tut\\en\\Red_or_Black\\5.HP_bar\\Advanced OUTPUT with Advanced PROCESS.rst","tut\\en\\Red_or_Black\\6.Buttons\\Advanced INPUT with Advanced OUTPUT.rst","tut\\en\\Red_or_Black\\7.Game_board\\Advanced OUTPUT and plus alpha.rst","tut\\en\\Red_or_Black\\8.Epilog\\Epilog.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\1.\ud504\ub864\ub85c\uadf8\\\uc18c\uac1c.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\2.\ud14d\uc2a4\ud2b8 \ucd9c\ub825\\\uae30\ucd08 \ud15c\ud50c\ub9bf\uacfc \ucd9c\ub825.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\3.\ud14d\uc2a4\ud2b8 \uc774\ub3d9\\\uae30\ucd08 \ucc98\ub9ac.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\4.\ud14d\uc2a4\ud2b8 \uc870\uc885\\\uae30\ucd08 \uc785\ub825.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\5.HP\ubc14\\\uc2ec\ud654 \ucd9c\ub825 \uadf8\ub9ac\uace0 \uc2ec\ud654 \ucc98\ub9ac.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\6.\ubc84\ud2bc\ub4e4\\\uc2ec\ud654 \uc785\ub825 \uadf8\ub9ac\uace0 \uc2ec\ud654 \ucd9c\ub825.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\7.\uac8c\uc784\ud310\\\uc2ec\ud654 \ucd9c\ub825 \uadf8\ub9ac\uace0 \uc870\uae08 \ub354.rst","tut\\ko\\\ube68\uac04\ube14\ub85d \uac80\uc740\ube14\ub85d\\8.\uc5d0\ud544\ub85c\uadf8\\\uc5d0\ud544\ub85c\uadf8.rst","tut\\ko\\\ube68\uac04\ube14\ub85d 
\uac80\uc740\ube14\ub85d\\overview.rst","tut\\newbieguide.rst","tut\\tom_games2.rst","tut\\tom_games3.rst","tut\\tom_games4.rst","tut\\tom_games5.rst","tut\\tom_games6.rst"],objects:{"":[[14,0,1,"c.PG_MAJOR_VERSION","PG_MAJOR_VERSION"],[14,0,1,"c.PG_MINOR_VERSION","PG_MINOR_VERSION"],[14,0,1,"c.PG_PATCH_VERSION","PG_PATCH_VERSION"],[14,0,1,"c.PG_VERSIONNUM","PG_VERSIONNUM"],[14,0,1,"c.PG_VERSION_ATLEAST","PG_VERSION_ATLEAST"],[1,1,1,"c.import_pygame_base","import_pygame_base"],[1,1,1,"c.pgBuffer_AsArrayInterface","pgBuffer_AsArrayInterface"],[1,1,1,"c.pgBuffer_AsArrayStruct","pgBuffer_AsArrayStruct"],[1,1,1,"c.pgBuffer_Release","pgBuffer_Release"],[2,1,1,"c.pgBufproxy_Check","pgBufproxy_Check"],[2,1,1,"c.pgBufproxy_GetParent","pgBufproxy_GetParent"],[2,1,1,"c.pgBufproxy_New","pgBufproxy_New"],[2,1,1,"c.pgBufproxy_Trip","pgBufproxy_Trip"],[2,3,1,"c.pgBufproxy_Type","pgBufproxy_Type"],[3,4,1,"c.pgCDObject","pgCDObject"],[3,1,1,"c.pgCD_AsID","pgCD_AsID"],[3,1,1,"c.pgCD_Check","pgCD_Check"],[3,1,1,"c.pgCD_New","pgCD_New"],[3,3,1,"c.pgCD_Type","pgCD_Type"],[8,4,1,"c.pgChannelObject","pgChannelObject"],[8,1,1,"c.pgChannel_AsInt","pgChannel_AsInt"],[8,1,1,"c.pgChannel_Check","pgChannel_Check"],[8,1,1,"c.pgChannel_New","pgChannel_New"],[8,3,1,"c.pgChannel_Type","pgChannel_Type"],[4,1,1,"c.pgColor_Check","pgColor_Check"],[4,1,1,"c.pgColor_New","pgColor_New"],[4,1,1,"c.pgColor_NewLength","pgColor_NewLength"],[4,3,1,"c.pgColor_Type","pgColor_Type"],[1,1,1,"c.pgDict_AsBuffer","pgDict_AsBuffer"],[6,4,1,"c.pgEventObject","pgEventObject"],[6,1,1,"c.pgEvent_Check","pgEvent_Check"],[6,1,1,"c.pgEvent_FillUserEvent","pgEvent_FillUserEvent"],[6,1,1,"c.pgEvent_New","pgEvent_New"],[6,1,1,"c.pgEvent_New2","pgEvent_New2"],[6,4,1,"c.pgEvent_Type","pgEvent_Type"],[1,3,1,"c.pgExc_BufferError","pgExc_BufferError"],[1,3,1,"c.pgExc_SDLError","pgExc_SDLError"],[7,4,1,"c.pgFontObject","pgFontObject"],[7,1,1,"c.pgFont_Check","pgFont_Check"],[7,1,1,"c.pgFont_IS_ALIVE","pgFont_IS_ALIVE"],[7,1,1,"c.pgFont_New","pgFont_New"],[7,4,1,"c.pgFont_Type","pgFont_Type"],[13,4,1,"c.pgLifetimeLockObject","pgLifetimeLockObject"],[13,1,1,"c.pgLifetimeLock_Check","pgLifetimeLock_Check"],[13,3,1,"c.pgLifetimeLock_Type","pgLifetimeLock_Type"],[1,1,1,"c.pgObject_GetBuffer","pgObject_GetBuffer"],[10,1,1,"c.pgRWops_FromFileObject","pgRWops_FromFileObject"],[10,1,1,"c.pgRWops_FromObject","pgRWops_FromObject"],[10,1,1,"c.pgRWops_GetFileExtension","pgRWops_GetFileExtension"],[10,1,1,"c.pgRWops_IsFileObject","pgRWops_IsFileObject"],[10,1,1,"c.pgRWops_ReleaseObject","pgRWops_ReleaseObject"],[9,4,1,"c.pgRectObject","pgRectObject"],[9,1,1,"c.pgRect_AsRect","pgRect_AsRect"],[9,1,1,"c.pgRect_FromObject","pgRect_FromObject"],[9,1,1,"c.pgRect_New","pgRect_New"],[9,1,1,"c.pgRect_New4","pgRect_New4"],[9,1,1,"c.pgRect_Normalize","pgRect_Normalize"],[9,3,1,"c.pgRect_Type","pgRect_Type"],[8,4,1,"c.pgSoundObject","pgSoundObject"],[8,1,1,"c.pgSound_AsChunk","pgSound_AsChunk"],[8,1,1,"c.pgSound_Check","pgSound_Check"],[8,1,1,"c.pgSound_New","pgSound_New"],[8,3,1,"c.pgSound_Type","pgSound_Type"],[12,4,1,"c.pgSurfaceObject","pgSurfaceObject"],[12,1,1,"c.pgSurface_AsSurface","pgSurface_AsSurface"],[12,1,1,"c.pgSurface_Blit","pgSurface_Blit"],[12,1,1,"c.pgSurface_Check","pgSurface_Check"],[13,1,1,"c.pgSurface_Lock","pgSurface_Lock"],[13,1,1,"c.pgSurface_LockBy","pgSurface_LockBy"],[13,1,1,"c.pgSurface_LockLifetime","pgSurface_LockLifetime"],[12,1,1,"c.pgSurface_New","pgSurface_New"],[13,1,1,"c.pgSurface_Prep","pgSurface_Prep"],[12,3,1,"c.pgSurface_Type","pgSurface_Ty
pe"],[13,1,1,"c.pgSurface_UnLock","pgSurface_UnLock"],[13,1,1,"c.pgSurface_UnLockBy","pgSurface_UnLockBy"],[13,1,1,"c.pgSurface_Unprep","pgSurface_Unprep"],[5,4,1,"c.pgVidInfoObject","pgVidInfoObject"],[5,1,1,"c.pgVidInfo_AsVidInfo","pgVidInfo_AsVidInfo"],[5,1,1,"c.pgVidInfo_Check","pgVidInfo_Check"],[5,1,1,"c.pgVidInfo_New","pgVidInfo_New"],[5,3,1,"c.pgVidInfo_Type","pgVidInfo_Type"],[10,1,1,"c.pg_EncodeFilePath","pg_EncodeFilePath"],[10,1,1,"c.pg_EncodeString","pg_EncodeString"],[1,1,1,"c.pg_FloatFromObj","pg_FloatFromObj"],[1,1,1,"c.pg_FloatFromObjIndex","pg_FloatFromObjIndex"],[1,1,1,"c.pg_GetDefaultWindow","pg_GetDefaultWindow"],[1,1,1,"c.pg_GetDefaultWindowSurface","pg_GetDefaultWindowSurface"],[1,1,1,"c.pg_IntFromObj","pg_IntFromObj"],[1,1,1,"c.pg_IntFromObjIndex","pg_IntFromObjIndex"],[1,1,1,"c.pg_RGBAFromObj","pg_RGBAFromObj"],[1,1,1,"c.pg_RegisterQuit","pg_RegisterQuit"],[1,1,1,"c.pg_SetDefaultWindow","pg_SetDefaultWindow"],[1,1,1,"c.pg_SetDefaultWindowSurface","pg_SetDefaultWindowSurface"],[1,1,1,"c.pg_TwoFloatsFromObj","pg_TwoFloatsFromObj"],[1,1,1,"c.pg_TwoIntsFromObj","pg_TwoIntsFromObj"],[1,1,1,"c.pg_UintFromObj","pg_UintFromObj"],[1,1,1,"c.pg_UintFromObjIndex","pg_UintFromObjIndex"],[1,4,1,"c.pg_buffer","pg_buffer"],[1,1,1,"c.pg_mod_autoinit","pg_mod_autoinit"],[1,1,1,"c.pg_mod_autoquit","pg_mod_autoquit"],[44,5,0,"-","pygame"]],"pygame.BufferProxy":[[17,7,1,"","length"],[17,7,1,"","parent"],[17,7,1,"","raw"],[17,8,1,"","write"]],"pygame.Color":[[20,7,1,"","a"],[20,7,1,"","b"],[20,7,1,"","cmy"],[20,8,1,"","correct_gamma"],[20,7,1,"","g"],[20,7,1,"","hsla"],[20,7,1,"","hsva"],[20,7,1,"","i1i2i3"],[20,8,1,"","lerp"],[20,8,1,"","normalize"],[20,8,1,"","premul_alpha"],[20,7,1,"","r"],[20,8,1,"","set_length"],[20,8,1,"","update"]],"pygame.Overlay":[[41,8,1,"","display"],[41,8,1,"","get_hardware"],[41,8,1,"","set_location"]],"pygame.PixelArray":[[42,8,1,"","close"],[42,8,1,"","compare"],[42,8,1,"","extract"],[42,7,1,"","itemsize"],[42,8,1,"","make_surface"],[42,7,1,"","ndim"],[42,8,1,"","replace"],[42,7,1,"","shape"],[42,7,1,"","strides"],[42,7,1,"","surface"],[42,8,1,"","transpose"]],"pygame.Rect":[[45,8,1,"","clamp"],[45,8,1,"","clamp_ip"],[45,8,1,"","clip"],[45,8,1,"","clipline"],[45,8,1,"","collidedict"],[45,8,1,"","collidedictall"],[45,8,1,"","collidelist"],[45,8,1,"","collidelistall"],[45,8,1,"","collidepoint"],[45,8,1,"","colliderect"],[45,8,1,"","contains"],[45,8,1,"","copy"],[45,8,1,"","fit"],[45,8,1,"","inflate"],[45,8,1,"","inflate_ip"],[45,8,1,"","move"],[45,8,1,"","move_ip"],[45,8,1,"","normalize"],[45,8,1,"","union"],[45,8,1,"","union_ip"],[45,8,1,"","unionall"],[45,8,1,"","unionall_ip"],[45,8,1,"","update"]],"pygame.Surface":[[51,7,1,"","_pixels_address"],[51,8,1,"","blit"],[51,8,1,"","blits"],[51,8,1,"","convert"],[51,8,1,"","convert_alpha"],[51,8,1,"","copy"],[51,8,1,"","fill"],[51,8,1,"","get_abs_offset"],[51,8,1,"","get_abs_parent"],[51,8,1,"","get_alpha"],[51,8,1,"","get_at"],[51,8,1,"","get_at_mapped"],[51,8,1,"","get_bitsize"],[51,8,1,"","get_bounding_rect"],[51,8,1,"","get_buffer"],[51,8,1,"","get_bytesize"],[51,8,1,"","get_clip"],[51,8,1,"","get_colorkey"],[51,8,1,"","get_flags"],[51,8,1,"","get_height"],[51,8,1,"","get_locked"],[51,8,1,"","get_locks"],[51,8,1,"","get_losses"],[51,8,1,"","get_masks"],[51,8,1,"","get_offset"],[51,8,1,"","get_palette"],[51,8,1,"","get_palette_at"],[51,8,1,"","get_parent"],[51,8,1,"","get_pitch"],[51,8,1,"","get_rect"],[51,8,1,"","get_shifts"],[51,8,1,"","get_size"],[51,8,1,"","get_view"],[51,8,1,"","get_width"],[51,8,1,"","lo
ck"],[51,8,1,"","map_rgb"],[51,8,1,"","mustlock"],[51,8,1,"","scroll"],[51,8,1,"","set_alpha"],[51,8,1,"","set_at"],[51,8,1,"","set_clip"],[51,8,1,"","set_colorkey"],[51,8,1,"","set_masks"],[51,8,1,"","set_palette"],[51,8,1,"","set_palette_at"],[51,8,1,"","set_shifts"],[51,8,1,"","subsurface"],[51,8,1,"","unlock"],[51,8,1,"","unmap_rgb"]],"pygame._sdl2":[[47,5,0,"-","controller"],[55,5,0,"-","touch"],[48,5,0,"-","video"]],"pygame._sdl2.controller":[[47,6,1,"","Controller"],[47,9,1,"","get_count"],[47,9,1,"","get_eventstate"],[47,9,1,"","get_init"],[47,9,1,"","init"],[47,9,1,"","is_controller"],[47,9,1,"","name_forindex"],[47,9,1,"","quit"],[47,9,1,"","set_eventstate"]],"pygame._sdl2.controller.Controller":[[47,8,1,"","as_joystick"],[47,8,1,"","attached"],[47,8,1,"","from_joystick"],[47,8,1,"","get_axis"],[47,8,1,"","get_button"],[47,8,1,"","get_init"],[47,8,1,"","get_mapping"],[47,8,1,"","quit"],[47,8,1,"","rumble"],[47,8,1,"","set_mapping"],[47,8,1,"","stop_rumble"]],"pygame._sdl2.touch":[[55,9,1,"","get_device"],[55,9,1,"","get_finger"],[55,9,1,"","get_num_devices"],[55,9,1,"","get_num_fingers"]],"pygame._sdl2.video":[[48,6,1,"","Image"],[48,6,1,"","Renderer"],[48,6,1,"","Texture"],[48,6,1,"","Window"]],"pygame._sdl2.video.Image":[[48,7,1,"","alpha"],[48,7,1,"","angle"],[48,7,1,"","blend_mode"],[48,7,1,"","color"],[48,8,1,"","draw"],[48,7,1,"","flipX"],[48,7,1,"","flipY"],[48,8,1,"","get_rect"],[48,7,1,"","origin"],[48,7,1,"","srcrect"],[48,7,1,"","texture"]],"pygame._sdl2.video.Renderer":[[48,8,1,"","blit"],[48,8,1,"","clear"],[48,7,1,"","draw_blend_mode"],[48,7,1,"","draw_color"],[48,8,1,"","draw_line"],[48,8,1,"","draw_point"],[48,8,1,"","draw_rect"],[48,8,1,"","fill_rect"],[48,8,1,"","from_window"],[48,8,1,"","get_viewport"],[48,7,1,"","logical_size"],[48,8,1,"","present"],[48,7,1,"","scale"],[48,8,1,"","set_viewport"],[48,7,1,"","target"],[48,8,1,"","to_surface"]],"pygame._sdl2.video.Texture":[[48,7,1,"","alpha"],[48,7,1,"","blend_mode"],[48,7,1,"","color"],[48,8,1,"","draw"],[48,8,1,"","from_surface"],[48,8,1,"","get_rect"],[48,7,1,"","height"],[48,7,1,"","renderer"],[48,8,1,"","update"],[48,7,1,"","width"]],"pygame._sdl2.video.Window":[[48,7,1,"","borderless"],[48,7,1,"","brightness"],[48,8,1,"","destroy"],[48,7,1,"","display_index"],[48,8,1,"","focus"],[48,8,1,"","from_display_module"],[48,7,1,"","grab"],[48,8,1,"","hide"],[48,7,1,"","id"],[48,8,1,"","maximize"],[48,8,1,"","minimize"],[48,7,1,"","opacity"],[48,7,1,"","position"],[48,7,1,"","relative_mouse"],[48,7,1,"","resizable"],[48,8,1,"","restore"],[48,8,1,"","set_fullscreen"],[48,8,1,"","set_icon"],[48,8,1,"","set_modal_for"],[48,8,1,"","set_windowed"],[48,8,1,"","show"],[48,7,1,"","size"],[48,7,1,"","title"]],"pygame.camera":[[18,6,1,"","Camera"],[18,9,1,"","colorspace"],[18,9,1,"","get_backends"],[18,9,1,"","init"],[18,9,1,"","list_cameras"]],"pygame.camera.Camera":[[18,8,1,"","get_controls"],[18,8,1,"","get_image"],[18,8,1,"","get_raw"],[18,8,1,"","get_size"],[18,8,1,"","query_image"],[18,8,1,"","set_controls"],[18,8,1,"","start"],[18,8,1,"","stop"]],"pygame.cdrom":[[19,6,1,"","CD"],[19,9,1,"","get_count"],[19,9,1,"","get_init"],[19,9,1,"","init"],[19,9,1,"","quit"]],"pygame.cdrom.CD":[[19,8,1,"","eject"],[19,8,1,"","get_all"],[19,8,1,"","get_busy"],[19,8,1,"","get_current"],[19,8,1,"","get_empty"],[19,8,1,"","get_id"],[19,8,1,"","get_init"],[19,8,1,"","get_name"],[19,8,1,"","get_numtracks"],[19,8,1,"","get_paused"],[19,8,1,"","get_track_audio"],[19,8,1,"","get_track_length"],[19,8,1,"","get_track_start"],[19,8,1,"","init"
],[19,8,1,"","pause"],[19,8,1,"","play"],[19,8,1,"","quit"],[19,8,1,"","resume"],[19,8,1,"","stop"]],"pygame.cursors":[[22,6,1,"","Cursor"],[22,9,1,"","compile"],[22,9,1,"","load_xbm"]],"pygame.cursors.Cursor":[[22,8,1,"","copy"],[22,7,1,"","data"],[22,7,1,"","type"]],"pygame.display":[[23,9,1,"","Info"],[23,9,1,"","flip"],[23,9,1,"","get_active"],[23,9,1,"","get_allow_screensaver"],[23,9,1,"","get_caption"],[23,9,1,"","get_desktop_sizes"],[23,9,1,"","get_driver"],[23,9,1,"","get_init"],[23,9,1,"","get_num_displays"],[23,9,1,"","get_surface"],[23,9,1,"","get_window_size"],[23,9,1,"","get_wm_info"],[23,9,1,"","gl_get_attribute"],[23,9,1,"","gl_set_attribute"],[23,9,1,"","iconify"],[23,9,1,"","init"],[23,9,1,"","list_modes"],[23,9,1,"","mode_ok"],[23,9,1,"","quit"],[23,9,1,"","set_allow_screensaver"],[23,9,1,"","set_caption"],[23,9,1,"","set_gamma"],[23,9,1,"","set_gamma_ramp"],[23,9,1,"","set_icon"],[23,9,1,"","set_mode"],[23,9,1,"","set_palette"],[23,9,1,"","toggle_fullscreen"],[23,9,1,"","update"]],"pygame.draw":[[24,9,1,"","aaline"],[24,9,1,"","aalines"],[24,9,1,"","arc"],[24,9,1,"","circle"],[24,9,1,"","ellipse"],[24,9,1,"","line"],[24,9,1,"","lines"],[24,9,1,"","polygon"],[24,9,1,"","rect"]],"pygame.event":[[25,9,1,"","Event"],[25,6,1,"","EventType"],[25,9,1,"","clear"],[25,9,1,"","custom_type"],[25,9,1,"","event_name"],[25,9,1,"","get"],[25,9,1,"","get_blocked"],[25,9,1,"","get_grab"],[25,9,1,"","peek"],[25,9,1,"","poll"],[25,9,1,"","post"],[25,9,1,"","pump"],[25,9,1,"","set_allowed"],[25,9,1,"","set_blocked"],[25,9,1,"","set_grab"],[25,9,1,"","wait"]],"pygame.event.EventType":[[25,7,1,"","__dict__"],[25,7,1,"","type"]],"pygame.examples.aliens":[[26,9,1,"","main"]],"pygame.examples.arraydemo":[[26,9,1,"","main"]],"pygame.examples.blend_fill":[[26,9,1,"","main"]],"pygame.examples.blit_blends":[[26,9,1,"","main"]],"pygame.examples.camera":[[26,9,1,"","main"]],"pygame.examples.chimp":[[26,9,1,"","main"]],"pygame.examples.cursors":[[26,9,1,"","main"]],"pygame.examples.eventlist":[[26,9,1,"","main"]],"pygame.examples.fonty":[[26,9,1,"","main"]],"pygame.examples.freetype_misc":[[26,9,1,"","main"]],"pygame.examples.glcube":[[26,9,1,"","main"]],"pygame.examples.headless_no_windows_needed":[[26,9,1,"","main"]],"pygame.examples.joystick":[[26,9,1,"","main"]],"pygame.examples.liquid":[[26,9,1,"","main"]],"pygame.examples.mask":[[26,9,1,"","main"]],"pygame.examples.midi":[[26,9,1,"","main"]],"pygame.examples.moveit":[[26,9,1,"","main"]],"pygame.examples.pixelarray":[[26,9,1,"","main"]],"pygame.examples.playmus":[[26,9,1,"","main"]],"pygame.examples.scaletest":[[26,9,1,"","main"]],"pygame.examples.scrap_clipboard":[[26,9,1,"","main"]],"pygame.examples.scroll":[[26,9,1,"","main"]],"pygame.examples.sound":[[26,9,1,"","main"]],"pygame.examples.sound_array_demos":[[26,9,1,"","main"]],"pygame.examples.stars":[[26,9,1,"","main"]],"pygame.examples.testsprite":[[26,9,1,"","main"]],"pygame.examples.vgrade":[[26,9,1,"","main"]],"pygame.fastevent":[[27,9,1,"","get"],[27,9,1,"","get_init"],[27,9,1,"","init"],[27,9,1,"","poll"],[27,9,1,"","post"],[27,9,1,"","pump"],[27,9,1,"","wait"]],"pygame.font":[[28,6,1,"","Font"],[28,9,1,"","SysFont"],[28,9,1,"","get_default_font"],[28,9,1,"","get_fonts"],[28,9,1,"","get_init"],[28,9,1,"","init"],[28,9,1,"","match_font"],[28,9,1,"","quit"]],"pygame.font.Font":[[28,7,1,"","bold"],[28,8,1,"","get_ascent"],[28,8,1,"","get_bold"],[28,8,1,"","get_descent"],[28,8,1,"","get_height"],[28,8,1,"","get_italic"],[28,8,1,"","get_linesize"],[28,8,1,"","get_underline"],[28,7,1,"","italic
"],[28,8,1,"","metrics"],[28,8,1,"","render"],[28,8,1,"","set_bold"],[28,8,1,"","set_italic"],[28,8,1,"","set_underline"],[28,8,1,"","size"],[28,7,1,"","underline"]],"pygame.freetype":[[29,6,1,"","Font"],[29,9,1,"","SysFont"],[29,9,1,"","get_cache_size"],[29,9,1,"","get_default_font"],[29,9,1,"","get_default_resolution"],[29,9,1,"","get_error"],[29,9,1,"","get_init"],[29,9,1,"","get_version"],[29,9,1,"","init"],[29,9,1,"","quit"],[29,9,1,"","set_default_resolution"],[29,9,1,"","was_init"]],"pygame.freetype.Font":[[29,7,1,"","antialiased"],[29,7,1,"","ascender"],[29,7,1,"","bgcolor"],[29,7,1,"","descender"],[29,7,1,"","fgcolor"],[29,7,1,"","fixed_sizes"],[29,7,1,"","fixed_width"],[29,8,1,"","get_metrics"],[29,8,1,"","get_rect"],[29,8,1,"","get_sized_ascender"],[29,8,1,"","get_sized_descender"],[29,8,1,"","get_sized_glyph_height"],[29,8,1,"","get_sized_height"],[29,8,1,"","get_sizes"],[29,7,1,"","height"],[29,7,1,"","kerning"],[29,7,1,"","name"],[29,7,1,"","oblique"],[29,7,1,"","origin"],[29,7,1,"","pad"],[29,7,1,"","path"],[29,8,1,"","render"],[29,8,1,"","render_raw"],[29,8,1,"","render_raw_to"],[29,8,1,"","render_to"],[29,7,1,"","resolution"],[29,7,1,"","rotation"],[29,7,1,"","scalable"],[29,7,1,"","size"],[29,7,1,"","strength"],[29,7,1,"","strong"],[29,7,1,"","style"],[29,7,1,"","ucs4"],[29,7,1,"","underline"],[29,7,1,"","underline_adjustment"],[29,7,1,"","use_bitmap_strikes"],[29,7,1,"","vertical"],[29,7,1,"","wide"]],"pygame.gfxdraw":[[30,9,1,"","aacircle"],[30,9,1,"","aaellipse"],[30,9,1,"","aapolygon"],[30,9,1,"","aatrigon"],[30,9,1,"","arc"],[30,9,1,"","bezier"],[30,9,1,"","box"],[30,9,1,"","circle"],[30,9,1,"","ellipse"],[30,9,1,"","filled_circle"],[30,9,1,"","filled_ellipse"],[30,9,1,"","filled_polygon"],[30,9,1,"","filled_trigon"],[30,9,1,"","hline"],[30,9,1,"","line"],[30,9,1,"","pie"],[30,9,1,"","pixel"],[30,9,1,"","polygon"],[30,9,1,"","rectangle"],[30,9,1,"","textured_polygon"],[30,9,1,"","trigon"],[30,9,1,"","vline"]],"pygame.image":[[31,9,1,"","frombuffer"],[31,9,1,"","fromstring"],[31,9,1,"","get_extended"],[31,9,1,"","get_sdl_image_version"],[31,9,1,"","load"],[31,9,1,"","load_basic"],[31,9,1,"","load_extended"],[31,9,1,"","save"],[31,9,1,"","save_extended"],[31,9,1,"","tostring"]],"pygame.joystick":[[32,6,1,"","Joystick"],[32,9,1,"","get_count"],[32,9,1,"","get_init"],[32,9,1,"","init"],[32,9,1,"","quit"]],"pygame.joystick.Joystick":[[32,8,1,"","get_axis"],[32,8,1,"","get_ball"],[32,8,1,"","get_button"],[32,8,1,"","get_guid"],[32,8,1,"","get_hat"],[32,8,1,"","get_id"],[32,8,1,"","get_init"],[32,8,1,"","get_instance_id"],[32,8,1,"","get_name"],[32,8,1,"","get_numaxes"],[32,8,1,"","get_numballs"],[32,8,1,"","get_numbuttons"],[32,8,1,"","get_numhats"],[32,8,1,"","get_power_level"],[32,8,1,"","init"],[32,8,1,"","quit"],[32,8,1,"","rumble"],[32,8,1,"","stop_rumble"]],"pygame.key":[[33,9,1,"","get_focused"],[33,9,1,"","get_mods"],[33,9,1,"","get_pressed"],[33,9,1,"","get_repeat"],[33,9,1,"","key_code"],[33,9,1,"","name"],[33,9,1,"","set_mods"],[33,9,1,"","set_repeat"],[33,9,1,"","set_text_input_rect"],[33,9,1,"","start_text_input"],[33,9,1,"","stop_text_input"]],"pygame.mask":[[35,6,1,"","Mask"],[35,9,1,"","from_surface"],[35,9,1,"","from_threshold"]],"pygame.mask.Mask":[[35,8,1,"","angle"],[35,8,1,"","centroid"],[35,8,1,"","clear"],[35,8,1,"","connected_component"],[35,8,1,"","connected_components"],[35,8,1,"","convolve"],[35,8,1,"","copy"],[35,8,1,"","count"],[35,8,1,"","draw"],[35,8,1,"","erase"],[35,8,1,"","fill"],[35,8,1,"","get_at"],[35,8,1,"","get_bounding_rects"],[35,8,1
,"","get_rect"],[35,8,1,"","get_size"],[35,8,1,"","invert"],[35,8,1,"","outline"],[35,8,1,"","overlap"],[35,8,1,"","overlap_area"],[35,8,1,"","overlap_mask"],[35,8,1,"","scale"],[35,8,1,"","set_at"],[35,8,1,"","to_surface"]],"pygame.math":[[36,6,1,"","Vector2"],[36,6,1,"","Vector3"]],"pygame.math.Vector2":[[36,8,1,"","angle_to"],[36,8,1,"","as_polar"],[36,8,1,"","copy"],[36,8,1,"","cross"],[36,8,1,"","distance_squared_to"],[36,8,1,"","distance_to"],[36,8,1,"","dot"],[36,8,1,"","elementwise"],[36,8,1,"","from_polar"],[36,8,1,"","is_normalized"],[36,8,1,"","length"],[36,8,1,"","length_squared"],[36,8,1,"","lerp"],[36,8,1,"","magnitude"],[36,8,1,"","magnitude_squared"],[36,8,1,"","normalize"],[36,8,1,"","normalize_ip"],[36,8,1,"","project"],[36,8,1,"","reflect"],[36,8,1,"","reflect_ip"],[36,8,1,"","rotate"],[36,8,1,"","rotate_ip"],[36,8,1,"","rotate_ip_rad"],[36,8,1,"","rotate_rad"],[36,8,1,"","rotate_rad_ip"],[36,8,1,"","scale_to_length"],[36,8,1,"","slerp"],[36,8,1,"","update"]],"pygame.math.Vector3":[[36,8,1,"","angle_to"],[36,8,1,"","as_spherical"],[36,8,1,"","copy"],[36,8,1,"","cross"],[36,8,1,"","distance_squared_to"],[36,8,1,"","distance_to"],[36,8,1,"","dot"],[36,8,1,"","elementwise"],[36,8,1,"","from_spherical"],[36,8,1,"","is_normalized"],[36,8,1,"","length"],[36,8,1,"","length_squared"],[36,8,1,"","lerp"],[36,8,1,"","magnitude"],[36,8,1,"","magnitude_squared"],[36,8,1,"","normalize"],[36,8,1,"","normalize_ip"],[36,8,1,"","project"],[36,8,1,"","reflect"],[36,8,1,"","reflect_ip"],[36,8,1,"","rotate"],[36,8,1,"","rotate_ip"],[36,8,1,"","rotate_ip_rad"],[36,8,1,"","rotate_rad"],[36,8,1,"","rotate_rad_ip"],[36,8,1,"","rotate_x"],[36,8,1,"","rotate_x_ip"],[36,8,1,"","rotate_x_ip_rad"],[36,8,1,"","rotate_x_rad"],[36,8,1,"","rotate_x_rad_ip"],[36,8,1,"","rotate_y"],[36,8,1,"","rotate_y_ip"],[36,8,1,"","rotate_y_ip_rad"],[36,8,1,"","rotate_y_rad"],[36,8,1,"","rotate_y_rad_ip"],[36,8,1,"","rotate_z"],[36,8,1,"","rotate_z_ip"],[36,8,1,"","rotate_z_ip_rad"],[36,8,1,"","rotate_z_rad"],[36,8,1,"","rotate_z_rad_ip"],[36,8,1,"","scale_to_length"],[36,8,1,"","slerp"],[36,8,1,"","update"]],"pygame.midi":[[37,6,1,"","Input"],[37,10,1,"","MidiException"],[37,6,1,"","Output"],[37,9,1,"","frequency_to_midi"],[37,9,1,"","get_count"],[37,9,1,"","get_default_input_id"],[37,9,1,"","get_default_output_id"],[37,9,1,"","get_device_info"],[37,9,1,"","get_init"],[37,9,1,"","init"],[37,9,1,"","midi_to_ansi_note"],[37,9,1,"","midi_to_frequency"],[37,9,1,"","midis2events"],[37,9,1,"","quit"],[37,9,1,"","time"]],"pygame.midi.Input":[[37,8,1,"","close"],[37,8,1,"","poll"],[37,8,1,"","read"]],"pygame.midi.Output":[[37,8,1,"","abort"],[37,8,1,"","close"],[37,8,1,"","note_off"],[37,8,1,"","note_on"],[37,8,1,"","pitch_bend"],[37,8,1,"","set_instrument"],[37,8,1,"","write"],[37,8,1,"","write_short"],[37,8,1,"","write_sys_ex"]],"pygame.mixer":[[38,6,1,"","Channel"],[38,6,1,"","Sound"],[38,9,1,"","fadeout"],[38,9,1,"","find_channel"],[38,9,1,"","get_busy"],[38,9,1,"","get_init"],[38,9,1,"","get_num_channels"],[38,9,1,"","get_sdl_mixer_version"],[38,9,1,"","init"],[40,5,0,"-","music"],[38,9,1,"","pause"],[38,9,1,"","pre_init"],[38,9,1,"","quit"],[38,9,1,"","set_num_channels"],[38,9,1,"","set_reserved"],[38,9,1,"","stop"],[38,9,1,"","unpause"]],"pygame.mixer.Channel":[[38,8,1,"","fadeout"],[38,8,1,"","get_busy"],[38,8,1,"","get_endevent"],[38,8,1,"","get_queue"],[38,8,1,"","get_sound"],[38,8,1,"","get_volume"],[38,8,1,"","pause"],[38,8,1,"","play"],[38,8,1,"","queue"],[38,8,1,"","set_endevent"],[38,8,1,"","set_volume"],[38,8,1
,"","stop"],[38,8,1,"","unpause"]],"pygame.mixer.Sound":[[38,8,1,"","fadeout"],[38,8,1,"","get_length"],[38,8,1,"","get_num_channels"],[38,8,1,"","get_raw"],[38,8,1,"","get_volume"],[38,8,1,"","play"],[38,8,1,"","set_volume"],[38,8,1,"","stop"]],"pygame.mixer.music":[[40,9,1,"","fadeout"],[40,9,1,"","get_busy"],[40,9,1,"","get_endevent"],[40,9,1,"","get_pos"],[40,9,1,"","get_volume"],[40,9,1,"","load"],[40,9,1,"","pause"],[40,9,1,"","play"],[40,9,1,"","queue"],[40,9,1,"","rewind"],[40,9,1,"","set_endevent"],[40,9,1,"","set_pos"],[40,9,1,"","set_volume"],[40,9,1,"","stop"],[40,9,1,"","unload"],[40,9,1,"","unpause"]],"pygame.mouse":[[39,9,1,"","get_cursor"],[39,9,1,"","get_focused"],[39,9,1,"","get_pos"],[39,9,1,"","get_pressed"],[39,9,1,"","get_rel"],[39,9,1,"","get_visible"],[39,9,1,"","set_cursor"],[39,9,1,"","set_pos"],[39,9,1,"","set_visible"]],"pygame.pixelcopy":[[43,9,1,"","array_to_surface"],[43,9,1,"","make_surface"],[43,9,1,"","map_array"],[43,9,1,"","surface_to_array"]],"pygame.scrap":[[46,9,1,"","contains"],[46,9,1,"","get"],[46,9,1,"","get_init"],[46,9,1,"","get_types"],[46,9,1,"","init"],[46,9,1,"","lost"],[46,9,1,"","put"],[46,9,1,"","set_mode"]],"pygame.sndarray":[[49,9,1,"","array"],[49,9,1,"","get_arraytype"],[49,9,1,"","get_arraytypes"],[49,9,1,"","make_sound"],[49,9,1,"","samples"],[49,9,1,"","use_arraytype"]],"pygame.sprite":[[50,6,1,"","DirtySprite"],[50,6,1,"","Group"],[50,9,1,"","GroupSingle"],[50,6,1,"","LayeredDirty"],[50,6,1,"","LayeredUpdates"],[50,9,1,"","OrderedUpdates"],[50,6,1,"","RenderClear"],[50,6,1,"","RenderPlain"],[50,6,1,"","RenderUpdates"],[50,6,1,"","Sprite"],[50,9,1,"","collide_circle"],[50,9,1,"","collide_circle_ratio"],[50,9,1,"","collide_mask"],[50,9,1,"","collide_rect"],[50,9,1,"","collide_rect_ratio"],[50,9,1,"","groupcollide"],[50,9,1,"","spritecollide"],[50,9,1,"","spritecollideany"]],"pygame.sprite.Group":[[50,8,1,"","add"],[50,8,1,"","clear"],[50,8,1,"","copy"],[50,8,1,"","draw"],[50,8,1,"","empty"],[50,8,1,"","has"],[50,8,1,"","remove"],[50,8,1,"","sprites"],[50,8,1,"","update"]],"pygame.sprite.LayeredDirty":[[50,8,1,"","change_layer"],[50,8,1,"","clear"],[50,8,1,"","draw"],[50,8,1,"","get_clip"],[50,8,1,"","repaint_rect"],[50,8,1,"","set_clip"],[50,8,1,"","set_timing_threshold"],[50,8,1,"","set_timing_treshold"]],"pygame.sprite.LayeredUpdates":[[50,8,1,"","add"],[50,8,1,"","change_layer"],[50,8,1,"","draw"],[50,8,1,"","get_bottom_layer"],[50,8,1,"","get_layer_of_sprite"],[50,8,1,"","get_sprite"],[50,8,1,"","get_sprites_at"],[50,8,1,"","get_sprites_from_layer"],[50,8,1,"","get_top_layer"],[50,8,1,"","get_top_sprite"],[50,8,1,"","layers"],[50,8,1,"","move_to_back"],[50,8,1,"","move_to_front"],[50,8,1,"","remove_sprites_of_layer"],[50,8,1,"","sprites"],[50,8,1,"","switch_layer"]],"pygame.sprite.RenderUpdates":[[50,8,1,"","draw"]],"pygame.sprite.Sprite":[[50,8,1,"","add"],[50,8,1,"","alive"],[50,8,1,"","groups"],[50,8,1,"","kill"],[50,8,1,"","remove"],[50,8,1,"","update"]],"pygame.surfarray":[[52,9,1,"","array2d"],[52,9,1,"","array3d"],[52,9,1,"","array_alpha"],[52,9,1,"","array_blue"],[52,9,1,"","array_colorkey"],[52,9,1,"","array_green"],[52,9,1,"","array_red"],[52,9,1,"","blit_array"],[52,9,1,"","get_arraytype"],[52,9,1,"","get_arraytypes"],[52,9,1,"","make_surface"],[52,9,1,"","map_array"],[52,9,1,"","pixels2d"],[52,9,1,"","pixels3d"],[52,9,1,"","pixels_alpha"],[52,9,1,"","pixels_blue"],[52,9,1,"","pixels_green"],[52,9,1,"","pixels_red"],[52,9,1,"","use_arraytype"]],"pygame.tests":[[53,9,1,"","run"]],"pygame.time":[[54,6,1,"","Clock"],[54,9
,1,"","delay"],[54,9,1,"","get_ticks"],[54,9,1,"","set_timer"],[54,9,1,"","wait"]],"pygame.time.Clock":[[54,8,1,"","get_fps"],[54,8,1,"","get_rawtime"],[54,8,1,"","get_time"],[54,8,1,"","tick"],[54,8,1,"","tick_busy_loop"]],"pygame.transform":[[56,9,1,"","average_color"],[56,9,1,"","average_surfaces"],[56,9,1,"","chop"],[56,9,1,"","flip"],[56,9,1,"","get_smoothscale_backend"],[56,9,1,"","laplacian"],[56,9,1,"","rotate"],[56,9,1,"","rotozoom"],[56,9,1,"","scale"],[56,9,1,"","scale2x"],[56,9,1,"","set_smoothscale_backend"],[56,9,1,"","smoothscale"],[56,9,1,"","threshold"]],"pygame.version":[[44,11,1,"","SDL"],[44,11,1,"","rev"],[44,11,1,"","ver"],[44,11,1,"","vernum"]],pgBuffer_AsArrayInterface:[[1,2,1,"c.pgBuffer_AsArrayInterface","view_p"]],pgBuffer_AsArrayStruct:[[1,2,1,"c.pgBuffer_AsArrayStruct","view_p"]],pgBuffer_Release:[[1,2,1,"c.pgBuffer_Release","pg_view_p"]],pgBufproxy_Check:[[2,2,1,"c.pgBufproxy_Check","x"]],pgBufproxy_GetParent:[[2,2,1,"c.pgBufproxy_GetParent","obj"]],pgBufproxy_New:[[2,2,1,"c.pgBufproxy_New","get_buffer"],[2,2,1,"c.pgBufproxy_New","obj"]],pgBufproxy_Trip:[[2,2,1,"c.pgBufproxy_Trip","obj"]],pgCD_AsID:[[3,2,1,"c.pgCD_AsID","x"]],pgCD_Check:[[3,2,1,"c.pgCD_Check","x"]],pgCD_New:[[3,2,1,"c.pgCD_New","id"]],pgChannel_AsInt:[[8,2,1,"c.pgChannel_AsInt","x"]],pgChannel_Check:[[8,2,1,"c.pgChannel_Check","obj"]],pgChannel_New:[[8,2,1,"c.pgChannel_New","channelnum"]],pgColor_Check:[[4,2,1,"c.pgColor_Check","obj"]],pgColor_New:[[4,2,1,"c.pgColor_New","rgba"]],pgColor_NewLength:[[4,2,1,"c.pgColor_NewLength","length"],[4,2,1,"c.pgColor_NewLength","rgba"]],pgDict_AsBuffer:[[1,2,1,"c.pgDict_AsBuffer","dict"],[1,2,1,"c.pgDict_AsBuffer","flags"],[1,2,1,"c.pgDict_AsBuffer","pg_view_p"]],pgEventObject:[[6,3,1,"c.pgEventObject.type","type"]],pgEvent_Check:[[6,2,1,"c.pgEvent_Check","x"]],pgEvent_FillUserEvent:[[6,2,1,"c.pgEvent_FillUserEvent","e"],[6,2,1,"c.pgEvent_FillUserEvent","event"]],pgEvent_New2:[[6,2,1,"c.pgEvent_New2","dict"],[6,2,1,"c.pgEvent_New2","type"]],pgEvent_New:[[6,2,1,"c.pgEvent_New","event"]],pgFont_Check:[[7,2,1,"c.pgFont_Check","x"]],pgFont_IS_ALIVE:[[7,2,1,"c.pgFont_IS_ALIVE","o"]],pgFont_New:[[7,2,1,"c.pgFont_New","filename"],[7,2,1,"c.pgFont_New","font_index"]],pgLifetimeLockObject:[[13,3,1,"c.pgLifetimeLockObject.lockobj","lockobj"],[13,3,1,"c.pgLifetimeLockObject.surface","surface"]],pgLifetimeLock_Check:[[13,2,1,"c.pgLifetimeLock_Check","x"]],pgObject_GetBuffer:[[1,2,1,"c.pgObject_GetBuffer","flags"],[1,2,1,"c.pgObject_GetBuffer","obj"],[1,2,1,"c.pgObject_GetBuffer","pg_view_p"]],pgRWops_FromFileObject:[[10,2,1,"c.pgRWops_FromFileObject","obj"]],pgRWops_FromObject:[[10,2,1,"c.pgRWops_FromObject","obj"]],pgRWops_GetFileExtension:[[10,2,1,"c.pgRWops_GetFileExtension","rw"]],pgRWops_IsFileObject:[[10,2,1,"c.pgRWops_IsFileObject","rw"]],pgRWops_ReleaseObject:[[10,2,1,"c.pgRWops_ReleaseObject","context"]],pgRectObject:[[9,3,1,"c.pgRectObject.r","r"]],pgRect_AsRect:[[9,2,1,"c.pgRect_AsRect","obj"]],pgRect_FromObject:[[9,2,1,"c.pgRect_FromObject","obj"],[9,2,1,"c.pgRect_FromObject","temp"]],pgRect_New4:[[9,2,1,"c.pgRect_New4","h"],[9,2,1,"c.pgRect_New4","w"],[9,2,1,"c.pgRect_New4","x"],[9,2,1,"c.pgRect_New4","y"]],pgRect_New:[[9,2,1,"c.pgRect_New","r"]],pgRect_Normalize:[[9,2,1,"c.pgRect_Normalize","rect"]],pgSound_AsChunk:[[8,2,1,"c.pgSound_AsChunk","x"]],pgSound_Check:[[8,2,1,"c.pgSound_Check","obj"]],pgSound_New:[[8,2,1,"c.pgSound_New","chunk"]],pgSurface_AsSurface:[[12,2,1,"c.pgSurface_AsSurface","x"]],pgSurface_Blit:[[12,2,1,"c.pgSurface_Blit","dstobj"],[12,
2,1,"c.pgSurface_Blit","dstrect"],[12,2,1,"c.pgSurface_Blit","srcobj"],[12,2,1,"c.pgSurface_Blit","srcrect"],[12,2,1,"c.pgSurface_Blit","the_args"]],pgSurface_Check:[[12,2,1,"c.pgSurface_Check","x"]],pgSurface_Lock:[[13,2,1,"c.pgSurface_Lock","surfobj"]],pgSurface_LockBy:[[13,2,1,"c.pgSurface_LockBy","lockobj"],[13,2,1,"c.pgSurface_LockBy","surfobj"]],pgSurface_LockLifetime:[[13,2,1,"c.pgSurface_LockLifetime","lockobj"],[13,2,1,"c.pgSurface_LockLifetime","surfobj"]],pgSurface_New:[[12,2,1,"c.pgSurface_New","s"]],pgSurface_Prep:[[13,2,1,"c.pgSurface_Prep","surfobj"]],pgSurface_UnLock:[[13,2,1,"c.pgSurface_UnLock","surfobj"]],pgSurface_UnLockBy:[[13,2,1,"c.pgSurface_UnLockBy","lockobj"],[13,2,1,"c.pgSurface_UnLockBy","surfobj"]],pgSurface_Unprep:[[13,2,1,"c.pgSurface_Unprep","surfobj"]],pgVidInfo_AsVidInfo:[[5,2,1,"c.pgVidInfo_AsVidInfo","obj"]],pgVidInfo_Check:[[5,2,1,"c.pgVidInfo_Check","x"]],pgVidInfo_New:[[5,2,1,"c.pgVidInfo_New","i"]],pg_EncodeFilePath:[[10,2,1,"c.pg_EncodeFilePath","eclass"],[10,2,1,"c.pg_EncodeFilePath","obj"]],pg_EncodeString:[[10,2,1,"c.pg_EncodeString","eclass"],[10,2,1,"c.pg_EncodeString","encoding"],[10,2,1,"c.pg_EncodeString","errors"],[10,2,1,"c.pg_EncodeString","obj"]],pg_FloatFromObj:[[1,2,1,"c.pg_FloatFromObj","obj"],[1,2,1,"c.pg_FloatFromObj","val"]],pg_FloatFromObjIndex:[[1,2,1,"c.pg_FloatFromObjIndex","index"],[1,2,1,"c.pg_FloatFromObjIndex","obj"],[1,2,1,"c.pg_FloatFromObjIndex","val"]],pg_IntFromObj:[[1,2,1,"c.pg_IntFromObj","obj"],[1,2,1,"c.pg_IntFromObj","val"]],pg_IntFromObjIndex:[[1,2,1,"c.pg_IntFromObjIndex","index"],[1,2,1,"c.pg_IntFromObjIndex","obj"],[1,2,1,"c.pg_IntFromObjIndex","val"]],pg_RGBAFromObj:[[1,2,1,"c.pg_RGBAFromObj","RGBA"],[1,2,1,"c.pg_RGBAFromObj","obj"]],pg_RegisterQuit:[[1,2,1,"c.pg_RegisterQuit","f"]],pg_SetDefaultWindow:[[1,2,1,"c.pg_SetDefaultWindow","win"]],pg_SetDefaultWindowSurface:[[1,2,1,"c.pg_SetDefaultWindowSurface","screen"]],pg_TwoFloatsFromObj:[[1,2,1,"c.pg_TwoFloatsFromObj","obj"],[1,2,1,"c.pg_TwoFloatsFromObj","val1"],[1,2,1,"c.pg_TwoFloatsFromObj","val2"]],pg_TwoIntsFromObj:[[1,2,1,"c.pg_TwoIntsFromObj","obj"],[1,2,1,"c.pg_TwoIntsFromObj","v2"],[1,2,1,"c.pg_TwoIntsFromObj","val1"]],pg_UintFromObj:[[1,2,1,"c.pg_UintFromObj","obj"],[1,2,1,"c.pg_UintFromObj","val"]],pg_UintFromObjIndex:[[1,2,1,"c.pg_UintFromObjIndex","_index"],[1,2,1,"c.pg_UintFromObjIndex","obj"],[1,2,1,"c.pg_UintFromObjIndex","val"]],pg_buffer:[[1,3,1,"c.pg_buffer.consumer","consumer"],[1,3,1,"c.pg_buffer.release_buffer","release_buffer"],[1,3,1,"c.pg_buffer.view","view"]],pg_mod_autoinit:[[1,2,1,"c.pg_mod_autoinit","modname"]],pg_mod_autoquit:[[1,2,1,"c.pg_mod_autoquit","modname"]],pygame:[[17,6,1,"","BufferProxy"],[20,6,1,"","Color"],[41,6,1,"","Overlay"],[42,6,1,"","PixelArray"],[45,6,1,"","Rect"],[51,6,1,"","Surface"],[18,5,0,"-","camera"],[19,5,0,"-","cdrom"],[22,5,0,"-","cursors"],[23,5,0,"-","display"],[24,5,0,"-","draw"],[44,9,1,"","encode_file_path"],[44,9,1,"","encode_string"],[44,10,1,"","error"],[25,5,0,"-","event"],[26,5,0,"-","examples"],[27,5,0,"-","fastevent"],[28,5,0,"-","font"],[29,5,0,"-","freetype"],[44,9,1,"","get_error"],[44,9,1,"","get_init"],[44,9,1,"","get_sdl_byteorder"],[44,9,1,"","get_sdl_version"],[30,5,0,"-","gfxdraw"],[31,5,0,"-","image"],[44,9,1,"","init"],[32,5,0,"-","joystick"],[33,5,0,"-","key"],[34,5,0,"-","locals"],[35,5,0,"-","mask"],[36,5,0,"-","math"],[37,5,0,"-","midi"],[38,5,0,"-","mixer"],[39,5,0,"-","mouse"],[43,5,0,"-","pixelcopy"],[44,9,1,"","quit"],[44,9,1,"","register_quit"],[46,5,0,"-","scrap"],[44,9
,1,"","set_error"],[49,5,0,"-","sndarray"],[50,5,0,"-","sprite"],[52,5,0,"-","surfarray"],[53,5,0,"-","tests"],[54,5,0,"-","time"],[56,5,0,"-","transform"],[44,5,0,"-","version"]]},objnames:{"0":["c","macro","C macro"],"1":["c","function","C function"],"10":["py","exception","Python exception"],"11":["py","data","Python data"],"2":["c","functionParam","C function parameter"],"3":["c","member","C member"],"4":["c","type","C type"],"5":["py","module","Python module"],"6":["py","class","Python class"],"7":["py","attribute","Python attribute"],"8":["py","method","Python method"],"9":["py","function","Python function"]},objtypes:{"0":"c:macro","1":"c:function","10":"py:exception","11":"py:data","2":"c:functionParam","3":"c:member","4":"c:type","5":"py:module","6":"py:class","7":"py:attribute","8":"py:method","9":"py:function"},terms:{"0":[1,2,6,7,10,12,17,18,19,20,22,23,24,25,26,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,47,48,50,51,52,54,55,56,57,58,59,62,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,86,88,89],"02778":29,"08333":29,"0\uac1c":[78,79],"0\ucc28\uc6d0":81,"0d":73,"0dev11":32,"0x00":20,"0x00000000":51,"0x00000001":51,"0x00000004":51,"0x00000100":51,"0x00001000":51,"0x00002000":51,"0x00004000":51,"0x00010000":51,"0x01000000":51,"0x10":37,"0x10000":29,"0x10ffff":29,"0x11":37,"0x12":37,"0x13":37,"0x7d":37,"0x90":37,"0xaacce":42,"0xc0":37,"0xd800":29,"0xdfff":29,"0xf0":37,"0xf7":37,"0xff":20,"0xff00ff":42,"0xffff":23,"0xrrggbb":20,"0xrrggbbaa":20,"1":[1,2,3,4,6,9,12,14,16,17,18,20,22,23,24,25,26,28,29,30,31,32,33,35,36,37,38,39,40,42,43,44,45,46,47,48,49,50,51,52,54,55,56,57,58,59,60,62,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,83,84],"10":[23,24,26,32,35,36,45,54,56,58,62,63,65,66,68,69,70,71,72,73,76,77,78,79,80,81,84,85,88,89],"100":[20,24,37,51,57,62,65,84],"1000":[50,54,84],"1024":[37,38],"105":57,"1080":[23,59],"1080p":23,"10\ub610\ub294":79,"10\uc758":76,"10\uc774":77,"10th":35,"11":[32,68,69,76,77],"114":42,"115":24,"117":26,"11\uc744":77,"11\uc758":76,"11\uc774":77,"12":[11,29,32,44,58,66,68,76],"120":22,"1234":44,"125":24,"127":[35,37,71,72,73,79,80,81],"128":65,"1280":[58,66],"13":[32,63,65,68,76,89],"135":24,"14":[32,47,68,76],"140":[67,68,69,70,75,76,77,78],"145":57,"14\uc758":76,"15":[24,32,51,58,66,68,69,76,77],"150":[24,85],"15\uc758":76,"16":[15,22,23,29,32,38,49,59,65,68,76],"16711680":59,"16bit":38,"17":[63,68,76],"170":[42,57,58,66],"17\uc5d0\uc11c\uc758":76,"18":[24,58,63,66,68,76],"187":[58,66],"19":[68,76],"192":84,"1920":[23,59],"19\ub294":76,"19\uc5d0\uc11c":76,"1\uac1c":78,"1\uac1c\uc758":76,"1\uacfc":77,"1\uc778":75,"1\uc778\uc9c0":81,"1\uc904\uc9dc\ub9ac":76,"1\ucc28\uc6d0":81,"1\ucd08\uc5d0":77,"1d":[42,73],"1s":62,"1x1":37,"2":[1,9,15,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,47,50,51,52,54,55,56,57,58,61,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,83,84],"20":[22,24,32,45,56,65,68,71,72,73,76,79,80,81,84],"200":[24,63],"2000":[16,63],"20000":37,"2001":63,"2004":18,"204":42,"20500":37,"20\uc77c":79,"20\uc904\uc9dc\ub9ac":76,"21":[37,63],"210":24,"22":49,"220":[24,67,68,69,70,75,76,77,78],"22000":49,"22050":38,"225":24,"23":63,"235":[58,66],"238":[42,58,66],"2380":23,"24":[17,18,22,24,28,31,42,51,52,56,63,65],"240":[63,68,69,70,71,72,73,76,77,78,79,80,81],"24x24":22,"25":[56,58,66],"250":[24,85],"255":[1,20,24,28,29,30,35,42,43,50,51,56,57,58,59,65,68,69,70,71,72,73,76,77,78,79,80,81,84,85],"256":[23,37,51],"260":24,"27":37,"270":[72,73,80,81],"28":84,"299":42,"2\uac1c\ub97c":80,
"2\uac1c\uc758":78,"2\ucc28\ub235":81,"2\ucc28\uc6d0":81,"2d":[16,35,42,43,48,52,63,64,65,68,73,76],"2d\uc6a9":76,"2pre":36,"2s":62,"2x2":65,"3":[1,17,18,20,22,23,24,28,30,31,32,33,36,37,38,39,42,43,44,47,50,51,61,62,63,64,65,67,68,69,70,71,73,76,77,78,79,81,83,84],"30":[22,24,29,30,38,45,53,57,63,71,84],"300":24,"3072":38,"30\uc73c\ub85c":79,"315":29,"32":[1,15,17,18,29,31,35,38,51,52,56,59,65,68,69,70,71,72,73,76,77,78,79,80,81],"320":[63,68,69,70,71,72,73,76,77,78,79,80,81],"325":[72,73,80,81],"32767":47,"32768":47,"32x32":23,"33":65,"35":[74,84],"359":29,"35\ub144":82,"36":[29,85],"360":[20,29,32,47,58,66],"390":29,"3\uac1c\uc758":[76,79],"3d":[23,26,43,52,63,65,67],"3f":32,"3rd":63,"3x3":65,"4":[1,4,9,17,20,24,28,29,32,33,36,37,38,39,42,47,51,56,57,58,59,60,61,62,65,66,68,69,70,71,72,73,76,77,78,79,80,81,83,84,86,89],"40":[22,24,54,56,62,63,71,72,73,79,80,81,84],"400":[22,23,24,57],"4096":[37,38],"42":24,"425":[72,73,80,81],"4321":44,"44100":38,"45":[29,72,73,80,81],"47":89,"480":[18,26,39,48,57,58,59,62,66,68,69,70,71,72,73,76,77,78,79,80,81,89],"480\uc73c\ub85c":76,"4\uac1c\uc758":[76,79],"4k":23,"4th":65,"5":[14,20,23,24,25,29,32,33,35,36,37,38,39,40,42,44,46,49,50,57,58,61,62,63,65,66,68,69,70,71,72,73,76,77,78,79,80,81,83,89],"50":[22,24,26,57,65,73,81,84,85],"500":32,"500m":37,"512":38,"55":56,"56":24,"587":42,"5\uac00":77,"5\uac1c\uc758":76,"5\uc5d0":78,"5x5":[73,81],"6":[24,26,32,33,38,44,50,61,62,65,68,69,70,71,73,76,77,78,79,81,83],"60":[22,24,37,38,39,58,66,69,71,72,73,77,79,80,81,89],"600":[22,59,62],"60\uc774\ub77c\ub294":77,"63":20,"64":[20,29,56,58,66],"640":[18,26,39,48,57,59,62,68,69,70,71,72,73,76,77,78,79,80,81,89],"640x480":[23,62,65],"65":[37,56],"65280":59,"6\uc744":79,"6x":84,"7":[23,32,33,43,44,63,65,68,69,70,71,76,77,78,79,83],"70":24,"700":[23,32],"72":29,"720":29,"75":[22,24,69,77],"7\uc5d0\uc11c":79,"7\uc758":76,"8":[15,17,18,20,22,23,24,26,28,29,30,31,32,33,35,38,42,43,44,46,49,50,51,52,54,56,59,62,63,65,68,69,70,76,77,78,83,89],"80":[24,50,58,66],"800":59,"8191":37,"8192":37,"8\uc758":76,"8bit":38,"9":[14,16,17,18,20,23,25,26,29,30,32,33,35,36,37,38,40,42,43,44,45,46,47,51,52,56,57,65,68,69,70,76,77,78],"90":[24,35,38,56,57,58,66,89],"97":33,"9\uc758":76,"\uac00":[76,77,81],"\uac00\ub2a5":80,"\uac00\ub3c5\uc131\uc744":76,"\uac00\ubcf4\uc790":78,"\uac00\uc18d":77,"\uac00\uc7a5":[76,79,82],"\uac00\uc815\ud558\uba74":76,"\uac00\uc815\ud574":75,"\uac00\uc9c0\uace0":[75,78,81],"\uac00\uc9c0\uae30":76,"\uac00\uc9c0\ub294":76,"\uac00\uc9c0\ubbc0\ub85c":[76,79],"\uac00\uc9c4\ub2e4":[76,80],"\uac00\uc9c4\ub2e4\ub294":75,"\uac00\uc9c8":82,"\uac01\uac01":79,"\uac01\uac01\uc758":79,"\uac04\ub2e8\ud558\ub2e4":81,"\uac10\uc18c\uc2dc\ud0a4\ub294":80,"\uac10\uc18c\ud55c\ub2e4":81,"\uac12":79,"\uac12\ub9cc\uc774":79,"\uac12\uc740":[76,79],"\uac12\uc744":[76,79,80],"\uac12\uc774":79,"\uac12\uc774\uace0":79,"\uac16\ub294":[81,82],"\uac19\ub2e4":[75,76,82],"\uac19\uc544":[77,81],"\uac19\uc740":[76,77,82],"\uac19\uc74c":81,"\uac19\uc774":[76,79],"\uac1c\ub150\uc740":82,"\uac1c\ub150\uc744":77,"\uac1c\ub150\uc774\ub2e4":82,"\uac1c\ubc1c\uc790\uac00":75,"\uac1c\uc120\uc758":81,"\uac1c\uc218\ub97c":81,"\uac1c\uc218\ub9cc\ud07c":79,"\uac1d\uccb4":[76,78],"\uac1d\uccb4\uc5d0":76,"\uac1d\uccb4\uc758":76,"\uac70\uc758":[75,76],"\uac71\uc815\ud558\uc9c0":82,"\uac78\ub9ac\ub294":76,"\uac80\uc740":[79,81],"\uac80\uc740\ube14\ub85d":16,"\uac83":[75,76,78],"\uac83\uacfc":[80,82],"\uac83\ub4e4\uc774":81,"\uac83\ub4e4\uc774\ub2e4":76,"\uac83\ubcf4\ub2e4":79,"\uac83\uc5d0":[75,77,78],"\uac83
\uc5d0\ub9cc":75,"\uac83\uc740":[76,77,78,79,80,82],"\uac83\uc744":79,"\uac83\uc774":[75,76,79,80],"\uac83\uc774\uae30":78,"\uac83\uc774\ub2e4":[75,76,77,78,79,80,81,82],"\uac83\uc778\ub2e4":80,"\uac83\ucc98\ub7fc":[77,80],"\uac8c\uc784":[75,76,77,78,81,82],"\uac8c\uc784\ub3c4":[75,82],"\uac8c\uc784\ub9cc\uc758":82,"\uac8c\uc784\uc5d0\uc11c\uc758":79,"\uac8c\uc784\uc5d0\uc120":77,"\uac8c\uc784\uc5d4\uc9c4":75,"\uac8c\uc784\uc5d4\uc9c4\uc774\ub098":75,"\uac8c\uc784\uc740":[75,78,81],"\uac8c\uc784\uc744":[75,78,82],"\uac8c\uc784\uc758":[75,78],"\uac8c\uc784\uc774":[75,77,78,80,81,82],"\uac8c\uc784\uc774\ub098":82,"\uac8c\uc784\uc774\ub2e4":81,"\uac8c\uc784\uc774\ub77c\uace0":78,"\uac8c\uc784\uc774\ubbc0\ub85c":76,"\uac8c\uc784\ud310":83,"\uac8c\uc784\ud310\uc740":81,"\uac8c\uc784\ud310\uc744":81,"\uacaa\uc5b4":82,"\uacb0\uacfc":[76,77,78],"\uacb0\uacfc\uac00":77,"\uacb0\uacfc\ub294":[78,81],"\uacb0\uacfc\ub97c":77,"\uacb0\uacfc\ubb3c\ub4e4\uc744":76,"\uacb0\uacfc\ubb3c\uc740":76,"\uacb0\ub860\uc774":82,"\uacb0\uc815\ud560":76,"\uacbd\uc6b0":[76,78,79,80],"\uacbd\uc6b0\ub97c":78,"\uacbd\uc6b0\uc758":82,"\uacbd\ud5d8\uc774":78,"\uacc4\uc0b0\ub9cc":76,"\uacc4\uc0b0\uc744":80,"\uacc4\uc0b0\ud558\uae30":77,"\uacc4\uc0b0\ud574\uc57c":81,"\uacc4\uc18d":[75,76,78],"\uace0":81,"\uace0\uae09":75,"\uace0\ub824\ub418\uc5c8\uc744":82,"\uace0\ub824\ub418\uc9c0":77,"\uace0\ub824\ud558\uc5ec":76,"\uace0\ub974\ub294":81,"\uace0\uc815":77,"\uace0\uc815\ub418\uc5b4":[76,79],"\uace0\uc815\ub41c":76,"\uace0\uc815\ub420":77,"\uace0\uc815\uc2dc\ucf1c":77,"\uace0\uc815\uc2dc\ud0a4\ub294":77,"\uace0\uc815\uc2dc\ud0a8\ub2e4":76,"\uace0\uc815\ud558\ub294":77,"\uacf5\uac04":76,"\uacf5\uac04\uc0c1\uc5d0\uc11c\uc758":75,"\uacf5\uac04\uc744":[76,80],"\uacf5\uc774\ub098":77,"\uacfc":[76,80],"\uad00\ub828":75,"\uad6c\uc131":76,"\uad6c\uc131\ub41c\ub2e4":75,"\uad6c\uc131\ud560":75,"\uad6c\uc2dd\uc774\uace0":75,"\uad6c\uc5ed\uc744":76,"\uad6c\uc5ed\uc758":76,"\uad6c\uccb4\uc801\uc778":[78,80],"\uad6c\ud604\ud55c":[81,82],"\uad6c\ud604\ud55c\ub2e4\uba74":81,"\uad6c\ud604\ud560":82,"\uaddc\uce59":[78,82],"\uaddc\uce59\uc740":81,"\uaddc\uce59\uc744":[81,82],"\uaddc\uce59\uc774":82,"\uadf8":[75,76,77,79,80,81],"\uadf8\uac83\uc740":79,"\uadf8\uac83\uc774":[78,82],"\uadf8\uac83\uc774\ub2e4":76,"\uadf8\ub2e4\uc9c0":76,"\uadf8\ub798\uc11c":[75,76,77],"\uadf8\ub798\ud53d":75,"\uadf8\ub798\ud53d\uc744":75,"\uadf8\ub7ec\ub098":[77,78,82],"\uadf8\ub7ec\uba74":[75,77,81],"\uadf8\ub7ec\ubbc0\ub85c":[76,77,80,82],"\uadf8\ub7f0\uac00":78,"\uadf8\ub807\ub2e4\uba74":[76,77],"\uadf8\ub807\uc9c0":80,"\uadf8\ub824\uc9c0\uace0":76,"\uadf8\ub824\uc9c0\ub294":76,"\uadf8\ub824\uc9c4\ub2e4":81,"\uadf8\ub9ac\uace0":[75,76,77],"\uadf8\ub9ac\uae30":75,"\uadf8\ub9ac\ub294":[76,79],"\uadf8\ub9b0\ub2e4":[79,80],"\uadf9\uc18c\uc218\ub9cc\uc744":82,"\uae30\ub2a5":77,"\uae30\ub2a5\uc744":[76,77],"\uae30\ub85d\ud574\uc57c":80,"\uae30\ubc18":76,"\uae30\ubc18\uc73c\ub85c":76,"\uae30\ubc18\ud558\uc600\uae30":75,"\uae30\uc874\uc758":82,"\uae38\uc774\ub97c":76,"\uae4c\uba39\uc5b4\uc120":76,"\uae5c\ube61\uac70\ub9ac\ub294":75,"\uaf64":76,"\ub049\uaca8":75,"\ub05d\ub09c":76,"\ub05d\uc774":82,"\ub098":81,"\ub098\uac8c":82,"\ub098\ub220\uc9c8":76,"\ub098\ub294":78,"\ub098\uba74":76,"\ub098\uc544\uc84c\ub2e4":79,"\ub098\uc911\uc5d0":[76,77,78],"\ub098\ud0c0\ub0b8\ub2e4":80,"\ub09c\ub2e4":82,"\ub09c\uc218":82,"\ub09c\uc218\uac00":82,"\ub09c\uc218\uae4c\uc9c0":82,"\ub0ab\ub2e4":75,"\ub0b4\ubd80":[76,78],"\ub0b4\ubd80\ub97c":80,"\ub0b4\ubd80\uc5d0\uc11c":80,"\ub0b4\ubd80\uc6
00\ub2e4\uba74":80,"\ub0b4\uc5d0\uc11c":[77,81,82],"\ub0b4\uc6a9":76,"\ub0b4\uc6a9\uacfc":76,"\ub0b4\uc6a9\uc740":80,"\ub108\ubb34":77,"\ub118\uac8c":82,"\ub123\ub294\ub2e4\uba74":81,"\ub123\uc73c\uba74":81,"\ub124\ubc88\uc9f8":79,"\ub192\ub2e4\ub294":75,"\ub192\uc774\uae30":76,"\ub204\ub974\ub294":77,"\ub204\ub974\uba74":78,"\ub208\uc0ac\ud0dc":82,"\ub208\uc0ac\ud0dc\ub97c":82,"\ub20c\ub7ec":79,"\ub20c\ub824\uc788\ub294":78,"\ub20c\ub838\ub2e4":78,"\ub20c\ub9ac\uc9c0":78,"\ub20c\ub9b0":78,"\ub294":76,"\ub2a5\ub825\uc774\ub2e4":82,"\ub2e4\ub8e8\uaca0\ub2e4":78,"\ub2e4\ub8e8\uae30":78,"\ub2e4\ub974\uac8c":78,"\ub2e4\ub974\ub2e4":77,"\ub2e4\ub974\uc9c0":79,"\ub2e4\ub978":[76,78,82],"\ub2e4\uc2dc":79,"\ub2e4\uc591\uc131":78,"\ub2e4\uc591\ud788":81,"\ub2e4\uc6e0\ub2e4":82,"\ub2e4\uc74c":[76,79,80,81],"\ub2e4\uc911":77,"\ub2e4\ud589\ud788":75,"\ub2e8":77,"\ub2e8\uacc4\ub85c":78,"\ub2e8\uacc4\ub97c":76,"\ub2e8\uacc4\uc5d0\uc11c\uc758":78,"\ub2e8\uc0c9":76,"\ub2e8\uc21c\ud558\uae30":81,"\ub2e8\uc21c\ud558\ub2e4":[79,80],"\ub2e8\uc21c\ud558\uc9c0\ub9cc":81,"\ub2e8\uc21c\ud788":[78,81],"\ub2e8\uc5b4\ub294":78,"\ub2e8\uc810\uc740":77,"\ub2e8\uc810\uc774":75,"\ub2ec\ub77c\uc9c0\ubbc0\ub85c":77,"\ub2ec\ub77c\uc9c4\ub2e4":81,"\ub2ec\ub77c\uc9c8":78,"\ub2f4\uc558\ub294\ub370":79,"\ub2f9\uc5f0\ud55c":76,"\ub300\ub2e8\ud788":[81,82],"\ub300\ub85c":81,"\ub300\ubcf4\uac8c":75,"\ub300\uc0c1\uc5d0":82,"\ub300\uccb4\ud558\ub294\uac00":76,"\ub300\ud55c":76,"\ub354":[75,78,79,82],"\ub370":76,"\ub370\uc5d0\ub9cc":80,"\ub370\uc774\ud130":[78,81],"\ub370\uc774\ud130\uac00":76,"\ub370\uc774\ud130\ub4e4\uc744":79,"\ub370\uc774\ud130\ub97c":79,"\ub3c4":[76,78],"\ub3c4\uc6c0":78,"\ub3c4\uc6c0\uc774":78,"\ub3c4\uc911":76,"\ub3c4\ud615":75,"\ub3c4\ud615\uc744":79,"\ub3d9\uae30\ubd80\uc5ec":78,"\ub3d9\uc2dc\uc5d0":75,"\ub3d9\uc77c\ud55c":[76,79,80],"\ub3d9\uc77c\ud574\uc57c":80,"\ub3d9\uc791\ud558\ub3c4\ub85d":78,"\ub3d9\uce58\uad00\uacc4\ub77c\ub294":75,"\ub418\uae30":78,"\ub418\ub3cc\uc544\uac00\uc57c":76,"\ub418\uc5b4\uc57c\ub9cc":77,"\ub418\uc5c8\ub294\uc9c0":78,"\ub418\uc9c0":79,"\ub41c":[78,80],"\ub41c\ub2e4":[75,76,77,78,79,80,81,82],"\ub41c\ub2e4\uba74":82,"\ub420":[75,77,79],"\ub450":[79,80,81,82],"\ub450\uaed8":79,"\ub450\uaed8\ub9cc":80,"\ub450\ub294":[79,80],"\ub450\ub294\ub370":76,"\ub450\uba74":77,"\ub450\ubc88\uc9f8":[78,79],"\ub450\uc5c8\ub2e4":76,"\ub458":75,"\ub458\uc9f8":[75,80],"\ub4a4\uc5d0":78,"\ub4b7\ubd80\ubd84\uc5d0":77,"\ub4e4\uba74":79,"\ub4e4\uc5b4\uc11c":76,"\ub4e4\uc744":80,"\ub4f1":[75,78],"\ub4f1\uc740":82,"\ub4f1\uc758":75,"\ub514\ub809\ud1a0\ub9ac\uc5d0":76,"\ub514\uc2a4\ud50c\ub808\uc774":75,"\ub514\uc790\uc778\ud560":79,"\ub51c\ub808\ub9c8\uac00":75,"\ub51c\ub808\ub9c8\ub97c":75,"\ub51c\ub808\uc774":77,"\ub530\ub77c":[77,81],"\ub530\uc62c":79,"\ub530\uc838\uc11c":79,"\ub54c":[76,77,79,81,82],"\ub54c\ub9c8\ub2e4":79,"\ub54c\ub9cc":76,"\ub54c\ubb38\uc5d0":[75,76,77,78,81],"\ub54c\ubb38\uc774\ub2e4":[76,77,78,80,81],"\ub54c\uc758":79,"\ub610\ub294":[75,79,82],"\ub610\ud55c":[77,81],"\ub611\uac19\uc740":78,"\ub73b\uc774\ub2e4":78,"\ub77c\uace0":[78,82],"\ub77c\ub294":[76,82],"\ub77c\uc774\ube0c\ub7ec\ub9ac\uc774\uae30":75,"\ub77c\uc774\ube0c\ub7ec\ub9ac\uc774\ub2e4":75,"\ub80c\ub354\ub9c1":79,"\ub85c\uc9c1\uc740":79,"\ub85c\uc9c1\uc744":[76,78],"\ub85c\uc9c1\uc774":77,"\ub8e8\ud2b82":77,"\ub97c":[76,77,78,80],"\ub9c8\ub77c":82,"\ub9c8\uc6b0\uc2a4":[75,78,80],"\ub9c8\uc6b0\uc2a4\uac00up":80,"\ub9c8\uc9c0\ub9c9\uc5d0":76,"\ub9c8\uc9c0\ub9c9\uc73c\ub85c":[78,79],"\ub9c8\ucc2c\uac00\uc9c0\uc774\
ub2e4":82,"\ub9cc":75,"\ub9cc\ub4dc\ub294":[75,80,82],"\ub9cc\ub4e0":75,"\ub9cc\ub4e0\ub2e4\uace0":75,"\ub9cc\ub4e4":[75,79,82],"\ub9cc\ub4e4\uace0":81,"\ub9cc\ub4e4\uc5b4":[79,81],"\ub9cc\ub4e4\uc5b4\uc11c":82,"\ub9cc\ub4e4\uc5b4\uc57c":80,"\ub9cc\ub4e4\uc5b4\uc9c4":81,"\ub9cc\ub4e4\uc5b4\uc9c4\ub2e4\uba74":82,"\ub9cc\ub4e4\uc5c8\ub2e4":79,"\ub9cc\uc57d":[76,79,80,82],"\ub9cc\uc744":[76,79],"\ub9ce\ub2e4":81,"\ub9ce\uc73c\ubbc0\ub85c":77,"\ub9ce\uc740":81,"\ub9d0\uc774\ub2e4":76,"\ub9d0\ud558\ub294":78,"\ub9d0\ud55c":76,"\ub9d0\ud588\ub2e4":82,"\ub9d0\ud588\ub4ef":76,"\ub9de\ub294\uac00":78,"\ub9de\uc744":76,"\ub9e4\uac1c":79,"\ub9e4\uc6b0":[78,82],"\uba39\ud788\ub294":76,"\uba3c\uc800":[78,79],"\uba54\ubaa8\ub9ac":75,"\uba64\ubc84":76,"\uba85\ub839\uc5b4\uac00":78,"\uba87":77,"\uba87\uba87":[76,78],"\ubaa8\ub2c8\ud130":76,"\ubaa8\ub450":[75,76],"\ubaa8\ub4c8\ub4e4\uc744":76,"\ubaa8\ub4e0":[75,76,78,81,82],"\ubaa9\ub85d\uc740":78,"\ubaa9\ud45c\uac00":75,"\ubab8\uc758":78,"\ubb34\uc5b8\uac00\ub97c":[76,78],"\ubb34\uc5c7\uc744":77,"\ubb34\uc5c7\uc774\ub4e0\uc9c0":82,"\ubb34\uc5c7\uc778\uac00":[77,79,82],"\ubb34\uc5c7\uc778\uc9c0\ub294":79,"\ubb34\uc791\uc704\ub85c":81,"\ubb34\ud55c":76,"\ubb36\uc744":80,"\ubb38\uad6c\uc774\ub2e4":76,"\ubb38\uc790\uc5f4\uc740":76,"\ubb38\uc790\uc5f4\uc774\ub2e4":76,"\ubb38\uc7a5\ub4e4":76,"\ubb38\uc81c\uac00":[76,77],"\ubb38\uc81c\ub97c":81,"\ubb3c\ub860":[75,76,77,78,79],"\ubb54\uac00":80,"\ubbf8\uce58\uac8c":82,"\ubc0f":78,"\ubc14\uafb8\uace0":77,"\ubc14\uafb8\ub294":77,"\ubc14\uafb8\ub294\uac00":79,"\ubc14\uafb8\uba74":81,"\ubc14\uafbc":78,"\ubc14\uafc0":77,"\ubc14\uafd4":79,"\ubc14\ub00c\ub294\uac00":77,"\ubc14\ub00c\ub294\uc9c0\ub97c":77,"\ubc14\ub294":79,"\ubc14\ub85c":[77,82],"\ubc18\ub4dc\uc2dc":[77,78],"\ubc18\ubcf5\ubb38":76,"\ubc18\uc601":78,"\ubc18\ud544\uc218\uc801\uc73c\ub85c":76,"\ubc18\ud658":76,"\ubc18\ud658\ud55c\ub2e4":[76,81],"\ubc1c\uc0dd":76,"\ubc1c\uc0dd\ud558\uba74":76,"\ubc1c\uc0dd\ud55c":76,"\ubc1c\uc804\ub41c":75,"\ubc1c\ud718\ub41c":82,"\ubc29\ubc95":78,"\ubc29\ubc95\uc740":[78,79],"\ubc29\ubc95\uc744":[77,78],"\ubc29\ubc95\uc774\ub2e4":79,"\ubc29\uc2dd":75,"\ubc29\ud5a5\uc73c\ub85c":78,"\ubc29\ud5a5\ud0a4\ub97c":78,"\ubc30\uc5f4\uacfc":81,"\ubc30\uc5f4\uc5d0\uc11c":81,"\ubc30\uc5f4\uc740":81,"\ubc30\uc5f4\uc744":[76,81],"\ubc30\uc5f4\uc774":81,"\ubc30\uc5f4\ucc98\ub7fc":81,"\ubc30\uc6b0\uace0":[78,82],"\ubc30\uc6b0\ub294":[75,76],"\ubc30\uc6b4\ub2e4":78,"\ubc30\ud2c0\uc2ed":75,"\ubc84\ud2bc":81,"\ubc84\ud2bc\ub4e4":83,"\ubc84\ud2bc\ub4e4\uc740":80,"\ubc84\ud2bc\ub4e4\uc744":80,"\ubc84\ud2bc\uc744":[77,80,81],"\ubc84\ud2bc\uc758":80,"\ubc88":77,"\ubc94\uc704\uac00":80,"\ubc95\uc744":81,"\ubcc0\uacbd\ud558\uba74\uc11c":79,"\ubcc0\uc218":[76,79,80],"\ubcc0\uc218\uac00":[76,77,79,80],"\ubcc0\uc218\ub294":[76,79],"\ubcc0\uc218\ub3c4":80,"\ubcc0\uc218\ub4e4\uc744":80,"\ubcc0\uc218\ub4e4\uc774":76,"\ubcc0\uc218\ub85c":[79,80],"\ubcc0\uc218\ub97c":[76,77,79,80],"\ubcc0\uc218\uc640":78,"\ubcc0\uc218\uc758":79,"\ubcc0\uc218\uc774\uace0":79,"\ubcc0\uc218\uc774\ub2e4":79,"\ubcc0\uc704\uac00":77,"\ubcc0\uc704\ub294":77,"\ubcc0\uc704\ub9cc":77,"\ubcc0\ud55c\ub2e4\uba74":79,"\ubcc0\ud560":79,"\ubcf4\uace0":75,"\ubcf4\ub2e4":77,"\ubcf4\uba74":[77,80],"\ubcf4\uc544\ub77c":[78,79,82],"\ubcf4\uc774\ub294":80,"\ubcf4\uc774\uc9c4":81,"\ubcf4\uc778\ub2e4":77,"\ubcf4\uc778\ub2e4\ub294":[75,77],"\ubcf4\uc77c":77,"\ubcf4\uc790":[75,82],"\ubcf4\ud1b5":75,"\ubcf5\uc18c\uc218\uc88c\ud45c\ub97c":75,"\ubcf5\uc7a1\ub3c4\ub294":77,"\ubcf5\uc7a1\ub3c4\ub97c":77,"
\ubcf5\uc7a1\ud558\ub2e4":76,"\ubcf5\uc7a1\ud55c":75,"\ubcf8\uaca9\uc801\uc73c\ub85c":79,"\ubcfc\ub9cc":75,"\ubd80":83,"\ubd80\ubd84\uc5d0\uc11c":78,"\ubd80\ubd84\uc73c\ub85c":76,"\ubd80\ubd84\uc758":81,"\ubd80\uc5ec\ud560":82,"\ubd84\uc11d\ud558\uc9c0":77,"\ubd88\uacfc\ud558\ub2e4":78,"\ubd88\uacfc\ud558\ubbc0\ub85c":77,"\ubd88\ub9b4":78,"\ubd88\uc5f0\uc18d\uc801":79,"\ube14\ub85d":81,"\ube14\ub85d\uc744":81,"\ube14\ub85d\uc758":81,"\ube44\uad50\ud558\uc5ec":78,"\ube44\ud45c\uc900":75,"\ube44\ud558\uba74":76,"\ube48\ub3c4\uc5d0":77,"\ube60\ub978":75,"\ube60\ub97c\uae4c":77,"\ube60\uc9c0\uba74":75,"\ube68\uac04":[76,79,81],"\ube68\uac04\ube14\ub85d":16,"\ube68\ub9ac":75,"\ubfcc\uc694\ubfcc\uc694":75,"\uc0ac\uae30\uac00":80,"\uc0ac\ub78c\ub4e4\uc774":82,"\uc0ac\ub78c\uc774":82,"\uc0ac\uc2e4":81,"\uc0ac\uc6a9\ub418\uc5c8\uae30":80,"\uc0ac\uc6a9\ub418\uc5c8\uc9c0\ub9cc":80,"\uc0ac\uc6a9\ub41c":79,"\uc0ac\uc6a9\uc790\uac00":[76,80],"\uc0ac\uc6a9\ud558\uae30":76,"\uc0ac\uc6a9\ud558\ub294":76,"\uc0ac\uc6a9\ud558\uc5ec":76,"\uc0ac\uc6a9\ud560":76,"\uc0ac\uc774\uc5d0\ub294":75,"\uc0ac\uc774\uc758":75,"\uc0ac\uc9c4":75,"\uc0b4\ud3b4\ubcf4\uc790":76,"\uc0bd\uc785\ud558\uba74":76,"\uc0c1\uc138\ud558\uac8c":79,"\uc0c1\uc218":80,"\uc0c1\uc218\ub4e4\uc744":76,"\uc0c1\ud0dc":78,"\uc0c1\ud0dc\ub97c":75,"\uc0c1\ud638\uc791\uc6a9\uc774":78,"\uc0c8\ub85c\uc6b4":[79,81,82],"\uc0c9":[75,76,79],"\uc0c9\uc0c1":[76,79],"\uc0c9\uc0c1\uacfc":76,"\uc0c9\uc0c1\uc740":[76,81],"\uc0c9\uc0c1\uc744":[76,81],"\uc0c9\uc0c1\uc758":81,"\uc0c9\uc774":76,"\uc0dd\uac01\uc77c":80,"\uc0dd\uac01\ud574":82,"\uc0dd\uac01\ud574\ubcf4\uba74":78,"\uc0dd\uac01\ud574\ubd10\ub77c":78,"\uc0dd\uacbc\ub2e4":78,"\uc0dd\uae38":76,"\uc0dd\uc131\ud558\uace0":76,"\uc11c\ub85c":76,"\uc120\uc5b8":76,"\uc120\uc5b8\ub418\uc5b4\uc57c":76,"\uc120\ud0dd\uc801\uc73c\ub85c":75,"\uc120\ud0dd\uc801\uc778":77,"\uc120\ud0dd\uc9c0\ub294":81,"\uc120\ud589\ub418\uc5b4\uc57c":79,"\uc124\uba85\uc774":76,"\uc124\uba85\uc774\uc5c8\ub2e4":76,"\uc124\uba85\ud558\ub294":79,"\uc124\uba85\ud560":81,"\uc124\uc815":75,"\uc131\ubd84":76,"\uc131\ubd84\uc774":76,"\uc138\uace0":81,"\uc138\ud305":78,"\uc148\uc774\ub2e4":76,"\uc18c\uac1c":83,"\uc18c\ub9ac":[75,78],"\uc18c\uc124":82,"\uc18c\uc2a4":[75,76,77,78],"\uc18c\uc2a4\ucf54\ub4dc":76,"\uc18c\uc2a4\ucf54\ub4dc\uac00":76,"\uc18c\uc2a4\ucf54\ub4dc\ub294":76,"\uc18c\uc2a4\ucf54\ub4dc\ub97c":[75,76],"\uc18c\uc2a4\ucf54\ub4dc\uc5d0":76,"\uc18c\uc2a4\ucf54\ub4dc\uc640":75,"\uc18c\uc2a4\ucf54\ub4dc\uc758":76,"\uc18c\uc2a4\ud30c\uc77c\uc5d0":75,"\uc18c\uc2a4\ud30c\uc77c\uc740":75,"\uc18d":76,"\uc18d\ub3c4\ub97c":77,"\uc18d\ub3c4\uc5d0":77,"\uc18d\ub3c4\uc774\ub77c\ub294":77,"\uc18d\uc5d0\ub294":77,"\uc190\uac00\ub77d":78,"\uc190\uc744":75,"\uc218":[75,76,77,78,79,80,81,82],"\uc218\uac00":82,"\uc218\ub294":[77,82],"\uc218\ub3c4":82,"\uc218\ub97c":81,"\uc218\ub9ce\uc740":75,"\uc218\uc815\ud558\ub294":76,"\uc218\uc815\ud55c\ub2e4\uba74":76,"\uc218\uc900":75,"\uc218\ud589\ub418\uc5b4\uc57c":76,"\uc218\ud589\ud55c\ub2e4":76,"\uc21c\ucc28\uc801\uc73c\ub85c":76,"\uc26c\uc6b4":79,"\uc27d\uac8c":[77,79],"\uc27d\ub2e4":[77,78,80],"\uc2a4\ud06c\ub9b0":76,"\uc2b5\ub4dd\ud560":82,"\uc2dc\uac01\uc801":81,"\uc2dc\uac01\ud654":[78,79],"\uc2dc\uac04":[77,78,81],"\uc2dc\uac04\ubcf4\ub2e4":82,"\uc2dc\uac04\uc21c\uc73c\ub85c":76,"\uc2dc\uac04\uc740":77,"\uc2dc\uac04\uc744":[77,82],"\uc2dc\uac04\uc774":[76,82],"\uc2dc\uac04\uc774\ub2e4":82,"\uc2dc\ub3c4\ud574":75,"\uc2dc\uc2a4\ud15c\uc744":78,"\uc2dc\uc2a4\ud15c\uc774":76,"\uc2dc\uc791\ub418\uae30":77,"
\uc2dc\uc791\ub420":77,"\uc2dc\uc791\ud558\ub294":77,"\uc2dc\ud589":82,"\uc2dd\uc758":75,"\uc2e0\uacbd":80,"\uc2e4\uc81c\ub85c":76,"\uc2e4\ud589":[76,77,78,81],"\uc2e4\ud589\ub418\uac70\ub098":75,"\uc2e4\ud589\ub418\ub294":[75,77],"\uc2e4\ud589\ub418\uba74":76,"\uc2e4\ud589\ub418\uc57c":77,"\uc2e4\ud589\ub418\uc5b4\uc57c":76,"\uc2e4\ud589\ub418\uc9c0":76,"\uc2e4\ud589\ub41c\ub2e4":76,"\uc2e4\ud589\ub41c\ub2e4\ub294":75,"\uc2e4\ud589\ub428":75,"\uc2e4\ud589\ud558\ub294":75,"\uc2eb\ub2e4\uba74":80,"\uc2ec\ud654":78,"\uc2ec\ud654\ub41c":78,"\uc2f6\ub2e4\uba74":75,"\uc2f6\uc744":76,"\uc368\uc57c":80,"\uc4f0\uba74":76,"\uc4f8":75,"\uc544\ub2c8\ub2e4":78,"\uc544\ub2c8\ub77c":77,"\uc544\ub2c8\ubbc0\ub85c":77,"\uc544\ub2c8\uc9c0\ub9cc":78,"\uc544\ub2cc":[75,76,77,79,80,81],"\uc544\ub2cc\uc9c0\ub97c":78,"\uc544\ub798\ub97c":79,"\uc544\ub798\uc640":79,"\uc544\ub798\ucabd\uc774":76,"\uc544\ub9c8\ub3c4":78,"\uc544\ubb34":82,"\uc544\ubb34\ub798\ub3c4":75,"\uc544\uc2a4\ud0a4\uc544\ud2b8\ub97c":75,"\uc544\uc774\ub514\uc5b4\ub294":79,"\uc544\uc774\ub514\uc5b4\ub97c":79,"\uc544\uc774\ub514\uc5b4\uc640":79,"\uc544\uc774\ub514\uc5b4\ucc98\ub7fc":80,"\uc544\uc9c1":[79,80],"\uc544\uc9c1\ub3c4":[77,80],"\uc548\ub418\ub294":77,"\uc548\ub41c\ub2e4":76,"\uc54a\ub294":[76,77],"\uc54a\ub294\ub2e4":[77,78,79,81],"\uc54a\ub2e4":79,"\uc54a\ub2e4\uba74":80,"\uc54a\uc558\uc9c0\ub9cc":78,"\uc54a\uc73c\uba74":76,"\uc54a\uc744":[76,77],"\uc54c":[76,80,81],"\uc54c\uace0":[77,78],"\uc54c\uace0\ub9ac\uc998\uc73c\ub85c":75,"\uc54c\uace0\ub9ac\uc998\uc758":78,"\uc54c\uace0\ub9ac\uc998\uc774":78,"\uc54c\ub809\uc138\uc774":82,"\uc54c\uc544\ub0b4\uae30":80,"\uc54c\uc544\ub0bc":[76,77],"\uc54c\uc544\uc57c":78,"\uc54c\uc558\ub2e4":77,"\uc54c\uce74\ub178\uc774\ub4dc\uc758":77,"\uc55e\ubd80\ubd84\uc5d0":77,"\uc55e\uc11c":76,"\uc55e\uc11c\uc11c":76,"\uc55e\uc5d0":78,"\uc560\ub2c8\uba54\uc774\uc158":[75,77],"\uc57d\uac04":78,"\uc57d\uac04\uc758":76,"\uc5b4\ub5a4":[76,82],"\uc5b4\ub5a4\uac00":79,"\uc5b4\ub5a8\uae4c":[76,80],"\uc5b4\ub5bb\uac8c":[76,77,79,80,81],"\uc5b4\ub835\uc9c0":76,"\uc5b4\uca0c\ub4e0":78,"\uc5b4\ucc0c\ub410\ub4e0":76,"\uc5b8\uae09\ud558\uaca0\ub2e4":76,"\uc5b8\uae09\ud55c":81,"\uc5b8\uc5b4\uc758":76,"\uc5bc\ub9c8\ub098":77,"\uc5bc\ub9cc\ud07c\uc758":82,"\uc5c5\ub370\uc774\ud2b8":77,"\uc5c5\ub370\uc774\ud2b8\uac00":76,"\uc5c5\ub370\uc774\ud2b8\ub418\uac70\ub098":76,"\uc5c5\ub370\uc774\ud2b8\ub418\uac8c":77,"\uc5c5\ub370\uc774\ud2b8\ub418\uc5c8\ub294\uc9c0\ub97c":77,"\uc5c5\ub370\uc774\ud2b8\ub41c\ub2e4":78,"\uc5c5\ub370\uc774\ud2b8\ud558\ub294":[77,78,80],"\uc5c6\uace0":[78,80],"\uc5c6\uae30":[76,77,78],"\uc5c6\ub294":[75,76,82],"\uc5c6\ub2e4":[77,78,81],"\uc5c6\uc73c\ubbc0\ub85c":[77,80],"\uc5c6\uc774":[76,78],"\uc5d0":76,"\uc5d0\uc11c":82,"\uc5d0\uc120":76,"\uc5d0\ud544\ub85c\uadf8":83,"\uc5d4\uc9c4":75,"\uc5d4\uc9c4\uc5d0\ub3c4":75,"\uc5d4\uc9c4\uc740":75,"\uc5d4\uc9c4\uc744":75,"\uc5d4\uc9c4\uc758":75,"\uc5d4\ud130":75,"\uc5ec\uae30\uc11c":77,"\uc5ec\uae30\uc5d0":76,"\uc5ec\uae30\uc5d0\uc11c":82,"\uc5ec\ub7ec":75,"\uc5ec\ub7ec\uac00\uc9c0":76,"\uc5ec\uc804\ud788":79,"\uc5ec\uc9c0\uac00":81,"\uc5f0\uacb0\uc2dc\ud0a4\uba74\uc11c":82,"\uc5f0\uacb0\uc810\uc774":75,"\uc601\uc5ed":80,"\uc601\uc5ed\uacfc":80,"\uc601\uc5ed\uc744":80,"\uc601\uc5ed\uc774":80,"\uc601\ud5a5\uc744":82,"\uc601\ud654":82,"\uc608\ub97c":[76,79],"\uc608\uc2dc":75,"\uc608\uc2dc\uc774\ub2e4":82,"\uc608\uc678\uc801\uc73c\ub85c":76,"\uc608\uc81c\uc778":76,"\uc608\uce21\ub418\uae30":77,"\uc624\uac8c":76,"\uc624\uac8c\ub054":76,"\uc624\ub2f5\uc774\ub77c\u
ba74":81,"\uc624\ub2f5\uc77c":81,"\uc624\ub798":76,"\uc624\ub978\ucabd\uc774":76,"\uc624\ube0c\uc81d\ud2b8\uac04":78,"\uc624\ube0c\uc81d\ud2b8\uc758":77,"\uc624\uc9c1":[79,80],"\uc640":[76,77,78,80,82],"\uc640\uc57c":77,"\uc644\ubcbd\ud788":82,"\uc644\uc131\ub418\uc5c8\ub2e4":78,"\uc644\uc804\ud55c":80,"\uc644\uc804\ud788":78,"\uc65c":78,"\uc65c\ub098\ud558\uba74":76,"\uc65c\ub0d0\uba74":78,"\uc65c\ub0d0\ud558\uba74":[77,78],"\uc678\ubd80":75,"\uc678\uc758":76,"\uc694\uc18c":[78,81],"\uc694\uc18c\ub97c":76,"\uc694\uc57d\ud558\uc790\uba74":75,"\uc6a9":80,"\uc6a9\ub3c4\uac00":79,"\uc6a9\uc774\ub2e4":81,"\uc6b0":80,"\uc6b0\ub9ac\uac00":77,"\uc6b0\ub9ac\ub294":[77,78,81,82],"\uc6b0\ub9ac\uc758":82,"\uc6b0\uc120":[76,77,78,79,81],"\uc6c0\uc9c1\uc774\uac8c":77,"\uc6c0\uc9c1\uc774\ub294":77,"\uc6c0\uc9c1\uc778\ub2e4":[77,78],"\uc6c0\uc9c1\uc778\ub2e4\ub294":78,"\uc6c0\uc9c1\uc77c\uae4c":77,"\uc6d0\ub9ac\ub97c":76,"\uc704":79,"\uc704\uce58":[76,79],"\uc704\uce58\uac00":77,"\uc704\uce58\ub97c":76,"\uc704\uce58\uc5d0\uc11c\uc758":80,"\uc704\ud55c":[75,76,80,81],"\uc704\ud574":[76,78,82],"\uc704\ud574\uc11c\ub294":78,"\uc704\ud574\uc120":76,"\uc708\ub3c4\uc6b0":76,"\uc708\ub3c4\uc6b0\uc758":77,"\uc720\ub2c8\ud2f0":75,"\uc720\uc6a9\ud55c":76,"\uc73c\ub85c":[75,76],"\uc740":77,"\uc744":[77,78,81],"\uc74c\uc545":82,"\uc758":75,"\uc758\ubbf8\ub97c":80,"\uc758\ubbf8\ud558\uac8c":[77,80],"\uc758\ubbf8\ud558\uace0":78,"\uc758\ubbf8\ud558\uc9c0\ub294":78,"\uc758\ubbf8\ud55c\ub2e4":[76,77,78],"\uc774":[75,76,77,78,79,80,81,82],"\uc774\uac83\ub4e4\uc740":79,"\uc774\uac83\uc740":[76,77,81],"\uc774\uac83\uc774":[75,76,82],"\uc774\ub294":[77,78],"\uc774\ub2e4":76,"\uc774\ub3d9":83,"\uc774\ub780":75,"\uc774\ub7f0":[75,78],"\uc774\ub807\uac8c":75,"\uc774\ub8e8\uc5b4\uc9c4\ub2e4":[76,79],"\uc774\ub8e8\uc5b4\uc9d0\uc5d0":78,"\uc774\ub97c":77,"\uc774\ub984":78,"\uc774\ub984\uc73c\ub85c\ub294":78,"\uc774\ub984\uc744":78,"\uc774\ub984\uc758":76,"\uc774\ubbf8":82,"\uc774\ubbf8\uc9c0":[77,78,81],"\uc774\ubca4\ud2b8\uac00":[76,78],"\uc774\ubca4\ud2b8\ub4e4\uc740":76,"\uc774\ubca4\ud2b8\ub4e4\uc744":76,"\uc774\ubca4\ud2b8\ub4e4\uc758":76,"\uc774\ubca4\ud2b8\ub97c":[76,78,80],"\uc774\ubca4\ud2b8\uc801":75,"\uc774\ubcc4\uc744":76,"\uc774\uc0c1":[75,77],"\uc774\uc0c1\uc758":78,"\uc774\uc5d0":76,"\uc774\uc6a9\ud558\uba74":77,"\uc774\uc6a9\ud55c":75,"\uc774\uc6a9\ud560":78,"\uc774\uc720\ub294":80,"\uc774\uc720\uc774\ub2e4":[75,78,82],"\uc774\uc804":[77,78],"\uc774\uc804\uacfc":79,"\uc774\uc804\ubd80\ud130":78,"\uc774\uc804\uc5d0\ub294":78,"\uc774\uc804\uc758":[76,78,80],"\uc774\uc81c":[77,78,80,81,82],"\uc774\uc81c\ub294":79,"\uc774\uc820":77,"\uc774\ud574\ud558\ub294":76,"\uc774\ud574\ud560":[78,79],"\uc774\ud574\ud588\ub2e4\uba74":76,"\uc774\ud6c4":76,"\uc774\ud6c4\uc5d0":76,"\uc778\uc790\ub97c":78,"\uc778\ud130\ud398\uc774\uc2a4":78,"\uc778\ud130\ud398\uc774\uc2a4\ub97c":81,"\uc77c\ubc18\uc801\uc73c\ub85c":76,"\uc77c\ubd80":[76,77,78],"\uc77c\ubd80\ubd84":78,"\uc77c\ubd80\uc774\uae30":75,"\uc77c\uc5b4\ub0ac\ub294\uc9c0":80,"\uc77c\uc73c\ud0a4\ub294":82,"\uc77c\uc885\uc758":77,"\uc77c\uce58\ud558\uc9c0":76,"\uc784\ub9c8\ub204\uc5d8":82,"\uc784\uc740":77,"\uc785\ub825":[75,76,77,80],"\uc785\ub825\ubcf4\ub2e4":82,"\uc785\ub825\uc2dc\ud0a4\ub294":82,"\uc785\ub825\uc740":[75,80],"\uc785\ub825\uc744":[78,80],"\uc785\ub825\uc774":[77,78],"\uc785\ub825\uc774\ub098":77,"\uc785\ub825\uc774\ub780":80,"\uc785\ub825\ud558\ub294":78,"\uc785\ubb38\uc7a5\ubcbd\uc774":75,"\uc788\uac8c":[75,76],"\uc788\uace0":79,"\uc788\uae30":78,"\uc788\ub290\
ub0d0":78,"\uc788\ub294":[75,76,80,81,82],"\uc788\ub294\ub370":79,"\uc788\ub2e4":[75,76,77,78,79,80,81,82],"\uc788\ub2e4\uace0":76,"\uc788\ub3c4\ub85d":79,"\uc788\uc5b4\uc57c":77,"\uc788\uc73c\uba74":75,"\uc788\uc744\uae4c":[75,76],"\uc788\uc74c":80,"\uc788\uc74c\uc744":[77,78,80],"\uc790\ub3d9\uc801\uc73c\ub85c":[76,77],"\uc790\uc8fc":77,"\uc791\ub3d9":76,"\uc791\ub3d9\ud558\uc9c0":77,"\uc791\uc131\ub418\uc5b4\uc57c":76,"\uc791\uc131\uc740":81,"\uc791\uc131\ud558\ub294":75,"\uc791\uc131\ud55c":75,"\uc791\uc5c5\uc774":76,"\uc791\uc740":[79,80],"\uc7a5":77,"\uc7a5\uc774":77,"\uc7a5\uc810\ub3c4":75,"\uc7a5\uc810\uc740":75,"\uc7a5\uc810\uc744":75,"\uc800\uae09":75,"\uc800\uc7a5\ud574":76,"\uc801\uc6a9\ub418\uc5b4":80,"\uc801\uc808\ud55c":[76,77,78,80],"\uc801\uc808\ud788":76,"\uc804\uc138\uacc4":82,"\uc804\uc5ed":76,"\uc804\uccb4":[76,81],"\uc804\uccb4\ub97c":79,"\uc804\ud600":78,"\uc808\ucc28\uc801\uc73c\ub85c":75,"\uc808\ucc28\uc801\uc774":75,"\uc810\uc218\ub3c4":78,"\uc810\uc740":80,"\uc811\uadfc\uc131\uc774":75,"\uc811\uadfc\ud560":75,"\uc815\ub2f5\uc774\uac70\ub098":81,"\uc815\ub2f5\uc774\ub77c\uba74":81,"\uc815\ub82c\ub41c\ub2e4":76,"\uc815\ub9ac\ub97c":77,"\uc815\ubcf4\ub4e4\uc740":76,"\uc815\ubcf4\ub97c":76,"\uc815\uc0ac\uac01\ud615":80,"\uc815\uc0ac\uac01\ud615\uc744":80,"\uc815\uc218":[79,81],"\uc815\uc2e0\uc5c6\uc774":77,"\uc815\uc758":78,"\uc815\uc911\uc559\uc5d0":76,"\uc815\uc911\uc559\uc73c\ub85c":76,"\uc815\uc911\uc559\uc740":76,"\uc815\uc911\uc559\uc744":76,"\uc815\uc9c0\ub41c":77,"\uc815\ud558\uace0":76,"\uc815\ud558\ub294":79,"\uc815\ud55c\ub2e4":76,"\uc815\ud560":[76,77],"\uc815\ud574\uc57c":76,"\uc815\ud574\uc838\uc57c":76,"\uc815\ud655\ud788\ub294":76,"\uc81c\uacf5\ud558\uae30":75,"\uc81c\uc57d\uc870\uac74":78,"\uc81c\uc678\ud654\uba74":75,"\uc81c\uc791":75,"\uc81c\ud55c\uc744":81,"\uc870\uac74\ubb38\uc774":76,"\uc870\uac74\uc774":76,"\uc870\uc808\ud558\uac8c":79,"\uc870\uc885":83,"\uc874\uc7ac\ud558\uace0":80,"\uc874\uc7ac\ud558\uae30":77,"\uc874\uc7ac\ud568\uc744":80,"\uc885\ub8cc":77,"\uc885\ub8cc\ub418\uac8c":76,"\uc885\ub8cc\ub418\uace0":78,"\uc885\ub8cc\ub418\uc5b4\uc57c":76,"\uc885\ub8cc\ub41c":76,"\uc885\ub8cc\ud558\uace0":76,"\uc885\ub8cc\ud558\ub294":77,"\uc885\ub958\uc758":76,"\uc88b\ub2e4\ub294":75,"\uc88b\uc740":[75,80],"\uc88b\uc744":79,"\uc88c\ud45c":76,"\uc88c\ud45c\ub97c":[78,79],"\uc88c\ud45c\uc5d0":77,"\uc8fc\ub294":75,"\uc8fc\ub85c":76,"\uc8fc\ubaa9\ud574\ub77c":78,"\uc8fc\uc5b4\uc9c4":76,"\uc904\ub4e4\uc740":78,"\uc911":[75,77],"\uc911\uc694\ud55c":82,"\uc990\uae38":78,"\uc99d\uac00\uc2dc\ud0a4\uac70\ub098":80,"\uc99d\uac00\ud558\uace0":81,"\uc9c0\uae08\uae4c\uc9c0":78,"\uc9c0\uae08\uc740":78,"\uc9c0\ub294":80,"\uc9c0\uc2dd\ub9cc\uc73c\ub85c":82,"\uc9c0\uc2dd\ubcf4\ub2e4":82,"\uc9c0\uc2dd\uc5d0":82,"\uc9c0\uc2dd\uc744":82,"\uc9c0\uc5d0":81,"\uc9c0\uc6b0\ub294":75,"\uc9c0\uc810\uc744":80,"\uc9c1\uad00\uc801\uc73c\ub85c":78,"\uc9c1\uad00\uc801\uc778":78,"\uc9c1\uc0ac\uac01\ud615":76,"\uc9c1\uc0ac\uac01\ud615\ub4e4\uc5d0":79,"\uc9c1\uc0ac\uac01\ud615\ub4e4\uc744":79,"\uc9c1\uc0ac\uac01\ud615\uc744":79,"\uc9c1\uc811":79,"\uc9c4\uc9dc":78,"\uc9c4\ud589\ub420":76,"\uc9c8":81,"\uc9c8\ub9b0\ub2e4\uba74":75,"\uc9d1\uc911\ud558\uba74":75,"\uc9dc\uc99d\ub0a0":81,"\ucc28\uc774\uc810\uc774":78,"\ucc28\uc774\uc810\uc774\ub2e4":78,"\ucc29\uc624\ub97c":82,"\ucc38\uace0":[76,77,78,79,80,81],"\ucc3d\uc758\uc801\uc778":82,"\ucc3e\uc544\ub0b4\uba74":77,"\ucc3e\uc744":79,"\ucc44\ub85c":78,"\ucc44\uc6b0\ub294":76,"\ucc98\ub9ac":[75,76,78],"\ucc98\ub9ac\uac00":[76,77],"\ucc9
8\ub9ac\ub294":75,"\ucc98\ub9ac\ub97c":79,"\ucc98\ub9ac\ub9cc\uc774":80,"\ucc98\ub9ac\uc758":76,"\ucc98\ub9ac\ud558\uace0":76,"\ucc98\ub9ac\ud558\ub294":80,"\ucc98\ub9ac\ud558\ub294\uc9c0\ub294":77,"\ucc98\ub9ac\ud558\ub824\uba74":81,"\ucc98\ub9ac\ud560":76,"\ucc9c\uc7ac\uc131\uc774\ub780":82,"\uccab\ubc88\uc9f8":79,"\uccab\uc9f8":[75,80],"\uccab\uc9f8\ub294":78,"\uccab\uc9f8\ub85c":76,"\uccb4\ud06c\ud558\ub294":76,"\ucd08\uae30\ud654\ub418\uac70\ub098":76,"\ucd08\uae30\ud654\ub41c\ub2e4":76,"\ucd08\uae30\ud654\ub428\uc744":77,"\ucd08\ub85d":76,"\ucd1d":79,"\ucd5c\ub300":79,"\ucd5c\ub300\uac12\uc774":79,"\ucd5c\ub300\ud55c":75,"\ucd5c\uc18c":76,"\ucd94\uac00":77,"\ucd94\uac00\ub418\uba74":81,"\ucd94\uac00\ub418\uc5c8\uace0":77,"\ucd94\uac00\ub418\uc5c8\ub2e4":[77,80],"\ucd94\uac00\ub41c":76,"\ucd94\uac00\ub41c\ub2e4":78,"\ucd94\uac00\uc801\uc73c\ub85c":78,"\ucd94\uac00\uc801\uc778":[76,77,78,79],"\ucd94\uac00\ud558\uac70\ub098":76,"\ucd94\uac00\ud558\ub294":[76,78],"\ucd94\uac00\ud558\ub824":81,"\ucd94\uac00\ud574\ubcf4\uc790":78,"\ucd94\uac00\ud574\uc57c":77,"\ucd9c\ub825":[75,78,79,81,83],"\ucd9c\ub825\ub418\uac8c":76,"\ucd9c\ub825\ub418\ub294":76,"\ucd9c\ub825\ub420":79,"\ucd9c\ub825\uc6a9\uc774\ub2e4":80,"\ucd9c\ub825\uc73c\ub85c":75,"\ucd9c\ub825\uc740":[75,78],"\ucd9c\ub825\uc744":[76,77,80],"\ucd9c\ub825\uc758":78,"\ucd9c\ub825\uc774":82,"\ucd9c\ub825\ud558\uae30":76,"\ucd9c\ub825\ud558\ub294":[76,78,79,81],"\ucd9c\ub825\ud55c\ub2e4":81,"\ucd9c\ub825\ud574\uc57c":81,"\ucda9\ub3cc":75,"\ucda9\ubd84\ud788":79,"\uce58\uace4":76,"\uce5c\uc219\ud55c":76,"\uce60\ud558\uae30":75,"\uce78\ud2b8\ub294":82,"\uce94\ubc84\uc2a4":[76,79],"\uce94\ubc84\uc2a4\ub97c":76,"\uce94\ubc84\uc2a4\uc5d0":76,"\uce94\ubc84\uc2a4\uc758":76,"\ucea1\uc158\uc5d0":76,"\ucee4\uc9c0\uac8c":82,"\ucee8\ud150\uce20":78,"\ucef4\ud4e8\ud130\ub294":82,"\ucef4\ud4e8\ud130\ub9c8\ub2e4":77,"\ucef4\ud4e8\ud130\uc5d0\uac8c":82,"\ucf54\ub4dc":[76,77,78,79,80,81],"\ucf54\ub4dc\uac00":76,"\ucf54\ub4dc\ub97c":75,"\ucf54\ub4dc\uc640":76,"\ucf54\ub529\ud558\uac8c":75,"\ucf54\ub529\ud574":75,"\ucf58\uc194":75,"\ucf58\uc194\uc5d0\uc11c":75,"\ud06c\uac8c":[79,81],"\ud06c\uace0":76,"\ud06c\uae30":76,"\ud06c\uae30\uac00":76,"\ud06c\uae30\ub97c":[76,81],"\ud06c\uae30\uc640":76,"\ud06c\ub2e4\ub294":82,"\ud06c\uba74":77,"\ud070":[75,78,79,80],"\ud074\uae4c":82,"\ud074\ub9ad":80,"\ud074\ub9ad\uc774":80,"\ud074\ub9ad\ud588\ub2e4":80,"\ud07c\uc744":76,"\ud0a4":78,"\ud0a4\ub294":78,"\ud0a4\ub4e4\ub3c4":78,"\ud0a4\ub97c":78,"\ud0a4\ubcf4\ub4dc":[75,78,79],"\ud0a4\ubcf4\ub4dc\uac00down\ub41c":80,"\ud0a4\ubcf4\ub4dc\uc5d0":78,"\ud0a4\uc758":78,"\ud0c4\ucc3d\uc5d0\uc11c":79,"\ud14c\ub450\ub9ac\ub97c":[79,81],"\ud14c\ud2b8\ub9ac\uc2a4\ub97c":82,"\ud14d\uc2a4\ud2b8":[76,83],"\ud14d\uc2a4\ud2b8\uac00":[76,79],"\ud14d\uc2a4\ud2b8\ub294":76,"\ud14d\uc2a4\ud2b8\ub97c":[76,77,79],"\ud14d\uc2a4\ud2b8\uc758":[76,77],"\ud14d\uc2a4\ud2b8\uc774\ub2e4":79,"\ud1b5\ud574":80,"\ud22c\uc790\ud588\uc744\uae4c":82,"\ud234\uc774":75,"\ud29c\ud1a0\ub9ac\uc5bc":[16,81],"\ud29c\ud1a0\ub9ac\uc5bc\uc740":82,"\ud2b8\ub9ac\uac70":78,"\ud2b8\ub9ac\uac70\ub418\uba74":76,"\ud2b9\uc131":[76,81],"\ud2b9\uc131\uc774":82,"\ud2b9\uc131\uc774\ub2e4":82,"\ud2b9\uc774\ud55c":80,"\ud2b9\uc815":[76,77,80],"\ud2b9\uc815\ud55c":80,"\ud30c\uc774\uac8c\uc784":[76,81],"\ud30c\uc774\uac8c\uc784\uc740":[75,76],"\ud30c\uc774\uac8c\uc784\uc744":75,"\ud30c\uc774\uac8c\uc784\uc758":[75,76,77,82],"\ud30c\uc774\uac8c\uc784\uc774":[75,76],"\ud30c\uc774\uc36c\uc5d0":75,"\ud30c\uc774\uc36c\uc758":[75,76]
,"\ud30c\uc77c":75,"\ud30c\uc77c\ub85c":81,"\ud30c\uc77c\uc744":[75,76],"\ud30c\uc77c\uc774\ub098":75,"\ud30c\uc9c0\ud2b8\ub178\ud504\uac00":82,"\ud310\ub2e8\ud558\ub294":78,"\ud310\uc815":80,"\ud3ec\ud568":82,"\ud3ed\ub113\uc740":82,"\ud3f0\ud2b8":76,"\ud3f0\ud2b8\ub97c":76,"\ud3f0\ud2b8\uc640":76,"\ud479":75,"\ud48d\uc131\ud558\uac8c":78,"\ud504\ub85c\uadf8\ub798\uba38\uac00":75,"\ud504\ub85c\uadf8\ub798\uba38\ub294":75,"\ud504\ub85c\uadf8\ub798\ubc0d":76,"\ud504\ub85c\uadf8\ub798\ubc0d\uacfc":82,"\ud504\ub85c\uadf8\ub798\ubc0d\uc740":82,"\ud504\ub85c\uadf8\ub798\ubc0d\uc758":82,"\ud504\ub85c\uadf8\ub7a8":[76,77],"\ud504\ub85c\uadf8\ub7a8\uacfc":75,"\ud504\ub85c\uadf8\ub7a8\uc5d0":81,"\ud504\ub85c\uadf8\ub7a8\uc5d0\uc120":78,"\ud504\ub85c\uadf8\ub7a8\uc740":[81,82],"\ud504\ub85c\uadf8\ub7a8\uc744":[76,77,79,82],"\ud504\ub85c\uadf8\ub7a8\uc758":[75,76,78],"\ud504\ub85c\uadf8\ub7a8\uc774":79,"\ud504\ub85c\uc81d\ud2b8":77,"\ud504\ub85c\uc81d\ud2b8\uac00":[76,78],"\ud504\ub85c\uc81d\ud2b8\ub294":[76,77,78],"\ud504\ub85c\uc81d\ud2b8\ub85c":76,"\ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c":76,"\ud504\ub85c\uc81d\ud2b8\uc640":78,"\ud504\ub85c\uc81d\ud2b8\uc740":75,"\ud504\ub85c\uc81d\ud2b8\uc758":[76,77,78],"\ud504\ub85c\uc81d\ud2b8\uc774\uba70":76,"\ud504\ub864\ub85c\uadf8":83,"\ud504\ub864\ub85c\uadf8\uc5d0\uc11c":81,"\ud504\ub9b0\ud2b8":80,"\ud50c\ub808\uc774":[78,82],"\ud53c\ud0c0\uace0\ub77c\uc2a4":77,"\ud544\uc218\uc870\uac74\uc774":78,"\ud544\uc218\uc870\uac74\uc774\uae30":78,"\ud544\uc694":[75,76,81],"\ud544\uc694\ub85c":76,"\ud544\uc694\ud558\uac8c":80,"\ud544\uc694\ud558\ub2e4":[76,78],"\ud544\uc694\ud560":77,"\ud558\uaca0\ub2e4":79,"\ud558\uace0":81,"\ud558\uae30":[76,78],"\ud558\uae30\ub9cc":79,"\ud558\ub098":75,"\ud558\ub098\ub85c":80,"\ud558\ub098\ub9cc":81,"\ud558\ub098\uc758":[75,76,77,79,82],"\ud558\ub098\uc774\ub2e4":77,"\ud558\ub294":[76,77,80],"\ud558\ub294\ub370":80,"\ud558\ub294\uc9c0":76,"\ud558\ub2e4":75,"\ud558\uba74":[76,77,79],"\ud558\uc580":79,"\ud558\uc580\uc0c9":76,"\ud558\uc600\ub2e4":79,"\ud558\uc9c0\ub9cc":[75,76,77,79,80,82],"\ud55c":[77,80,82],"\ud55c\uad6d\uc5b4":16,"\ud55c\ub2e4":[76,77,78,79,80,81],"\ud55c\ubc88\ub9cc":76,"\ud55c\ubc88\ucbe4\uc740":75,"\ud560":[76,77,79,82],"\ud560\uae4c":79,"\ud560\ub2f9\ub41c":76,"\ud568\uc218":[76,77],"\ud568\uc218\uac00":[76,77],"\ud568\uc218\ub294":[76,77,80,81],"\ud568\uc218\ub4e4\uacfc":75,"\ud568\uc218\ub4e4\uc740":[75,76],"\ud568\uc218\ub4e4\uc744":[75,76],"\ud568\uc218\ub4e4\uc774":75,"\ud568\uc218\ub85c":75,"\ud568\uc218\ub97c":[76,79],"\ud568\uc218\ubcf4\ub2e4":77,"\ud568\uc218\uc5d0\uc120":79,"\ud568\uc218\uc640":[76,77],"\ud568\uc218\uc758":76,"\ud568\uc218\uc774\uae30":76,"\ud568\uc218\uc774\ub2e4":77,"\ud568\uc218\ud654":78,"\ud568\uc218\ud654\ub97c":79,"\ud56d\uc0c1":[75,76,78,80],"\ud574":75,"\ud574\uacb0\ud560":75,"\ud574\ub2f9":78,"\ud574\ub2f9\ub418\ub294":77,"\ud574\uc11c":81,"\ud574\uc57c":[76,79],"\ud5f7\uac08\ub9ac\uba74":76,"\ud604\uc7ac":79,"\ud615\uc2dd\uc5d0":76,"\ud615\uc2dd\uc744":76,"\ud615\uc2dd\uc774":76,"\ud638\ucd9c\ub418\ub294\ub370":76,"\ud638\ucd9c\ub418\uba74":76,"\ud638\ucd9c\ub418\uc5b4\uc57c":76,"\ud638\ucd9c\ub41c\ub2e4":76,"\ud638\ud658\uc131":75,"\ud654\ub824\ud55c":76,"\ud654\uba74":[76,79],"\ud654\uba74\uacfc":76,"\ud654\uba74\ubcf4\ub2e4\ub294":77,"\ud654\uba74\ubcf4\ud638\uae30\ucc98\ub7fc":77,"\ud654\uba74\uc744":[75,77],"\ud654\uba74\uc758":76,"\ud655\uc2e4\ud558\ub2e4":77,"\ud655\uc778\ud558\ub294":[75,79],"\ud655\uc778\ud558\ub77c":80,"\ud655\uc778\ud558\uba74":[79,80],"\ud655\uc
778\ud560":[77,78,79],"\ud655\uc778\ud574\uc57c":78,"\ud655\uc815\ub41c\ub2e4\uba74":76,"\ud658\uacbd":75,"\ud658\uacbd\uacfc":75,"\ud658\uacbd\uc5d0\uc11c":75,"\ud658\uacbd\uc5d0\uc11c\uc758":75,"\ud658\uacbd\uc6a9":75,"\ud658\uacbd\uc740":75,"\ud65c\ub3d9\uc774\ub2e4":82,"\ud65c\uc131\ud654\ub418\uba74":80,"\ud65c\uc6a9\ud558\uace0":82,"\ud65c\uc6a9\ud55c":75,"\ud65c\uc6a9\ud574":75,"\ud69f\uc218\ub97c":77,"\ud6a8\uacfc\uac00":[80,82],"\ud6a8\uacfc\ub97c":81,"\ud6a8\uacfc\uc74c\uc744":81,"\ud6c4":[75,78],"\ud6e8\uc52c":79,"\ud765\ubbf8\ub85c\uc6b4":82,"always\ubb38":[76,77],"always\ubb38\uacfc":79,"always\ubb38\uc5d0":[76,77],"always\ubb38\uc5d0\uc11c":76,"always\ubb38\uc758":77,"always\ubb38\uc774":77,"b\u00e9zier":30,"b\uac12":76,"blit\uc774":76,"blit\ud568\uc218\ub294":76,"boolean":[25,28,32,33,39,56,64],"break":[18,30,33,46,54,62,88],"byte":[10,15,17,18,20,22,23,28,29,31,37,38,42,43,44,46,51],"c\ub85c":75,"case":[16,18,20,23,24,28,29,31,33,37,38,40,43,44,51,57,58,64,68,70,72,73,74,84,85,87,88],"catch":[44,64,89],"center\ub77c\ub294":76,"char":[1,7,10,28,29,51],"class":[0,16,17,19,20,26,29,31,32,35,37,38,45,51,57,61,62,66,84,86,89],"collidepoint\ub97c":80,"const":[1,7,10,70],"cui\uac00":75,"cui\uace0":75,"cui\ud658\uacbd\uc5d0\uc11c\ub9cc":76,"default":[1,15,18,20,22,23,24,25,26,28,29,31,33,35,37,38,40,43,44,45,48,50,51,53,54,56,58,59,63],"do":[9,16,18,19,20,22,23,24,25,26,27,29,32,35,41,42,44,45,50,51,56,57,58,59,60,61,63,64,65,67,68,69,74,85,86,87,88,89],"drawbuttons\uc5d0":80,"drawhp\ub77c\ub294":79,"else\ubb38\uc740":77,"event\ubb38":[76,78],"event\ubb38\uc5d0":80,"event\ubb38\uc5d0\uc11c":79,"event\ubb38\uc744":79,"event\ubb38\uc774":78,"export":[0,11,20,38,42,43,51],"fill\ud568\uc218\ub098":76,"final":[22,26,28,29,42,44,51,58,61,62,71,84,85,86,89],"float":[1,17,19,20,24,29,30,32,35,36,38,40,45,48,50,54,56,65],"fps\uac00":77,"fps\uac12\uc774":77,"fps\ub294":77,"fps\ub300\ub85c":77,"function":[0,1,8,9,10,13,16,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,35,37,38,39,40,41,43,44,45,46,47,48,49,50,51,52,53,54,56,57,58,60,61,63,64,66,67,68,69,70,72,73,84,85,87,88,89],"g\uac12":76,"gui\uac00":80,"gui\ub97c":76,"gui\uc5d0\uc11c\uc758":80,"gui\uc774\ubbc0\ub85c":76,"gui\uc774\uc9c0\ub9cc":81,"gui\uc784\uc744":76,"gui\ud658\uacbd\uc5d0\uc11c":76,"header\uc5d0\uc120":76,"header\uc758":78,"hp\ub294":[79,81],"hp\ub97c":[79,80],"hp\ubc14":83,"hp\uc744":79,"hp\uc758":79,"import":[1,11,16,18,22,24,25,26,27,28,29,30,32,34,36,38,39,44,49,52,53,59,61,62,63,64,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,86,89],"import\ud558\ub294":76,"in\ubb38\uc744":76,"initial\ubb38":76,"initial\ubb38\uc5d0":76,"initial\ubb38\uc758":77,"input\ud568\uc218\ub97c":76,"input\ud568\uc218\uc640\ub294":76,"int":[1,2,3,4,5,6,7,8,9,10,12,13,17,18,20,23,24,25,28,29,30,32,33,35,36,37,41,42,44,45,47,48,50,51,55,56,71,72,73,79,80,81],"it\uc744":81,"k_\uc2dc\ub9ac\uc988\uc774\ub2e4":78,"kewdown\uc740":78,"key\ub294":78,"keydown\uac00":80,"keyup\uc774\ub77c\ub294":78,"l_f4\ub4f1\uc774":78,"locals\ub85c\ubd80\ud130":78,"long":[7,25,27,29,35,38,40,44,50,53,63,64,84,88],"main\ud568\uc218\ub97c":79,"main\ud568\uc218\uc5d0":79,"mousebuttonup\uc774":80,"mytext\uac1d\uccb4\uc758":76,"mytext\ub77c\ub294":76,"mytextarea\ub294":76,"mytextarea\ub77c\ub294":76,"mytextfont\uac1d\uccb4\uc758":76,"mytextfont\ub77c\ub294":76,"name\uc5d0\uc11c":78,"new":[2,3,4,5,6,7,8,9,12,13,14,17,18,20,21,22,23,25,26,28,29,30,31,32,33,35,36,37,38,39,40,42,43,44,45,46,47,48,49,50,51,52,54,55,56,58,59,61,62,63,64,65,69,71,72,73,74,84,85,86,87,89],"null":
[1,2,3,4,5,6,7,8,9,10,12,15,28],"play\ub77c\ub294":78,"play\ud55c\ub2e4":78,"pos\ub294":80,"print\ud568\uc218\ub098":76,"public":[26,86,89],"quit\uac19\uc740":76,"quit\ub77c\ub294":76,"r\uac12":76,"return":[1,2,3,4,5,6,7,8,9,10,12,13,14,15,17,18,19,20,22,23,24,25,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,62,63,64,65,66,68,71,72,73,79,80,81,84,85,86,87,88,89],"short":[25,63,65,84],"statement\uc5d0":77,"statement\uc5d0\uc11c":77,"static":[47,48,50,69],"super":[26,35,48,64],"switch":[22,23,29,44,50,59,89],"sys\ub294":76,"throw":[40,62,64],"tick\ud568\uc218\ub294":77,"true":[2,3,4,5,6,7,8,10,12,13,14,17,18,19,20,22,23,24,25,26,27,28,29,31,32,33,35,36,37,38,39,40,41,44,45,46,47,50,51,53,56,57,58,60,64,66,68,69,70,71,72,73,76,77,78,79,80,81,89],"try":[25,27,29,32,44,51,58,61,62,63,64,65,67,84,85,86,89],"ttf\ud30c\uc77c\ub85c":76,"ttf\ud655\uc7a5\uc790\ub97c":76,"update\ud568\uc218\uac00":76,"void":[1,9,13],"while":[0,17,19,22,24,25,28,30,31,32,38,39,40,42,43,44,49,50,51,54,57,62,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,87,89],"world\uac00":[77,78],"world\ub294":77,"world\uc758":77,"x\uac12\uacfc":80,"x\uc88c\ud45c\uac00":76,"y\uac12\uc744":80,"y\uc131\ubd84\uc744":76,"y\uc88c\ud45c\uac00":76,A:[1,5,8,9,12,13,15,16,17,18,20,22,23,24,25,26,29,30,32,33,35,38,39,40,42,43,45,46,49,50,51,53,54,56,58,63,64,85,86,89],AND:[18,33],ANDing:84,AS:18,And:[22,32,58,62,66,68,69,71,74,84,85,89],As:[24,29,35,38,39,44,51,52,56,57,58,64,65,67,68,84,85,86,88,89],At:[26,31,63,64,65],BE:18,BUT:18,BY:18,Be:[22,23,24,28,44,65],Being:63,But:[26,39,43,51,58,62,64,65,68,69,71,72,74,85,87,88],By:[23,25,29,31,35,39,43,44,51,53,54,56,57,61,62,64,85,86],FOR:18,For:[1,17,18,20,22,23,24,25,26,27,28,29,30,31,32,33,35,36,37,38,40,42,43,44,45,46,47,48,49,50,51,53,56,57,60,61,62,63,64,65,67,68,71,84,85,86,88],IF:18,IN:[18,27],IS:[18,27],If:[6,10,13,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,56,58,59,60,61,62,63,64,65,67,68,69,71,72,73,74,84,85,87,88,89],In:[17,18,22,23,27,29,30,31,32,33,36,37,39,40,43,44,51,54,57,58,59,61,62,63,64,65,68,70,72,84,85,86,87,88,89],Is:[37,50,63],It:[2,13,16,17,18,19,20,23,25,26,28,29,31,32,33,36,37,38,40,42,43,44,47,50,51,53,54,56,57,58,59,60,61,63,64,65,66,68,69,72,73,84,86,87,88,89],Its:34,NO:18,NOT:[18,37,40,56],No:[1,3,6,7,9,12,33,44,45,50,51,67,70],Not:[23,25,41,51,60,63,64,65,69,70],OF:18,ON:18,OR:[18,29,35],ORed:38,ORing:33,Of:[57,69,70,71],On:[1,2,3,4,5,6,7,8,9,10,18,23,25,26,37,40,44,46,50,51,56,63,66,84],One:[11,19,32,62,63,65,84],Or:[26,44,64,74],SUCH:18,THE:18,TO:[18,32],That:[29,56,58,62,63,64,67,68,69,70,71,73,74,84],The:[1,2,3,4,5,6,7,8,9,10,12,13,15,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,59,60,61,63,65,66,68,71,84,87,88],Then:[11,22,24,58,62,64,65,68,71,73,84,85,86,88,89],There:[18,19,20,22,23,24,25,26,27,28,30,32,33,37,39,45,46,50,51,57,58,59,60,62,64,65,69,70,72,73,85],These:[20,22,23,24,26,29,32,37,39,44,45,47,48,50,51,52,53,56,58,59,62,64,65,84,85,87],To:[22,23,25,28,30,33,35,37,38,39,40,41,42,43,47,50,54,57,58,65,87,88],Will:[3,5,6,7,12,32,38,54],With:[26,37,51,62,63,65,84,85,87,88],_:33,__copy__:35,__dict__:25,__file__:[26,58,66],__init__:[32,45,50,57,58,62,64,66,86,87,88,89],__main__:[66,71,72,73,79,80,81,85,89],__name__:[66,71,72,73,79,80,81,85,89],__new__:[29,45],__tags__:53,_camera:18,_default_lay:50,_freetyp:[0,11],_index:1,_layer:50,_pixels_address:51,_pygam:
11,_sdl2:48,_spin:[58,66],_sprite__g:64,_spritegroup:64,_tag:53,_test:53,_time_threshold:50,_use_upd:50,_walk:[58,66],a0:37,a6f89747b551:44,a_mask:35,aa:[20,30,65],aaa:65,aacircl:30,aaellips:30,aalib:23,aalin:24,aapolygon:[24,30],aatrigon:30,abandon:18,abil:[44,58,63],abl:[31,42,58,63,84,89],abnorm:89,abort:37,about:[16,18,19,23,26,32,36,37,39,42,44,48,50,51,55,58,59,61,62,63,64,65,67,68,70,71,72,73,74,84,85,87,88],abov:[15,18,19,22,23,24,29,32,40,41,42,43,45,48,57,58,62,64,65,74,84,87],absent:23,absolut:[19,20,29,32,37,40,51,84],abspath:[58,66],abstractgroup:50,acceler:[23,24,30,41,48,51,56,63,69,84],accept:[15,24,26,28,29,30,31,38,39,45,50,51,52,62],access:[9,10,16,17,19,24,25,26,28,29,32,35,37,38,39,41,43,44,46,50,51,59,64,65,67,84],accord:[43,69],accordingli:51,account:[32,33,38,40,42,68,84],accur:[39,40,54],achiev:[62,86],acolor:20,acquir:[10,51,52],across:[22,23,58,62,65,66,84,87,88,89],act:[58,63,70,84],action:[25,32,63,65,88],activ:[23,38,39,40,49,51,52,55,72,74],activeev:[23,25],actual:[5,12,18,19,22,23,28,30,32,37,38,40,41,43,45,50,51,54,56,58,59,62,63,64,65,73,84,85,88],ad:[11,23,24,25,31,32,35,38,39,40,42,43,47,50,54,56,58,62,64,65,68,69,70,72,73,84,87,89],add:[23,29,48,50,57,58,61,62,63,64,65,68,69,70,89],add_intern:64,add_map:47,addit:[22,23,24,25,26,32,33,36,37,44,46,48,50,51],addition:[23,24,47],address:[17,51,84,86],adequ:72,adjac:[24,30],adjust:[9,20,23,28,29,37,71,89],admit:84,adopt:[48,61],advanc:[16,28,29,47,50,51,58,62,63,67,70],advancedinputoutput1:[72,80],advancedinputoutput2:[72,80],advancedinputoutput3:[72,80],advancedinputoutput4:[72,80],advancedinputoutput5:[72,80],advancedoutputalpha1:[73,81],advancedoutputalpha2:[73,81],advancedoutputalpha3:[73,81],advancedoutputprocess1:[71,79],advancedoutputprocess2:[71,79],advancedoutputprocess3:[71,79],advancedoutputprocess4:[71,79],advancedoutputprocess5:[71,79],advancedoutputprocess6:[71,79],advancemam:56,advantag:[59,63,64,67,84,87],advic:[63,84],advis:[18,22,23],ae:28,affect:[12,23,24,29,38,42,44,45,50,51,52,65,74],afraid:65,after:[17,19,23,24,29,31,32,33,37,38,40,47,49,50,51,57,58,59,62,63,65,68,69,70,84,89],again:[18,33,40,44,50,51,54,59,61,62,64,65,84,85,88,89],against:[29,45,56,57,64],ago:63,agp:65,ahead:[24,32],ai:[84,86,89],aid:29,aim:61,alexei:74,algorithm:[24,35,44,56,67,70],alia:[46,50],alias:[16,23,28,29,85],aliceblu:21,alien:[26,64],align:[29,35,45,84],aliv:[17,50,64],all:[16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,44,45,47,48,50,51,52,53,56,57,59,60,61,63,64,65,67,68,84,85,86,87,88],all_my_sprites_list:84,allblack:65,alloc:[31,38,68],allot:53,allow:[16,18,20,22,23,25,27,28,29,31,32,36,38,42,44,46,50,51,56,57,58,59,64,65,87,89],allowedchang:38,allsprit:[58,66],almost:[16,58,61,63,64,65,67,86],alon:[26,84],along:[24,25,26,29,37,45,53,63,64,65,66,74,84,87],alpha:[1,20,23,24,26,28,29,30,31,35,43,44,48,51,52,56,63,65,86],alreadi:[10,19,23,25,29,32,37,38,40,45,47,50,57,62,63,64,65,88],alsa:37,also:[11,13,15,16,17,19,20,22,23,25,26,28,29,30,31,33,35,36,37,38,39,40,42,44,45,47,50,51,52,53,54,56,57,58,59,60,61,62,63,64,65,68,69,72,73,84,85,86,87,88,89],alt:33,alter:[35,39,56],altern:[19,23,24,26,28,29,30,45],altgr:33,although:[58,68,86],alwai:[18,23,25,26,28,29,30,31,32,33,35,39,41,44,45,47,49,50,51,54,56,58,59,60,62,63,64,65,68,69,70,71,77,86,87,89],ambigu:38,among:18,amongst:16,amount:[23,25,28,29,32,39,54,55,56,58,63],ampersand:33,amplitud:49,an:[1,2,4,5,6,7,8,10,11,12,13,14,15,16,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,42,43,44,45,46,47,48,49,50,51,52,53,5
4,55,56,57,58,59,60,63,64,65,67,69,84,85,86,87,88,89],an_id:37,analog:[32,42,47],andal:29,andmask:[22,39],android:33,angl:[24,29,30,35,36,48,56,57,87,89],angle_to:36,angular:87,ani:[1,10,16,17,18,19,21,22,23,24,25,26,27,28,29,30,31,32,35,37,38,40,41,42,43,44,45,47,48,50,51,52,54,56,57,58,59,60,61,62,63,64,65,67,68,74,84,85,86,87,88],anim:[16,26,31,50,56,62,63,64,67],anisotrop:44,annoi:[16,61,73],anoth:[11,20,23,24,26,28,29,30,32,33,35,36,39,40,42,43,45,46,47,50,51,62,63,64,65,73,74,84,86,89],ansi:37,ansi_not:37,answer:[63,84],anti:[16,23,29,85],antialia:28,antialias:[24,28,29,30,56,58],anticip:[44,69],antiquewhit:21,antiquewhite1:21,antiquewhite2:21,antiquewhite3:21,antiquewhite4:21,anyon:64,anyth:[19,23,27,44,50,51,58,62,63,64,65,74,84],anywai:[65,70,84,85],anywher:[13,62],apart:38,api:[16,18,23,30,37,46,47,48,57],app:[23,26,44,57],appear:[16,23,25,54,62,63,64,70,84],append:[27,57,62,64,69,73,81,84],appli:[20,24,26,29,35,36,37,51,55,56,65,84],applic:[0,17,23,25,26,27,33,34,37,38,46,51,84,88],appreci:84,approach:61,appropri:[23,25,39,58,59,64,69,88],approxim:[32,35,51,57],aptitud:74,aqua:21,aquamarin:21,aquamarine1:21,aquamarine2:21,aquamarine3:21,aquamarine4:21,ar:[1,9,10,11,13,15,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,56,57,58,59,60,61,63,64,65,66,67,68,70,71,72,73,74,85,86,87,88,89],arang:65,arbitrari:[50,56,62],arbitrarili:23,arc:[24,30],arcad:[63,84],architectur:56,archiv:[28,29,84],area:[12,23,24,26,28,29,32,35,44,45,48,50,51,56,57,58,62,63,64,66,68,72,84,85,87,88,89],aren:[58,62,84],arg:[26,50,53],argb:31,argb_premult:31,argument:[1,2,4,10,12,16,17,18,19,20,22,23,24,25,26,28,29,30,31,32,33,35,36,38,39,40,41,43,44,45,47,48,50,51,53,54,56,58,59,62,64,84,85],aris:18,arithmet:[20,42,65],arkanoid:69,around:[23,24,26,32,36,39,42,45,48,54,57,58,59,62,63,65,84,85,86,89],arrai:[1,2,4,16,20,22,26,29,33,38,42,49,51,62,63,65,72,73,80],arrang:[65,68],array2d:[52,65],array3d:[52,65],array_alpha:[52,65],array_blu:52,array_colorkei:[43,52,65],array_green:52,array_r:52,array_to_surfac:43,arraydemo:[16,26,65],arraytyp:[26,49,52],arrayxd:65,arriv:84,arrow:[22,26,33,70],art:[67,74],articl:[62,63],as_joystick:47,as_polar:36,as_spher:36,ascend:29,ascent:[28,29],ascii:[15,22,33,52,67],asid:67,ask:[23,28,29,57,58,59,62,63,84],aspect:[44,45],assembl:[33,63],assertequ:56,assertnotequ:56,assign:[20,25,29,32,42,45,47,50,53,58,62,65,88],assist:51,associ:[3,8,29,48,55],assum:[1,10,12,20,28,29,37,39,45,57,61,64,86,88],asterisk:33,astonish:84,astyp:65,asurf:31,asyncblit:51,asynchron:51,attach:[10,23,26,47,57],attack:63,attempt:[18,28,29,37,51,58,60,65],attent:[63,70],attibut:50,attract:70,attribut:[6,23,25,27,28,29,30,32,33,34,35,36,39,44,45,50,51,53,54,58,64,85,87,88,89],attributeerror:[29,32,51,64],audio:[3,26,37,38,44,46,49,63],audio_allow_any_chang:38,audio_allow_channels_chang:38,audio_allow_format_chang:38,audio_allow_frequency_chang:38,audiodevicead:25,audiodeviceremov:25,author:[18,57,58,59,60,62,63,64,65,84],autom:84,automat:[13,19,23,28,29,30,31,32,33,34,37,38,44,46,50,51,58,60,65,68,69],avail:[0,1,3,10,18,20,22,23,26,27,28,29,30,31,37,38,41,44,46,49,51,52,53,55,56,58,59,60,62,63,64,65,84],avalanch:74,averag:[28,29,53,54,56,57,65,84],average_color:[56,57],average_surfac:[56,57],avid:84,avoid:[19,23,33,36,38,42,53,65,89],awai:[64,71,89],awar:[23,25,28,44,65],awhil:19,awkward:[62,84],ax:[23,29,32,47,87],axi:[24,25,29,32,36,39,42,47,48,49,52,56,85,87],axis_numb:32,azimuth:36,azur:21,azure1:21,azure2:21,azure3:21,azure4:21,b0:47,b3:47,b:[
20,26,32,33,42,43,46,47,51,56,65],b_black:[73,81],b_height:[73,81],b_red:[73,81],b_width:[73,81],bach:40,back:[19,28,29,32,33,37,38,50,57,58,59,66,68,88,89],backend:[18,23,43,44,56,59],backgound:66,background:[24,26,28,29,38,50,57,61,64,66,84,85,87,89],backslash:33,backslashreplac:44,backspac:33,backward:[23,25,29,38,54],backyard:63,bad:84,bagic:[68,69,70,76,77,78],baker:63,ball:[25,32,61,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,85,86,88],ball_numb:32,ballrect:[63,67,68,69,70,71,72,73,75,76,77,78,79,80,81],ballsprit:89,banner:[16,26,58,66],bar:[71,79],barrier:67,base:[0,11,17,18,24,25,26,28,29,35,48,50,57,58,62,64,65,66,67,68,87,89],baselin:[28,29],basic:[16,18,20,23,26,31,46,58,61,62,63,64,65,76,84,86,87,88,89],bat:61,battleship:[67,75],bayer:18,bb:20,bdf:29,beam:22,bear:[28,29,39],beauti:62,becam:25,becaus:[24,27,29,36,37,51,58,61,64,65,67,68,69,70,72,73,84,85,86,87,88,89],becom:[23,25,29,38,40,49,51,52,62,63,64,65,68,73],been:[19,20,22,23,24,25,29,32,36,38,39,40,44,46,47,50,51,59,60,62,63,64,84,88,89],befor:[10,17,18,19,22,23,27,28,29,32,33,35,37,38,39,40,44,46,50,51,53,54,57,58,59,60,61,62,63,64,65,68,69,70,71,84,87,88],begin:[16,17,18,26,38,40,45,56,62,63,65,69,87],beginn:65,behalf:17,behavior:[39,50],behaviour:[23,39,44,46,89],behind:[16,23,50,87,88,89],beig:21,being:[9,18,23,24,25,31,32,33,35,37,38,39,40,42,46,63,64,69,84,88],believ:84,belong:[50,58,64,85],below:[18,20,23,24,26,27,32,37,44,45,47,50,57,62,71],bend:37,benefit:[62,64],beo:84,besid:[19,26,65],best:[13,18,23,25,26,35,38,42,43,51,52,59,62,63,64,65,72,84,86],bet:59,better:[23,28,30,37,39,40,51,56,62,63,64,65,71,84,85],between:[1,4,20,23,24,25,28,29,32,33,35,36,37,38,40,42,43,44,45,46,47,48,49,50,51,52,53,54,55,57,64,65,67,70,84],bezier:30,bg:57,bgcolor:29,bgd:50,bgr:[31,51,65],bid:58,big:[17,18,44,50,57,62,64,65,70,71,72],bigger:[16,23,58,73],biggest:[23,59],bilinear:56,bin:[66,85,86,87],binari:[16,18,20,22,41],bind:[37,59],bisqu:21,bisque1:21,bisque2:21,bisque3:21,bisque4:21,bit:[1,15,16,17,18,22,23,26,28,29,30,31,32,35,38,40,42,43,49,50,51,52,56,57,58,59,61,62,64,65,87,89],bitblt:62,bitmap:[22,29,31,56,62],bitmap_1:22,bitmap_2:22,bitmask:[22,33,35,50,51],bitsiz:[23,35,59],bitstream:[28,29],bitstreamverasan:28,bitwis:[23,33,35],bl:89,bla:31,black:[21,22,24,26,29,32,35,42,51,57,58,63,65,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84],blade:63,blanchedalmond:21,blank:[28,57,62,84,85],blanket:[43,51],blend:[20,24,26,29,44,48,51],blend_add:51,blend_alpha_sdl2:51,blend_fil:26,blend_max:51,blend_min:51,blend_mod:48,blend_mult:51,blend_premultipli:[20,51],blend_rgb_add:51,blend_rgb_max:51,blend_rgb_min:51,blend_rgb_mult:51,blend_rgb_sub:51,blend_rgba_add:51,blend_rgba_max:51,blend_rgba_min:51,blend_rgba_mult:51,blend_rgba_sub:51,blend_sub:51,blend_xxx:26,blendmod:50,blink:67,blit:[12,20,23,26,28,29,30,32,43,48,50,51,52,56,57,58,61,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,87,88,89],blit_arrai:[43,52,65],blit_blend:26,blit_hw:[23,59],blit_hw_a:[23,59],blit_hw_cc:[23,59],blit_sequ:51,blit_sw:[23,59],blit_sw_a:[23,59],blit_sw_cc:[23,59],blitter:[44,51,62],blitzbas:26,blob:[56,57],block:[17,18,25,35,40,50,51,57,58,73,84],block_list:50,blocks_hit_list:50,bloodi:63,blt:62,blue1:21,blue2:21,blue3:21,blue4:21,blue:[1,18,20,21,23,24,28,31,42,51,52,56,57,65,68,71,72,73,79,80,81,85,87],blueviolet:21,bluish:65,bmp:[31,46,62,63],board:[73,81],bodi:70,bold:[28,29],bomb:64,bonu:62,bool:[17,18,19,23,24,25,27,28,29,31,32,33,35,36,37,38,39,40,44,45,46,47,48,50,51,56],boom:64,boom_sound:64,border:[23,24,30,39,45,48],border_bottom_l
eft_radiu:24,border_bottom_right_radiu:24,border_radiu:24,border_top_left_radiu:24,border_top_right_radiu:24,borderless:48,bore:[67,86],borrow:[1,63,86],both:[19,23,24,28,29,30,31,32,33,35,37,38,39,42,51,56,57,62,63,64,65,67,71,74,84,87,89],bother:[58,61,86],bottom:[23,24,26,28,29,30,31,35,45,50,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,89],bottomleft:[45,89],bottomright:[45,89],bounc:[26,36,56,63,89],bound:[22,24,29,35,50,51],boundari:[24,29],box:[24,29,30,32,33,57,64,88],br:89,bracket:[33,87],breakag:42,breakdown:63,brief:[19,26,44,61,62],briefli:85,bright:[18,48,57],brighten:23,brightmap:65,bring:[50,63],broadcast:[42,65],broken:[39,44],broken_x:22,brought:56,brown1:21,brown2:21,brown3:21,brown4:21,brown:21,bu:[65,84],buffer:[1,2,18,20,23,31,37,38,43,46,51,58,63,64,84],buffer_s:37,bufferproxi:[0,16,17,51],buffers:[37,38],bug:[20,44,56,63,89],build:[44,64,65,85,89],built:[15,18,29,31,44,47,85],builtin:[18,28,64],bullet:[64,84],bump:84,bumper:32,bunch:[57,64],bundl:[28,29],burlywood1:21,burlywood2:21,burlywood3:21,burlywood4:21,burlywood:21,busi:[18,19,38,54,62],button1:39,button2:39,button3:39,button4:39,button5:39,button:[24,25,26,27,32,33,39,47,58,61,62,69,73,80,84,85,88],bx:47,bye:68,bypass:44,byte_data:46,bytecod:63,byteord:17,bytes:[17,23,35,51,59],bytestr:38,c:[11,16,17,18,20,26,30,33,43,44,51,60,63,65,67,75],c_api:0,cach:29,cache_s:29,cadetblu:21,cadetblue1:21,cadetblue2:21,cadetblue3:21,cadetblue4:21,cadillac:64,calcnewpo:[87,89],calcul:[24,29,35,36,40,42,50,68,69,72,73,87,88],calibr:57,call:[1,10,13,17,18,19,22,23,24,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,44,45,46,47,48,50,51,53,54,57,58,59,60,61,62,63,64,65,66,68,69,70,84,85,86,87,88,89],callabl:[17,25,44,50],callback:[1,2,17,50],caller:37,calling_mask:35,cam:57,came:[26,31],camera:[16,26,44],camlist:57,can:[1,9,14,15,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,49,50,51,52,53,54,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,84,85,86,87,88,89],candid:[13,33],cannot:[15,18,23,25,28,30,38,40,45,48,50,51,52,54,62,63,64,69,84,86,89],canva:[68,71],cap:33,capabl:[23,59,63,86,87],capslock:33,caption:[23,68],captur:[16,18,26,31],capword:25,card:[23,38,65],care:[27,38,58,65,72,85],caret:33,carri:89,castl:63,categor:64,caught:63,caus:[2,18,20,25,28,35,40,42,56,58,64,65,66,68,84,88],caveat:23,cc:57,ccolor:57,cd:[3,19],cdrom:[0,63],cdrom_tag:53,cdrom_test:53,ceil:63,center:[24,26,30,32,35,44,45,48,50,51,57,66,68,69,70,71,72,73,76,77,78,79,80,81,85],centeri:45,centerx:[45,58,66,85],centr:44,centroid:[35,57],certain:[20,25,33,36,57,58,61,67,68,69,70,71,72,84],certainli:[62,65],cff:29,challeng:63,chanc:[38,65,84],chang:[16,18,20,23,24,25,26,28,29,30,31,32,33,35,36,37,38,39,40,42,44,45,46,47,48,49,50,51,52,54,56,57,58,59,61,63,64,65,68,69,70,71,73,84,87,88,89],change_color:56,change_lay:50,channel:[8,20,31,37,38,40,65,84],channelnum:8,char_bit:35,charact:[15,17,23,28,29,33,43,44,52,62,84,88],character:36,characterist:[20,74],charset:46,chart:65,chartreus:21,chartreuse1:21,chartreuse2:21,chartreuse3:21,chartreuse4:21,chase:64,chaser:64,chatroom:62,cheap:[50,64],check:[3,4,5,6,7,8,12,18,23,25,26,28,32,35,38,39,40,42,44,46,47,49,50,52,57,58,60,62,63,64,67,68,70,71,72,84,85,86,87,88,89],checkout:44,chew:54,chief:86,child:51,chimp:[16,26,61,64,86],chimpanze:16,chocol:21,chocolate1:21,chocolate2:21,chocolate3:21,chocolate4:21,choic:[18,23,38,57],choos:[18,22,23,26,59,61,65,73,84],chop:56,chord:84,chore:84,chose:73,chosen:[18,23,29],chromin:18,chunk:[8,17,89],circl:[22,24,26,30,32,
50,57],circular:64,circumst:89,claim:18,clamp:[26,45],clamp_ip:45,clariti:35,clark:84,classless:[86,87],classmethod:48,claus:89,clean:[39,42,58,60,62,63,64],cleaner:[62,63],cleanli:[60,62,63,64,89],cleanup:42,clear:[16,24,25,32,33,35,38,43,48,50,51,62,63,64,67,75,84],clear_callback:50,clearer:60,clench:[58,66],click:[22,24,26,32,33,39,69,72,84,85],clip:[24,26,45,50,51,62],clipboard:[16,26],cliplin:45,clipped_lin:45,clist:57,clock:[22,24,32,39,54,58,66,69,70,71,72,73,77,78,79,80,81,89],clockwis:[29,30,36,56,87],clone:20,close:[10,18,22,23,24,25,26,29,32,37,40,42,47,56,57,62,65,85,86],close_to_play:64,close_to_player2:64,close_to_player3:64,closest:[23,37,38,59],cmy:20,co:[87,89],cocoa:23,code:[6,15,16,17,18,22,23,24,25,26,27,28,29,30,32,33,35,39,42,43,44,45,46,50,51,56,57,58,62,63,64,65,67,68,69,70,71,72,73,84,85,86,87,88,89],codec:44,codepoint:28,coercion:65,col:30,collect:[13,23,51,60],collid:[35,45,50,58,64,66,87,89],collide_circl:50,collide_circle_ratio:50,collide_mask:[35,50],collide_rect:50,collide_rect_ratio:50,collided_cal:50,collidedict:45,collidedictal:45,collidelist:45,collidelistal:45,collidepoint:[45,72,73,80,81,84,89],colliderect:[45,50,58,66,89],colliding_sprit:50,collis:[26,35,36,45,50,57,67,89],colon:33,color:[0,1,11,16,22,23,24,28,29,30,31,32,35,42,43,48,50,51,52,56,57,58,59,62,63,65,67,68,71,72,73,80,81,84,85],color_valu:20,colordict:21,colorkei:[23,26,28,29,31,43,51,52,56,58,63,65,66],colormap:[26,43,52],colorspac:18,colour:[20,44],column:[35,42,65,73,81],com:56,combin:[23,29,33,45,51,56,63,64,84,89],come:[10,25,26,28,29,37,44,57,62,63,64,65,66,84,87,88,89],comfort:[16,87],comma:[28,29,33,37,65],command:[24,25,26,32,53,65,67,68,69,70],commend:65,comment:[26,32,61,64,66,87,89],commerci:[16,63],commit:[33,63],common:[23,28,29,32,47,50,56,60,61,62,63,84],commun:[25,48,84],comp:84,compani:63,compar:[33,35,42,44,50,56,63,68,84],comparison:[20,25,26,42,65,70,74],compat:[23,25,28,29,31,38,48,50,54,67,86],compens:63,compil:[14,16,19,22,25,29,38,41,44],complement:36,complet:[24,33,38,40,45,50,51,52,63,64,84,89],complex:[38,58,63,64,67,69,84,86,87,88],complic:[68,84],compon:[20,35,36,43,51,65,68],composit:33,compositor:44,compound:46,compound_text:46,compress:[40,51],comput:[16,18,19,26,28,31,32,38,54,58,62,63,64,69,74,84],concept:[16,61,62,63,65,69,74],concern:[64,74],conclud:63,conclus:74,condit:[18,22,38,68,89],confid:84,configur:[16,23,29,39,65],confin:48,confus:[24,62,68,84],connect:[24,26,30,35,47,61,62,74,86],connected_compon:[35,57],consequ:86,consequenti:18,consid:[23,24,29,35,38,40,44,45,50,51,56,84,89],consider:51,consist:[19,24,25,27,38,46,61,65,85,86],consol:[26,44,53,67],constant:[16,22,25,29,33,38,39,44,46,47,54,57,60,68,72],constantli:84,constrain:[24,39,70],construct:[36,45,62],constructor:[29,32,50,58,64],consum:[1,54,65,84],contact:[57,58,59,60,62,63,64,65],contain:[0,7,9,10,16,19,22,23,24,25,28,29,31,32,33,34,35,37,38,39,43,45,46,47,50,51,53,55,58,59,62,63,64,66,84,86,88,89],content:[23,34,41,42,46,58,59,68,70],context:[10,23,42,48],contigu:[17,24,51],continu:[35,38,44,50,51,63,85],contol:47,contract:[18,56],contrast:[39,70,87],contribut:64,contributor:18,control:[3,5,18,22,25,26,28,29,32,33,34,38,41,44,50,51,52,53,54,56,58,59,60,61,63,69,70,78,84,86],controller_axis_lefti:47,controller_axis_leftx:47,controller_axis_righti:47,controller_axis_rightx:47,controller_axis_triggerleft:47,controller_axis_triggerright:47,controller_button_a:47,controller_button_b:47,controller_button_back:47,controller_button_dpad_down:47,controller_button_dpad_left:47,controller_button
_dpad_right:47,controller_button_dpad_up:47,controller_button_guid:47,controller_button_i:47,controller_button_leftshould:47,controller_button_leftstick:47,controller_button_rightshould:47,controller_button_rightstick:47,controller_button_start:47,controller_button_x:47,controlleraxismot:47,controllerbuttondown:47,controllerbuttonup:47,controllerdevicead:[25,47],controllerdeviceremap:[25,47],controllerdeviceremov:[25,47],controllertouchpaddown:47,controllertouchpadmot:47,controllertouchpadup:47,convei:22,conveni:[36,44,50,53,62,63],convent:47,convers:[1,18,20,35,42,52,84],convert:[1,18,20,22,24,29,31,37,38,40,43,45,49,51,52,58,59,62,65,66,85,86,89],convert_alpha:[26,31,51,86,89],convolut:[35,65],convolv:35,cool:84,cooper:19,coord:57,coordin:[9,22,24,29,30,32,35,36,39,50,51,56,85,89],copi:[17,22,26,31,35,36,38,42,45,46,48,49,50,51,52,56,58,62,63,64,65,84,85],copyright:18,coral1:21,coral2:21,coral3:21,coral4:21,coral:21,cord:26,coremidi:37,corner:[24,29,30,35,39,42,44,50,51,56,57,58,62,84,89],cornflowerblu:21,cornsilk1:21,cornsilk2:21,cornsilk3:21,cornsilk4:21,cornsilk:21,correct:[18,22,28,41,45,56,62,65,68,73],correct_gamma:20,correctli:[37,38,56,57,58,62,65,84],correspond:[29,33,35,37,42,51,53,68,84,89],cost:84,could:[22,25,33,41,49,50,52,56,57,58,61,62,64,65,84,85,86,87],couldn:[86,89],count:[17,19,32,35,38,42,56,57,69,73],counterclockwis:[24,29,36,56],coupl:[32,58,62,64,65],courier:29,cours:[57,69,70,71,85,86,87,89],cover:[16,24,44,45,51,56,57,59,62,74,85],coverag:24,cpu:[24,54,67,75,84],cram:84,crash:[41,64],crate:87,creat:[1,2,6,7,16,17,19,20,21,22,23,25,26,28,29,31,32,33,35,38,39,41,42,43,45,47,48,49,50,51,52,54,56,57,59,63,64,65,66,74,84,85,86,87,88,89],create_graphics_screen:62,create_screen:62,creation:[29,32,47,49,50,52,62],creativ:74,crect:57,crimson:21,critic:84,critter:[58,66],crop:[45,56],cross:[32,36,58,63,65,84,86],crossbar:26,crossbon:22,crossfad:65,crosshair:22,crt:23,crucial:84,crude:[26,58,62],cryptic:64,cube:26,cui:[67,68,73,81],current:[14,18,19,22,23,25,26,27,28,29,32,35,36,37,38,39,40,41,42,44,45,46,47,48,49,50,51,52,53,56,58,59,63,64,67,68,69,71,84,87,88,89],current_h:[23,59],current_w:[23,59],currrent:23,cursor:[16,26,39,58,63,84],cursor_arg:22,cursor_index:22,cursorfil:22,curv:30,custom:[16,23,25,26,38,50,63],custom_typ:25,customis:86,cut:[38,46],cutout:62,cx1:45,cx2:45,cy1:45,cy2:45,cyan1:21,cyan2:21,cyan3:21,cyan4:21,cyan:21,d:[32,33,51,62,84,85,87],da:61,dai:[63,64],damag:18,dark:[58,63],darkblu:21,darkcyan:21,darken:23,darkgoldenrod1:21,darkgoldenrod2:21,darkgoldenrod3:21,darkgoldenrod4:21,darkgoldenrod:21,darkgrai:21,darkgreen:21,darkgrei:21,darkkhaki:21,darkmagenta:21,darkolivegreen1:21,darkolivegreen2:21,darkolivegreen3:21,darkolivegreen4:21,darkolivegreen:21,darkorang:21,darkorange1:21,darkorange2:21,darkorange3:21,darkorange4:21,darkorchid1:21,darkorchid2:21,darkorchid3:21,darkorchid4:21,darkorchid:21,darkr:21,darksalmon:21,darkseagreen1:21,darkseagreen2:21,darkseagreen3:21,darkseagreen4:21,darkseagreen:21,darkslateblu:21,darkslategrai:21,darkslategray1:21,darkslategray2:21,darkslategray3:21,darkslategray4:21,darkslategrei:21,darkturquois:21,darkviolet:21,data1:37,data2:37,data3:37,data:[0,2,16,17,18,19,22,26,31,35,37,38,39,40,41,43,46,51,56,58,62,63,65,66,68,70,71,72,73,84,86,89],data_dir:[58,66],datatyp:65,david:84,dead:27,deal:[33,44,65,68,84,88],dealloc:32,dealt:[25,27,88],death:63,debat:84,debug:[25,56,88],decapit:63,decept:72,decid:[25,27,35,58,61,64,68],decim:24,declar:[9,65],decod:[22,29,33,46,63],decor:85,decreas:[38,72],decrement:10,dedic:64,deep
pink1:21,deeppink2:21,deeppink3:21,deeppink4:21,deeppink:21,deepskyblu:21,deepskyblue1:21,deepskyblue2:21,deepskyblue3:21,deepskyblue4:21,def:[29,32,35,39,50,56,57,58,62,64,66,71,72,73,79,80,81,85,86,87,88,89],default_id:37,default_lay:50,defin:[1,2,4,7,8,9,11,12,24,25,28,29,32,34,44,46,50,58,61,84,85,88],definit:[46,60,64,65,70],deflat:89,degre:[29,30,35,36,52,56,58,87,89],del:65,delai:[26,33,37,54,62,69,84],delet:[25,33,62,64,84],deliv:37,demo:[26,58,65],demonstr:[16,26,58,65],denot:[40,45,54],depend:[15,23,25,28,29,31,35,38,40,42,43,46,49,58,63,66,69,71,84],deprec:[23,25,26,27,29,32,36,49,50,52],deprecationwarn:[49,52],depth:[18,23,35,42,48,51,52,56,57,58,59,62],deriv:[26,44,50,58,64],descend:29,descent:[28,29],describ:[1,17,22,23,37,39,45,57,59,86],descript:[1,18,22,25,29,33,34,44,46,62,64],design:[17,50,61,62,63,64,68,71,88],desir:[18,51,56,58,59,84],desktop:[23,48,59],desper:62,dest:[29,35,48,51,56,65],dest_rect:56,dest_siz:56,dest_surf:56,dest_surfac:56,destin:[18,28,29,30,35,42,50,51,56,62,63,85],destroi:[1,31,48,64],destruct:[56,58],destsurfac:18,detail:[16,23,24,29,34,35,36,37,41,50,51,56,57,61,71,84],detect:[23,26,35,36,42,44,45,50,57],determin:[19,20,22,23,24,28,29,31,33,36,37,40,41,50,51,56,59,64,65,68,69,72,87],dev13:25,dev3:[25,54],dev7:39,dev8:24,dev:[18,57],develop:[23,39,44,48,53,63,67,86],devic:[16,18,19,25,26,32,33,37,39,47,55,57,59,63,84],device_id:[26,37],device_index:25,devicenam:38,dga2:84,dga:23,diagon:[35,56],diagram:[87,88],diamond:22,dict:[1,6,17,20,23,25,35,45,47,51,53,55],dictionari:[1,6,23,25,45,47,50,53,64,84],did:[18,24,32,62,63,64],didn:[84,85],diff:65,differ:[18,19,20,22,23,26,27,28,29,30,32,33,35,38,40,42,44,48,50,51,56,57,58,59,60,62,63,64,65,68,69,70,84,85,87,88],difficult:[37,61,63],digit:[20,32],dilemma:67,dimens:[17,18,23,24,26,28,29,30,35,36,42,43,45,50,51,52,56,58,65,89],dimension:[20,24,29,30,35,36,42,62,65],dimgrai:21,dimgrei:21,direct:[18,23,24,26,29,30,32,36,39,43,47,51,58,65,70,87,89],directfb:23,directli:[15,17,22,24,25,28,29,31,33,42,43,46,49,51,52,53,62,64,65,67,84],directmedia:63,directori:[23,28,29,53,58,62,67,68,86],directx:[23,37,63],dirti:[50,64],dirty_rect:84,dirtysprit:50,disabl:[23,25,29,33,44,47,51,54,58,66],disable_advanced_featur:44,disadvantag:[59,84],disallow:23,disappear:[18,30,46],disc:19,discard:54,disclaim:18,disconnect:61,discontinu:[17,51,67],discourag:27,discov:[63,84],discret:71,discuss:[64,84],disk:[50,84],displac:69,displai:[0,1,16,22,24,25,26,30,32,33,34,36,38,39,41,44,46,48,50,51,53,57,62,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,87,88,89],display_index:48,distanc:[36,42,56],distance_squared_to:36,distance_to:36,distil:84,distort:23,distribut:[16,18,53,58],dive:68,divers:61,divid:[33,61],divis:[20,22],dizzi:[58,66],doc:[23,27,48,50,51,70,78],document:[18,34,38,44,51,57,59,61,63,64,65,84,86,87],dodgerblu:21,dodgerblue1:21,dodgerblue2:21,dodgerblue3:21,dodgerblue4:21,doe:[4,5,8,13,19,22,23,24,25,26,28,29,30,32,33,35,37,38,40,41,43,44,45,46,47,50,51,54,56,57,58,62,64,65,68,84,86,88],doesn:[18,23,26,28,29,44,51,58,62,64,72,73,84,86,87,88,89],dokil:[50,64],dokill1:[50,64],dokill2:[50,64],dollar:33,domain:26,don:[18,24,26,29,32,50,54,56,57,58,61,62,63,64,65,68,70,74,85,86,87,89],done:[24,28,29,32,40,58,62,63,65,68,70,71,84,87],doreturn:51,dot:[29,36],doubl:[22,23,56,58,63,65,84],doublebuf:[23,84],doubler:56,down:[23,29,32,33,39,44,47,51,57,59,62,63,65,68,69,71,72,84,88],download:[63,65,89],dozen:84,dpad:47,dpi:29,drastic:85,draw:[16,20,23,28,29,31,32,35,48,50,51,57,62,63,64,66,67,68,71,72,73,79,80,81,84,
85,89],draw_blend_mod:48,draw_bottom_left:24,draw_bottom_right:24,draw_circle_part:24,draw_color:48,draw_lin:48,draw_point:48,draw_rect:48,draw_top_left:24,draw_top_right:24,drawback:84,drawboard:[73,81],drawbutton:[72,73,80,81],drawhp:[71,72,73,79,80,81],drawn:[23,24,30,32,35,50,51,58,62,63,64,68,85],drawplain:58,dream:67,drew:62,drift:32,drive:[3,19],driven:[23,50,67],driver:[23,37,44,59,84],drivernam:44,drop:[25,29,38],dropbegin:25,dropcomplet:25,dropfil:25,dropout:38,droptext:25,dstobj:12,dstrect:[12,48],dualshock:47,due:[20,23,36,51],dull:84,dummi:[58,64,88],dump:53,dungeon:63,duplic:[50,51],durat:[32,47],dure:[18,23,29,30,33,40,42,52,65],dvd:[19,69,77],dx:[25,35,51,87,89],dy:[25,35,51,87,89],dynam:69,e:[6,20,23,24,28,30,31,32,33,35,36,37,38,44,45,50,57,61,85,87,88],each:[17,18,19,20,23,24,25,26,27,28,29,30,32,35,36,38,39,42,46,49,50,51,52,53,56,57,58,60,61,62,63,64,65,71,84,85,87,88,89],earli:[16,48,63,84],earlier:[26,40,43],easi:[26,48,59,60,61,62,63,64,65,70,84,88,89],easier:[45,50,59,60,62,87],easiest:[26,47,62,71,85],easili:[22,44,57,58,60,62,64,65,69,74,84,87],east:22,eat:64,echo:26,eclass:10,eclecti:57,ed:[51,64],edg:[24,28,39,45,56,89],edit:[33,47],editbox:84,editor:[33,84],effect:[16,23,25,26,28,32,40,44,46,47,50,51,56,57,58,62,63,64,65,73,84,86],effici:[16,23,25,28,50,62,64],effort:74,eg:[26,29,31],eight:26,either:[15,17,19,22,23,29,31,32,38,39,45,50,51,56,58,59,89],eject:19,element:[4,17,20,29,36,37,42,43,49,51,62,64,65],elementari:87,elementwis:36,elif:[32,39,58,66,69,70,71,72,73,77,78,79,80,81,88,89],ellips:[16,24,30],ellipt:[24,30],els:[1,9,32,33,43,44,45,46,58,59,64,66,69,84,86,89],elsewher:1,emb:23,embed:[23,29],emit:39,emoji:28,empti:[6,19,23,25,27,28,29,32,35,44,45,46,50,53,59,64],emul:[23,28,39,59],enabl:[23,25,28,29,33,44,47,48,63,67],encapsul:88,enclos:[24,50,68],encod:[10,15,22,28,29,40,44,46,51],encode_file_path:44,encode_str:44,encount:35,end:[19,24,25,30,31,33,38,40,44,45,50,58,61,62,63,66,69,74,84,85,88],end_index:65,end_po:24,endcap:[24,30],endev:[38,40],endian:[17,44],endpoint:[24,30,45],enemi:64,engin:[24,63,67,84],enhanc:[16,28,29],enjoy:70,enlarg:29,enough:[29,50,57,58,60,62,63,64,65,67,71,84],ensur:[25,27,33,38,63,84,85,86],enter:[25,28,33,39,48,58,67],entir:[16,19,23,24,35,38,48,50,51,62,63,64,68,69,70,72,84],entri:[17,33,51,84],enumer:18,env:[66,86],env_var:44,environ:[23,25,28,37,44,46,63,67,68,86,87],equal:[14,17,20,22,24,25,29,33,35,36,51,67,73],equat:[35,69],equip:65,equival:[17,29,35,37,42,43],eras:[32,35,50,58,62,63,64,84],err:[86,89],errno:37,error:[1,2,3,7,10,12,20,23,25,26,29,31,33,37,43,44,46,53,56,58,60,64,65,74,84,86,89],error_msg:44,errorstr:44,es:23,esc:26,escap:[15,29,33,58,89],especi:[25,29,44,64,65,84,86],essenti:65,etc:[31,32,37,42,45,61,65,67,84],etyp:44,euclidean:[36,42],euro:33,eval:53,evalu:58,even:[15,18,19,23,24,26,28,29,32,40,42,44,54,57,62,63,64,65,67,68,72,84],event:[0,16,18,22,23,24,26,32,33,34,37,38,39,40,47,54,57,61,62,63,66,67,69,71,72,73,75,76,77,78,79,80,81,89],event_nam:25,eventlist:[25,26],eventtyp:[6,25],eventu:[13,23,25],ever:[37,40,59],everi:[16,18,19,22,25,26,33,35,36,38,40,44,50,51,52,54,56,59,61,63,64,65,67,68,69,70,71,74,84,85,86,88],everyth:[25,43,44,62,63,64,65,66,67,68,73,84,85,89],evil:84,evolv:26,ex:[39,70],exact:[20,22,23,24,31,32,39,57,59,72,73],exactli:[37,42,45,51,58,62,63,64,65,68,84],examin:16,exampl:[11,16,17,20,22,23,24,25,27,28,29,30,31,32,33,35,37,38,39,40,44,45,46,47,49,50,51,53,56,57,60,61,62,63,64,67,68,70,71,74,84,85,86,87,88],exce:51,exceedingli:29,excel:[58,62,63],except:[1,3,4,5,6,7,8,9,10
,12,15,20,22,23,24,25,28,29,31,32,33,37,38,44,45,46,51,56,60,61,65,69,86,89],exchang:42,excit:[58,62,63,65],exclaim:33,exclud:[17,25,51,53],exclus:[37,51],execut:[39,53,66,67,68,69,70,71,73],exemplari:18,exist:[18,19,22,23,24,25,28,29,32,37,47,48,50,51,64],exit:[23,24,32,37,44,48,62,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,85,86,89],exmapl:67,expand:[56,61],expans:56,expect:[20,27,37,38,39,43,53,57,62],expens:65,experi:[25,63,64,70],experiment:[18,23,30,36,44,46,48,57],expir:53,explain:[58,62,64,68,71,73,86,87,89],explan:[26,36,60,64,66,68,70,84],explanatori:58,explicit:[23,39,43],explicitli:[18,23,29,30,42,54,58,60],explor:63,explos:64,expos:[2,11,29,43,51],express:[18,20],extend:[17,24,31,44,50],extens:[1,2,3,4,5,7,8,9,10,12,13,16,31,56,63],extern:[22,25,49,52,67],extra:[22,29,38,45,50,51,58,59,62,63,64,65,70,89],extract:[42,56,58],extrem:[26,58,65],extsion:6,ey:63,f10:33,f11:33,f12:33,f13:33,f14:33,f15:33,f1:33,f2:33,f3:33,f4:33,f5:33,f6:33,f7:33,f8:33,f9:33,f:[1,17,18,33,84],face:16,fact:[30,64,84],factor:[26,29,65],fade:[26,38,40,65],fade_m:[38,40],fadeout:[38,40],fail:[23,25,27,28,29,44,53,58,60],failur:[1,2,4,5,6,8,9,53],fairli:[50,60,63,84,86,87],fake:[26,28,53],fall:[28,29,50],fals:[2,3,5,6,7,12,13,17,18,19,22,23,24,25,26,28,29,31,32,35,36,37,38,39,40,45,46,47,48,50,53,56,57,58,66,89],famili:28,familiar:[62,63,64,84,85,88],fantast:62,far:[20,36,58,62,71,84,87,88,89],farther:[58,62],fast:[32,35,42,51,52,56,58,62,63,64,65,69,84],faster:[31,36,44,50,52,56,58,64,65,67,84],fastest:[23,51,58,63],fastev:27,fastrendergroup:26,fault:84,favorit:84,favour:27,fbcon:23,featur:[16,23,26,27,28,29,35,44,50,51,59,64,65,87],fed:18,feed:89,feedback:23,feel:[26,62,64,84],felt:63,fetch:50,fever:[58,66],few:[22,28,54,62,63,64,65,74,84,86],ffff:15,fgcolor:29,field:[5,9,13,17,32,37],fighter:63,figur:62,file:[0,2,3,4,5,6,7,8,9,10,11,12,13,14,16,22,23,25,26,28,29,31,38,40,44,49,53,58,61,63,65,67,68,73,84,86],file_path:26,filenam:[7,22,25,28,29,31,38,40,62,86],fileobj:[31,40],filesystem:15,fill:[6,9,10,22,23,24,25,26,29,30,32,35,39,43,48,50,51,56,57,58,62,63,64,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,89],fill_rect:48,filled_:30,filled_circl:30,filled_ellips:30,filled_polygon:30,filled_trigon:30,filter:[16,25,35,56,65],fin:26,find:[16,18,19,23,24,25,26,28,32,35,36,38,50,51,56,57,58,59,62,63,64,65,69,71,84,87,89],find_channel:38,fine:[38,51,59,64,84],finer:57,finger:[55,70,86],finger_id:25,fingerdown:25,fingermot:25,fingerup:25,finish:[13,33,38,40,51,60,61,62,63,65],finit:35,fire:[65,84],firebrick1:21,firebrick2:21,firebrick3:21,firebrick4:21,firebrick:21,firmer:[61,63],first:[4,17,19,24,26,28,29,30,31,32,33,35,37,38,40,42,43,45,49,50,51,52,54,57,58,59,60,61,63,64,65,68,69,70,71,72,73,84,85,87,88,89],firstli:70,fist:[16,58,66],fist_offset:[58,66],fit:[18,23,29,38,45,51,85],five:[38,39,40],fix:[25,29,51,56,63,65,68,69,71,89],fixed_s:29,fixed_width:29,flag:[1,17,20,23,24,26,28,29,32,34,35,43,50,51,58,59,64,84],flame:65,flash:16,flavor:84,flexibl:[16,60,64,86,87],flickeri:84,flip:[18,22,23,24,25,31,32,35,39,42,45,48,50,56,57,58,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,89],flip_i:56,flip_x:56,flipi:48,flipx:48,flood:[73,81],floor:20,floralwhit:21,flourish:70,flush:[31,37],fly:[45,84,85,88],fnt:29,focu:[23,25,33,39,48,57,67],folder:58,folk:62,follow:[17,18,22,24,25,26,29,30,31,32,33,35,36,37,38,40,45,46,47,49,50,52,58,61,63,64,65,66,67,84],font:[7,16,26,32,44,58,60,63,66,68,69,70,71,72,73,76,77,78,79,80,81,84,85],font_index:[7,29],fonti:26,foo:84,fool:[63,65],forbidden:10,forc:[23,26,
38,44,63],forcibl:38,forego:25,foreground:29,foreign:62,forestgreen:21,forev:62,forget:[32,64,87],form:[18,22,25,30,37,51,85],formal:62,format:[18,20,22,23,24,29,30,31,32,35,37,38,40,41,42,43,44,45,49,51,52,56,58,59,62,63,65,68,84,85],formula:[42,44,51,87],forth:58,fortun:[26,62,65],forward:33,found:[16,20,26,28,29,34,35,37,45,49,50,51,52,58,59,62,65,70,85,88],four:[4,19,22,23,24,29,45,52,70,89],fourth:[22,71],fout:26,fp:[46,50,69,77,84],fpsclock:[69,70,71,72,73,77,78,79,80,81],fraction:29,frame:[18,23,25,27,32,44,50,54,57,58,62,63,64,69,77,84,85,88,89],framebuff:23,framer:[16,18,54,57,58,84],framework:[26,63],free:[10,26,40,62,64],freedom:63,freetyp:[7,16,26,28,29,44],freetype2:29,freetype_misc:[26,29],frequenc:[37,38],frequency_to_midi:37,frequent:[32,62,64,69],friendli:[24,68],friendlier:63,frill:64,from:[0,4,6,7,9,10,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,42,43,44,45,46,47,48,49,50,51,52,53,56,57,58,60,61,63,64,65,67,68,69,70,71,72,73,76,77,78,79,80,81,84,85,86,87,88,89],from_display_modul:48,from_joystick:47,from_polar:36,from_spher:36,from_surfac:[35,48,50],from_threshold:[35,57],from_window:48,frombuff:31,fromstr:31,front:50,frozen:53,ftfont:[28,29],fuchsia:21,full:[23,25,26,27,28,29,31,32,38,39,40,42,50,51,56,57,58,59,62,63,64,66,70,85,86,89],fulli:[20,23,33,45,48,50,51,52,58,65,84],fullnam:[58,66,86,89],fullscreen:[23,34,48,50,59,84],fullscreen_desktop:48,fun:[26,57,62,63,65],fundament:61,funni:64,further:46,furthermor:[68,70,71],futur:[18,19,23,27,29,32,37,38,41,46,64],g:[20,23,24,26,28,30,31,33,36,37,42,43,44,45,50,51,56,87,88],gain:[23,25,29,57,84],gainsboro:21,game:[16,18,22,23,24,25,26,27,32,39,44,47,51,54,57,59,60,62,64,66,67,68,69,70,71,72,73,74,84,86,88,89],gameobject:62,gamepad:[25,47],gameplai:[33,64],gamma:[20,23,48],gap:[17,51],garbag:[13,51],gather:59,gaussian:65,gener:[1,16,23,25,26,31,32,33,36,37,39,40,42,47,51,52,56,57,58,63,64,65,68,73,86,89],generateboard:[73,81],geniu:74,geometri:[67,71],get:[2,16,18,19,20,22,23,24,25,26,27,28,29,31,32,33,35,37,38,39,40,42,43,44,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,62,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,85,86,87,88,89],get_abs_offset:51,get_abs_par:51,get_act:23,get_al:19,get_allow_screensav:23,get_alpha:[51,86,89],get_and_flip:57,get_arraytyp:[49,52],get_asc:28,get_at:[35,51,56,58,65,66,84],get_at_map:[20,51],get_axi:[32,47],get_backend:18,get_bal:32,get_bits:51,get_block:25,get_bold:28,get_bottom_lay:50,get_bounding_rect:[35,51],get_buff:[2,17,51],get_busi:[19,38,40],get_button:[32,47],get_bytes:[42,51],get_cache_s:29,get_capt:23,get_clip:[50,51],get_colorkei:51,get_control:[18,57],get_count:[19,32,37,47],get_curr:19,get_cursor:39,get_default_font:[28,29],get_default_input_id:37,get_default_output_id:37,get_default_resolut:29,get_desc:28,get_desktop_s:23,get_devic:55,get_device_info:37,get_driv:[23,59],get_empti:19,get_endev:[38,40],get_error:[29,44],get_eventst:47,get_extend:31,get_fing:55,get_flag:51,get_focus:[23,33,39],get_font:28,get_fp:54,get_grab:25,get_guid:32,get_hardwar:41,get_hat:32,get_height:[28,51],get_id:[19,32],get_imag:[18,57],get_init:[19,23,27,28,29,32,37,38,44,46,47,49,58,60,66],get_instance_id:32,get_ital:28,get_layer_of_sprit:50,get_length:38,get_lines:28,get_lock:51,get_loss:51,get_map:47,get_mask:51,get_metr:29,get_mod:33,get_nam:[19,32],get_num_channel:38,get_num_devic:55,get_num_displai:23,get_num_fing:55,get_numax:32,get_numbal:32,get_numbutton:32,get_numhat:32,get_numtrack:19,get_offset:51,get_palett:51,get_palette_at:51,get_par:51,get_pau
s:19,get_pitch:51,get_po:[39,40,58,66,84],get_power_level:32,get_press:[33,39,84],get_queu:38,get_raw:[18,38],get_rawtim:54,get_rect:[29,35,48,50,51,56,58,62,63,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,85,86,87,88,89],get_rel:39,get_repeat:33,get_sdl_byteord:44,get_sdl_image_vers:31,get_sdl_mixer_vers:38,get_sdl_vers:[23,44],get_shift:51,get_siz:[18,29,35,42,51,58,66,85,89],get_sized_ascend:29,get_sized_descend:29,get_sized_glyph_height:29,get_sized_height:29,get_smoothscale_backend:56,get_sound:38,get_sprit:50,get_sprites_at:50,get_sprites_from_lay:50,get_surfac:[23,58,66,87,88,89],get_tick:54,get_tim:54,get_top_lay:50,get_top_sprit:50,get_track_audio:19,get_track_length:19,get_track_start:19,get_typ:46,get_underlin:28,get_vers:29,get_view:[17,51],get_viewport:48,get_vis:39,get_volum:[38,40],get_width:[51,58,66],get_window_s:23,get_wm_info:23,getbufferproc:2,getch:[67,75],getfilesystemencod:[15,44],getopt:[86,89],gfxdraw:[11,16,24,30],gg:20,ggi:23,ghost:64,ghostwhit:21,gif:[31,63,71,72,73,79,80,81,84],gil:[10,30],github:[44,46,56],give:[17,22,23,29,32,37,40,42,50,51,58,59,60,61,65,74,84,85,86,87],given:[9,14,17,20,22,23,24,25,28,29,30,32,35,36,37,38,39,44,45,46,47,50,51,52,54,55,56,58,59,64,68,70,73,84],gl:23,gl_accelerated_visu:23,gl_accum_alpha_s:23,gl_accum_blue_s:23,gl_accum_green_s:23,gl_accum_red_s:23,gl_alpha_s:23,gl_buffer_s:23,gl_context_flag:23,gl_context_major_vers:23,gl_context_minor_vers:23,gl_context_profile_:23,gl_context_profile_compat:23,gl_context_profile_cor:23,gl_context_profile_mask:23,gl_context_release_behavior:23,gl_depth_siz:23,gl_framebuffer_srgb_cap:23,gl_get_attribut:23,gl_multisamplebuff:23,gl_multisamplesampl:23,gl_set_attribut:23,gl_share_with_current_context:23,gl_stencil_s:23,gl_stereo:23,glcube:26,glitch:89,global:[53,60,68,86,89],glsl:36,glue:89,glyph:[28,29],gnu:[86,89],go:[20,22,24,32,36,37,51,53,57,58,59,60,61,64,65,66,68,70,74,84,85,86,88,89],goal:63,goe:[58,62,64,68,89],gold1:21,gold2:21,gold3:21,gold4:21,gold:21,golden:59,goldenrod1:21,goldenrod2:21,goldenrod3:21,goldenrod4:21,goldenrod:21,gone:[62,89],good:[18,22,23,26,28,38,44,51,58,61,62,63,64,65,67,68,84,85,86,87,89],goodluck:26,got:[25,40,57,62,64,84],gotten:62,grab:[25,33,39,48,64,89],grace:89,gradient:[26,35,65],grahic:23,grai:[21,29,71,72,73,79,80,81],graphic:[23,26,56,58,59,62,63,64,65,67,68,84],grasp:[61,89],grave:33,gray0:21,gray100:21,gray10:21,gray11:21,gray12:21,gray13:21,gray14:21,gray15:21,gray16:21,gray17:21,gray18:21,gray19:21,gray1:21,gray20:21,gray21:21,gray22:21,gray23:21,gray24:21,gray25:21,gray26:21,gray27:21,gray28:21,gray29:21,gray2:21,gray30:21,gray31:21,gray32:21,gray33:21,gray34:21,gray35:21,gray36:21,gray37:21,gray38:21,gray39:21,gray3:21,gray40:21,gray41:21,gray42:21,gray43:21,gray44:21,gray45:21,gray46:21,gray47:21,gray48:21,gray49:21,gray4:21,gray50:21,gray51:21,gray52:21,gray53:21,gray54:21,gray55:21,gray56:21,gray57:21,gray58:21,gray59:21,gray5:21,gray60:21,gray61:21,gray62:21,gray63:21,gray64:21,gray65:21,gray66:21,gray67:21,gray68:21,gray69:21,gray6:21,gray70:21,gray71:21,gray72:21,gray73:21,gray74:21,gray75:21,gray76:21,gray77:21,gray78:21,gray79:21,gray7:21,gray80:21,gray81:21,gray82:21,gray83:21,gray84:21,gray85:21,gray86:21,gray87:21,gray88:21,gray89:21,gray8:21,gray90:21,gray91:21,gray92:21,gray93:21,gray94:21,gray95:21,gray96:21,gray97:21,gray98:21,gray99:21,gray9:21,great:[23,57,63,65,84],greater:[17,24,28,29,33,35,37,40,51,74],green1:21,green2:21,green3:21,green4:21,green:[1,18,20,21,23,24,42,51,52,57,58,65,68,69,70,71,72,73,76,77,78,79,80,81,85],green
yellow:21,grei:[21,58],grey0:21,grey100:21,grey10:21,grey11:21,grey12:21,grey13:21,grey14:21,grey15:21,grey16:21,grey17:21,grey18:21,grey19:21,grey1:21,grey20:21,grey21:21,grey22:21,grey23:21,grey24:21,grey25:21,grey26:21,grey27:21,grey28:21,grey29:21,grey2:21,grey30:21,grey31:21,grey32:21,grey33:21,grey34:21,grey35:21,grey36:21,grey37:21,grey38:21,grey39:21,grey3:21,grey40:21,grey41:21,grey42:21,grey43:21,grey44:21,grey45:21,grey46:21,grey47:21,grey48:21,grey49:21,grey4:21,grey50:21,grey51:21,grey52:21,grey53:21,grey54:21,grey55:21,grey56:21,grey57:21,grey58:21,grey59:21,grey5:21,grey60:21,grey61:21,grey62:21,grey63:21,grey64:21,grey65:21,grey66:21,grey67:21,grey68:21,grey69:21,grey6:21,grey70:21,grey71:21,grey72:21,grey73:21,grey74:21,grey75:21,grey76:21,grey77:21,grey78:21,grey79:21,grey7:21,grey80:21,grey81:21,grey82:21,grey83:21,grey84:21,grey85:21,grey86:21,grey87:21,grey88:21,grey89:21,grey8:21,grey90:21,grey91:21,grey92:21,grey93:21,grey94:21,grey95:21,grey96:21,grey97:21,grey98:21,grey99:21,grey9:21,greyscal:56,grid:29,ground:62,group1:[50,64],group2:[50,64],group:[26,35,50,58,63,86],group_list:50,groupcollid:[50,64,87],groupmulti:64,groupsingl:[50,64],grow:[24,45,84],guarante:[18,23,25,38,46,53],guess:[38,84],gui:[63,67,71,72,73,75,79],guid:[16,32,57,85],gun:71,h:[1,2,3,4,5,6,7,8,9,10,11,12,13,14,20,25,26,30,33,42,43,45,51,56],ha:[1,2,11,16,17,18,19,22,23,24,25,26,27,28,29,31,32,33,35,36,38,39,40,42,43,44,45,46,47,49,50,51,52,53,54,56,57,58,59,60,62,63,64,65,67,68,69,70,71,72,73,84,86,87,89],habit:87,had:[51,63,64,84],hadn:84,half:[26,50,63,84],hand:[20,22,42,43,58,60,62,84,87],handi:[26,61,86,87,88,89],handili:89,handl:[10,15,16,23,25,28,29,32,33,38,43,44,50,56,59,61,63,64,65,66,70,87,89],handler:[25,27],hang:[32,84],happen:[19,24,58,60,63,64,84,85,88,89],hard:[62,63,65,68],hardcod:26,harder:[61,65],hardest:64,hardwar:[22,23,24,30,32,37,38,41,51,63,64,65],harmless:23,hasattr:28,hash:33,hashabl:45,hasn:29,hat:[25,32,47],hat_numb:32,have:[1,4,9,11,18,19,20,22,23,24,25,26,27,28,29,30,32,33,35,36,38,40,41,42,43,44,45,47,50,51,52,53,54,56,57,58,59,60,61,62,63,64,65,67,68,69,70,72,73,84,85,86,87,88,89],haven:84,he:[58,84,85],headach:84,header:[0,1,2,3,4,5,6,7,8,9,10,12,13,14,68,70,76],headless:26,headless_no_windows_need:26,heavili:25,hei:84,height:[9,18,22,23,24,26,28,29,31,32,35,41,42,45,48,50,51,56,59,62,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84],held:[33,88],hello:[68,69,70,76,77,78,85],help:[16,22,25,26,28,32,33,37,38,39,48,50,51,53,54,58,59,61,64,65,70,86,87,88],helper:63,henc:29,here:[18,23,25,26,34,35,39,41,44,50,51,58,59,60,64,65,66,68,70,84,85,86,87,88,89],hex:20,hflip:[18,57],hi:[58,62],hidden:[23,25,39],hide:[33,39,44,48,58],high:[0,23,26,50,67,69,84],high_frequ:[32,47],higher:[16,23,37,47,50,52,59,63,64],highest:44,highgui:44,highli:[27,51],him:[58,62],hint:[23,84],hit:[25,26,58,61,84,85,87,88],hitbox:[58,66],hkey_local_machin:37,hline:30,hmm:62,hold:[1,25,28,32,33,50,56,64,65,70],holdov:64,home:33,honeydew1:21,honeydew2:21,honeydew3:21,honeydew4:21,honeydew:21,hook:50,hoonwhitecatr:[68,69,70,71,72,73,76,77,78,79,80,81],hope:64,hopefulli:[62,65,87],horizont:[18,24,25,26,29,30,39,56,58,65],horizontal_advance_i:29,horizontal_advance_x:29,hot:25,hotpink1:21,hotpink2:21,hotpink3:21,hotpink4:21,hotpink:21,hotplug:32,hotspot:[22,39],hour:84,how:[15,16,19,20,23,24,26,29,31,32,33,35,36,38,40,44,46,50,51,54,56,58,61,63,64,65,68,69,70,71,72,73,74,84,87,88,89],howev:[13,18,29,34,42,44,45,47,50,56,67,68,69,70,74,84],hp:[70,71,72,73,78,79,80,81],hsl:20,hsla:20,hsv:[18,20,57],hsva
:20,html:[20,70,78],http:[39,56,70,78,86,89],hue:18,human:[63,74,86],humung:63,hundr:63,husano896:39,hw:[23,59],hwaccel:51,hwsurfac:[23,51,65,84],hx:47,i1:20,i1i2i3:20,i2:20,i3:20,i686:56,i:[1,5,16,17,18,22,24,26,27,32,33,35,36,38,45,57,61,63,64,65,68,70,71,72,73,79,80,81,84,85,86,87,88,89],iceberg:64,icon:[23,48],iconifi:23,icontitl:23,id3:40,id:[3,19,23,25,32,33,37,38,48,55],idea:[23,51,57,59,61,63,65,70,71,72,73,86,87],ident:[25,42,72],identif:37,identifi:[3,25,32,33,44,46,57],idiom:[58,84],idl:[24,25,27,32,38,40],idx:50,ie:[31,65,84],ignor:[25,29,35,37,38,40,51,53,84],illeg:45,illus:62,illustr:[62,65,87],im:33,imag:[12,13,16,18,20,22,23,26,28,29,30,41,46,48,50,52,56,58,59,61,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,86,87,88,89],image_fil:26,imagefil:26,imagin:[62,85,89],imgsurfac:65,immanuel:74,immedi:[23,25,32,37,38,58],immut:20,implement:[10,11,13,17,18,20,23,26,29,37,42,43,44,50,61,63,64,73,74,84,89],impli:[18,29],implicitli:[23,51],import_pygame_bas:[1,11],importantli:62,importerror:[65,86,89],impos:[33,38],imprecis:84,impress:63,improperli:41,improv:[24,28,40,58,73,84],inact:[29,38],inadequ:84,incas:31,inch:29,incident:18,inclin:36,includ:[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,16,18,22,23,26,28,29,30,31,32,34,37,44,45,50,51,53,54,56,58,61,63,64,65,67,70,74,84,86,87],inclus:[1,4,20,25,29,38],incom:16,incomplet:53,incorrect:43,increas:[23,29,38,50,68,72,74,84],incred:[74,84],increment:[11,56,65],inde:4,indefinit:[38,40],indent:[32,61],independ:[58,86],index:[1,7,16,18,19,23,29,32,33,42,43,44,45,47,48,49,50,51,52,55,62,65,84],indexerror:[30,35,37,51],indexexcept:17,indexoutofbound:50,indianr:21,indianred1:21,indianred2:21,indianred3:21,indianred4:21,indic:[12,23,24,25,30,35,37,39,40,42,45,46,49,50,52,53,65],indigo:21,indirect:18,individu:[16,19,29,35,44,51,53,58,84],ineffici:58,inequ:25,infinit:[58,63,68],inflat:[29,45,58,66,84,89],inflate_ip:45,influenc:23,info:[5,23,37,59,86],inform:[6,14,16,17,18,19,23,28,32,33,36,37,39,47,51,52,53,55,59,62,64,65,68],inherit:[50,51,64,86,87],init:[1,18,19,22,23,24,27,28,29,32,37,38,39,44,46,47,53,54,58,59,62,63,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,85,89],initi:[16,18,19,23,24,25,27,28,29,32,33,34,37,38,44,46,47,49,50,51,54,57,59,63,64,66,68,69,77,87],initialis:[32,44,47,61,85,88,89],innat:74,inner:72,input:[16,18,23,25,26,32,33,39,48,51,53,56,57,61,63,66,67,68,69,72,73,74,78,84,87],input_onli:48,inputimag:26,insensit:[43,51],insert:[33,69,73],insid:[22,24,26,31,44,45,50,51,57,58,63,65,71,72,89],insight:84,inspir:16,instal:[16,18,26,58,65,84],instanc:[2,3,4,5,6,7,8,9,12,13,17,19,25,26,29,30,32,35,38,42,45,50,51,58,61,64,85,87,88],instance_id:[25,32],instead:[9,15,20,22,23,24,25,26,28,29,32,33,36,38,42,44,50,51,53,56,62,64,65,69,84,87],instruct:56,instrument:37,instrument_id:37,int32:65,int8:65,int_valu:51,integ:[1,9,14,17,19,20,23,24,26,29,30,31,32,33,37,38,39,40,42,43,44,45,47,49,50,51,52,53,54,55,65,71,73],integr:18,intend:[43,50,51],interact:[6,16,26,33,34,42,47,50,53,63,65,70,87],interest:[25,46,63,67,74,84],interf:37,interfac:[1,17,20,29,33,37,38,42,43,47,51,57,63,70,73],interior:56,intern:[0,13,23,25,27,38,44,46,51,58,68,70,84,89],interpol:[20,30,36],interpret:[15,26,29,44,61,84],interrupt:18,intersect:[35,45,50,64,67],interv:33,intial:[58,66],intimid:65,intric:63,intro_bal:63,introduc:[23,25,43,61,62,65,87,89],introduct:[16,60,67,75],introspect:43,intuit:70,invalid:[1,23,38,45,47,56],invalu:87,inverse_set:56,invert:[29,35,36,42],invis:85,invok:[17,29,88],involv:[16,26,29,51,64,84,87,89],inward:24,ip:45,irc:84,is_contro
l:47,is_norm:36,iscaptur:25,ish:24,isn:[19,27,48,57,61,62,64,69,71,84,88],isol:[51,57],issu:[23,62,63],ital:[28,29],item:[29,30,42,43,50,62,64],items:42,iter:[28,29,42,50,64,85,88,89],its:[2,10,13,17,18,23,24,25,29,30,32,35,36,37,38,42,44,45,46,48,50,51,58,59,61,62,63,64,68,69,70,84,85,86,87,88,89],itself:[13,18,28,29,36,42,43,44,48,51,58,61,62,63,64,87,88,89],ivori:21,ivory1:21,ivory2:21,ivory3:21,ivory4:21,iyuv_overlai:41,j:[33,35,73,81],jaggi:56,jid:32,jitter:32,job:[63,64],joi:25,join:[31,45,58,66,86,89],joint:24,journal:84,joyaxismot:[25,32],joyballmot:[25,32],joybuttondown:[25,32],joybuttonup:[25,32],joydevicead:[25,32],joydeviceremov:[25,32],joyhatmot:[25,32],joystick:[16,25,26,47,63,84,85,88],joystick_count:32,jp:18,jpeg:[31,84],jpg:[31,63],jumbl:84,jump:[39,62,63,65,84],just:[24,26,29,30,32,35,38,40,41,50,51,52,53,56,57,58,59,60,63,64,65,67,68,69,70,71,73,84,85,87,88,89],k:[33,36],k_0:33,k_1:33,k_2:33,k_3:33,k_4:33,k_5:33,k_6:33,k_7:33,k_8:[33,70,78],k_9:33,k_:[33,34,70],k_a:[33,70,78,89],k_ac_back:33,k_ampersand:33,k_asterisk:33,k_at:33,k_b:33,k_backquot:33,k_backslash:33,k_backspac:33,k_break:33,k_c:33,k_capslock:33,k_caret:33,k_clear:33,k_colon:33,k_comma:33,k_d:33,k_delet:[33,70,78],k_dollar:33,k_down:[33,70,71,72,73,78,79,80,81,88,89],k_e:33,k_end:33,k_equal:33,k_escap:[22,33,57,58,66],k_euro:33,k_exclaim:33,k_f10:33,k_f11:33,k_f12:33,k_f13:33,k_f14:33,k_f15:33,k_f1:33,k_f2:33,k_f3:33,k_f4:[33,70],k_f5:33,k_f6:33,k_f7:33,k_f8:33,k_f9:33,k_f:[33,84],k_g:33,k_greater:33,k_h:33,k_hash:33,k_help:33,k_home:33,k_i:33,k_insert:33,k_j:33,k_k:33,k_kp0:33,k_kp1:33,k_kp2:33,k_kp3:33,k_kp4:33,k_kp5:33,k_kp6:33,k_kp7:33,k_kp8:33,k_kp9:33,k_kp_divid:33,k_kp_enter:33,k_kp_equal:33,k_kp_minu:33,k_kp_multipli:33,k_kp_period:33,k_kp_plu:33,k_l:[33,70,78],k_lalt:33,k_lctrl:[33,70,78],k_left:[33,70,78],k_leftbracket:33,k_leftparen:33,k_less:33,k_lmeta:33,k_lshift:33,k_lsuper:33,k_m:33,k_menu:33,k_minu:33,k_mode:33,k_n:33,k_numlock:33,k_o:33,k_p:33,k_pagedown:33,k_pageup:33,k_paus:33,k_period:33,k_plu:33,k_power:33,k_print:33,k_q:33,k_question:33,k_quot:33,k_quotedbl:33,k_r:33,k_ralt:33,k_rctrl:33,k_return:33,k_right:[33,70,78],k_rightbracket:33,k_rightparen:33,k_rmeta:33,k_rshift:33,k_rsuper:33,k_scrollock:33,k_semicolon:33,k_slash:33,k_space:33,k_sysreq:33,k_t:[33,84],k_tab:33,k_u:33,k_underscor:33,k_up:[33,70,71,72,73,78,79,80,81,88,89],k_v:33,k_w:33,k_x:33,k_y:33,k_z:[33,89],kanji:29,kant:74,kde:44,keep:[13,17,25,32,50,54,56,57,58,59,60,61,62,63,64,84,88],kei:[16,17,20,22,23,25,26,33,34,35,45,51,55,57,58,61,63,64,66,70,71,72,73,74,78,79,80,81,84,88,89],kern:[28,29],kewdown:78,key_cod:33,key_rect:45,keyboard:[16,25,26,34,39,62,63,67,70,84,85],keycod:33,keydown:[22,25,33,34,57,58,62,66,70,71,72,73,78,79,80,81,88,89],keypad:33,keypress:84,keyup:[25,33,34,70,88,89],keyword:[16,18,24,25,26,35,38,44,48,51,53,56],khaki1:21,khaki2:21,khaki3:21,khaki4:21,khaki:21,khz:49,kick:61,kill:[50,53,64],kind:[37,43,51,61,65,67,84],kmod_alt:33,kmod_cap:33,kmod_ctrl:33,kmod_lalt:33,kmod_lctrl:33,kmod_lmeta:33,kmod_lshift:33,kmod_meta:33,kmod_mod:33,kmod_non:33,kmod_num:33,kmod_ralt:33,kmod_rctrl:33,kmod_rmeta:33,kmod_rshift:33,kmod_shift:33,know:[18,28,31,33,46,57,58,62,64,65,67,68,69,70,73,87,88,89],knowledg:[74,84],known:[33,84],korean:16,kwarg:[35,48,50,51],kwd:53,l:[32,33],l_margin:[73,81],lack:23,laggi:38,laid:[29,61],landscap:62,lang:84,languag:[26,28,33,62,63,84],lantinga:63,laplacian:56,larg:[16,29,43,45,51,61,64,65,87],larger:[23,38,51,56,58,65,67],largest:[35,57,59,63,65],last:[24,25,29,30,32,50,51,52,54
,58,62,63,64,65,68,84],lastli:[22,50,58,59,62,64,65],late:84,latenc:[37,38,84],later:[18,23,30,37,38,42,46,50,57,58,59,60,62,63,64,65,68,86,89],latest:[29,70],latin1:[28,29],lavend:21,lavenderblush1:21,lavenderblush2:21,lavenderblush3:21,lavenderblush4:21,lavenderblush:21,lawngreen:21,layer1:50,layer1_nr:50,layer2:50,layer2_nr:50,layer:[50,63,64],layer_nr:50,layereddirti:50,layeredupd:50,layout:[17,28,29,43,52,88],lbm:31,leak:62,learn:[26,51,59,62,63,65,67,68,69,70,73,74,84],learnt:89,least:[14,24,30,35,43,51,55,61,65,67,70,84],leav:[24,29,51,65],left:[20,23,24,25,29,32,33,35,38,39,44,45,47,50,51,57,58,62,63,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,86,87,88,89],leftclick:26,leftmost:47,legaci:[27,67,75],lemonchiffon1:21,lemonchiffon2:21,lemonchiffon3:21,lemonchiffon4:21,lemonchiffon:21,len:[17,20,22,24,30,50,64,65],length:[1,4,9,17,19,20,23,24,25,30,32,33,36,38,42,47,51,58,68,87],length_squar:36,leonidovich:74,lerp:[20,36],less:[24,25,27,28,33,35,37,38,54,55,56,67,85],lesson:84,let:[10,20,21,26,44,51,58,59,61,63,65,69,70,71,84,88],letter:[19,28,51],level:[0,16,17,20,23,28,37,38,40,41,51,53,56,63,64,65,67,70],lgpl:16,li:84,liabil:18,liabl:18,lib:26,librari:[18,24,26,29,30,31,32,37,38,44,47,59,62,63,67],libsdl:39,licens:[16,86,89],lie:[30,45],life:[57,59],lifetim:[13,42,52,65],lightblu:21,lightblue1:21,lightblue2:21,lightblue3:21,lightblue4:21,lightcor:21,lightcyan1:21,lightcyan2:21,lightcyan3:21,lightcyan4:21,lightcyan:21,lightgoldenrod1:21,lightgoldenrod2:21,lightgoldenrod3:21,lightgoldenrod4:21,lightgoldenrod:21,lightgoldenrodyellow:21,lightgrai:21,lightgreen:21,lightgrei:21,lightpink1:21,lightpink2:21,lightpink3:21,lightpink4:21,lightpink:21,lightsalmon1:21,lightsalmon2:21,lightsalmon3:21,lightsalmon4:21,lightsalmon:21,lightseagreen:21,lightskyblu:21,lightskyblue1:21,lightskyblue2:21,lightskyblue3:21,lightskyblue4:21,lightslateblu:21,lightslategrai:21,lightslategrei:21,lightsteelblu:21,lightsteelblue1:21,lightsteelblue2:21,lightsteelblue3:21,lightsteelblue4:21,lightweight:[50,84],lightyellow1:21,lightyellow2:21,lightyellow3:21,lightyellow4:21,lightyellow:21,like:[1,10,16,18,20,22,23,24,26,27,29,31,32,33,34,35,36,38,39,40,42,44,47,50,51,56,57,58,59,60,61,62,63,64,65,68,69,70,72,73,74,84,85,86,87,88,89],lime:21,limegreen:21,limit:[18,19,23,24,25,28,29,31,32,38,40,51,52,53,54,60,63,64,65,73,84],line:[16,24,26,28,30,31,32,45,48,53,60,61,62,63,64,65,66,68,69,70,84,87,89],line_height:32,line_spac:29,linear:[20,23,36,50,65],linearli:51,linen:21,link:38,linux:[18,23,37,40,44,57,63,84],liquid:[26,62],list:[16,18,19,20,22,23,24,25,26,27,28,29,30,32,33,34,35,37,41,44,45,46,50,51,53,58,59,64,65,68,70,84],list_camera:[18,57],list_mod:[23,59],listen:39,littl:[17,22,26,44,50,54,58,59,61,62,64,65,68,84,87,88],live:[16,26,65],ll:[22,58,61,62,63,64,65,84,85,87,88,89],load:[8,10,16,18,22,26,30,31,40,50,59,61,62,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,87,89],load_background_imag:62,load_bas:31,load_extend:31,load_imag:[58,66],load_player_imag:62,load_png:[86,87,88,89],load_sound:[58,66,86],load_xbm:22,local:[9,16,33,34,38,39,44,57,58,60,61,68,69,70,71,72,73,76,77,78,79,80,81,85,86,89],locat:[26,34,35,39,41,58,68,69,70,71,72,73,84],lock:[13,17,24,25,27,33,42,43,50,51,52,84],lockobj:13,logger:26,logic:[48,58,61,63,65,68,69,70,71,84,86],logical_s:48,logo:23,loki:63,longer:[19,32,38,47,87,88],longest:38,look:[22,24,28,36,37,44,46,52,56,57,58,62,63,64,65,68,69,70,72,84,85,86,87,88,89],lookout:26,lookup:[23,25,28,50,58,65],loop:[24,25,32,38,40,54,57,61,62,63,64,66,68,84,86,87,88,89],lose:[1
9,23,33,56,84],loss:[18,23,58,59],lost:[25,40,46,62],lostsprit:64,lot:[26,32,51,54,56,58,59,61,62,63,64,65,67,73,84,85,87,89],loud:38,love:84,low:[32,63,67],low_frequ:[32,47],lower:[19,23,32,37,38,41,52,84],lowercas:28,lowest:37,lowli:84,ls:18,lt:32,luck:65,luckili:84,luma:18,m:[20,26,33,53,58,65,84],mac:[18,23,26,37,40,44,46,63,84],machin:[26,56,64,65,84],maco:22,macosx:25,macro:[3,4,5,6,7,8,9,12,14],made:[3,6,7,12,16,23,25,49,62,71,73,74,84,87],magazin:[16,71],magenta1:21,magenta2:21,magenta3:21,magenta4:21,magenta:21,magic:87,magnifi:26,magnitud:36,magnitude_squar:36,mai:[1,2,18,19,20,22,23,25,27,28,29,30,31,32,33,37,38,40,43,44,45,46,51,53,57,59,62,63,64,65,84],mail:[62,84],main:[18,24,25,26,27,32,39,41,46,50,53,57,61,62,64,65,66,71,72,73,79,80,81,85,86,87,88,89],main_dir:[58,66],mainli:[28,51,62,64,65],maintain:[28,44,50,61],major:[14,23,31,38,44,63],make:[0,16,20,22,24,25,26,27,31,38,42,44,45,50,51,52,54,56,57,58,59,60,63,64,65,67,68,70,71,72,73,74,84,85,86,87,88,89],make_sound:[38,49],make_surfac:[42,43,52],maker:[67,74],malform:29,man:67,manag:[16,19,25,32,38,42,50,51,59,63,64],mani:[19,23,24,25,28,29,33,38,39,40,44,45,50,51,54,56,58,59,62,63,84],manipul:[16,31,42,45,51,63,65,84,87],manner:[59,87],manual:[19,28,29,41,44,51,57,64,84],map:[17,20,24,32,42,43,47,51,52,59,65],map_arrai:[43,52],map_rgb:[20,24,42,51,52],mapped_int:51,margin:[26,68,71,72,73,80,81],mario:88,mark:[33,42,57,63],maroon1:21,maroon2:21,maroon3:21,maroon4:21,maroon:21,mask:[22,23,26,29,42,50,51,59,84],maskfil:22,mass:35,master:[56,65,67],match:[17,23,28,29,32,33,37,38,42,43,51,52,56,58,59,64,65,68],match_font:28,materi:18,math:[24,30,35,36,86,87,89],mathemat:[51,61,65],matter:[31,51,62,64,84,87],max:[24,32,35,57,71],max_i:29,max_x:29,maxhp:[71,72,73,79,80,81],maxi:28,maxim:[25,48],maximum:[23,28,29,84],maxtim:38,maxx:28,mayb:[62,64,70,84,86],me:[59,63,65],mean:[20,22,23,24,29,32,33,36,37,38,39,40,43,47,48,50,56,57,58,59,62,63,64,68,69,70,71,72,84,88],meant:[44,48,64,84],measur:[24,37,84,87],mechanim:67,mechanin:75,mediev:63,medium:32,mediumaquamarin:21,mediumblu:21,mediumorchid1:21,mediumorchid2:21,mediumorchid3:21,mediumorchid4:21,mediumorchid:21,mediumpurpl:21,mediumpurple1:21,mediumpurple2:21,mediumpurple3:21,mediumpurple4:21,mediumseagreen:21,mediumslateblu:21,mediumspringgreen:21,mediumturquois:21,mediumvioletr:21,meet:[45,84],megabyt:23,member:[25,50,59,64,68,84],membership:[50,64],memori:[17,23,24,29,38,51,52,62,67,84],mental:74,mention:[54,58,64,68,70,84],menu:[23,33,61],merchant:18,mercuri:44,mere:[37,84],merg:64,merrili:89,messag:[25,27,37,44,53,58,86,89],messi:64,met:[18,22],meta:33,method:[9,10,12,15,16,17,19,20,22,23,24,25,26,28,29,30,31,32,35,36,38,39,40,42,44,45,46,49,50,51,52,54,58,59,61,62,63,64,65,71,73,84,85,86,87,89],metric:[28,29],mice:39,micro:44,microsoft:23,midbottom:45,middl:[39,47,57,62,65],midi:[25,26],midi_ev:37,midi_event_list:37,midi_not:37,midi_output:37,midi_to_ansi_not:37,midi_to_frequ:37,midiexcept:37,midiin:[25,37],midiout:[25,37],midis2ev:37,midisport:37,midleft:[45,88,89],midnightblu:21,midright:[45,88,89],midtop:45,might:[20,23,25,26,29,44,46,60,61,62,64,65,84,86,87,89],mighti:62,migrationguid:39,milli:54,millisecond:[25,33,37,38,40,50,54,63,84],mime:46,mimic:29,min:[24,57],min_alpha:51,min_i:29,min_x:29,mind:[39,54,60,65],minhp:73,mini:28,miniatur:32,minim:[23,25,48,64],minimum:[23,24,28,30,31,35,51],minor:[14,31,38,44,63],mintcream:21,minu:33,minx:28,mirror:[23,58],miss:[58,66,84],missingmodul:[49,52],mistyros:21,mistyrose1:21,mistyrose2:21,mistyrose3:21,mistyrose4:21,misund
erstand:84,miter:24,mix:[28,38,51,63,65,84],mix_chunk:8,mix_setmusicposit:40,mixer:[0,16,26,38,44,49,58,66],mizuno:18,mmsystem:37,mmx:[26,56],moccasin:21,mod:[25,33,34,40,67],mod_:34,modal:48,mode:[16,20,23,24,25,26,28,29,30,33,38,39,44,46,48,50,51,53,58,63,65],mode_ok:[23,59],model:51,modif:[13,18,51,86],modifi:[29,30,33,34,37,39,45,48,49,50,51,64,65,85],modnam:1,modul:[0,1,2,3,4,5,6,7,8,9,10,12,13,16,17,21,26,34,41,42,48,51,53,59,60,61,62,65,66,68,87,89],modulu:20,moment:[46,49,84,87],momma:62,monitor:[23,34,59,63,68],monkei:[58,63,66],mono:[29,38,49],monochrom:29,monster:64,month:63,moon:84,more:[16,18,19,20,23,24,25,26,28,29,30,31,32,35,36,37,38,39,42,43,44,47,50,51,54,57,58,59,60,61,62,63,64,70,71,73,74,85,86,87,88,89],most:[19,23,24,25,26,28,29,31,38,39,40,44,45,50,51,54,57,59,60,61,63,64,65,74,84,85],mostli:70,motion:[32,39,48,63,87,89],motiv:70,mous:[16,22,23,25,26,32,46,48,58,61,62,63,66,67,70,72,84,85,88],mousebuttondown:[22,25,27,39,58,66,84],mousebuttonup:[25,39,58,66,72,73,80,81,84],mousemot:[25,39],mousewheel:[25,39],movabl:[88,89],move:[16,25,26,39,44,45,50,51,56,58,61,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,87,88,89],move_and_draw_all_game_object:62,move_ip:[45,58,66],move_to_back:50,move_to_front:50,movedown:[70,78,88,89],moveit:[26,62],movement:[32,39,50,58,87,88],movepo:[88,89],moveright:[69,70,77,78],moveup:[69,77,88,89],movi:74,mozart:40,mp3:40,ms:[32,37,39,47],msg:37,msmf:18,much:[18,24,26,28,31,54,56,58,59,60,61,62,63,65,67,68,71,73,74,84,85,87,89],multi_thread:53,multicolor:26,multidimension:65,multigestur:25,multimedia:[33,63,84],multipl:[1,7,18,19,22,23,24,25,26,28,32,33,36,38,41,44,50,51,56,57,61,63,64,65,69],multipli:[20,24,29,33,36,42,48,56,65],multisampl:23,multithread:27,music:[8,16,26,37,38,40,63,74,86],must:[1,2,4,17,18,19,20,22,23,24,25,27,28,29,30,31,32,35,36,37,38,41,42,43,44,45,46,47,49,50,51,52,53,56,58,59,60,63,64,65,68,72],mustlock:51,mutabl:25,mute:38,my:[26,32,61,62,63,65,84,86,89],my_appl:44,my_data_typ:46,mygroup:64,myscreen:[68,69,70,71,72,73,76,77,78,79,80,81],mysprit:64,mysurf:51,mytext:[68,69,70,71,72,73,76,77,78,79,80,81],mytextarea:[68,69,70,71,72,73,76,77,78,79,80,81],mytextfont:[68,69,70,71,72,73,76,77,78,79,80,81],n:[28,33,38,65],name:[1,10,18,19,20,23,25,26,28,29,32,33,37,44,45,47,50,51,53,58,59,61,62,63,64,65,66,68,70,86,88,89],name_forindex:47,name_of_environment_vari:44,namehint:[31,40],namespac:[34,58,60],nano:31,nasti:86,nativ:[16,18,22,44,57,84],natur:[40,65,74,84,86],navajowhit:21,navajowhite1:21,navajowhite2:21,navajowhite3:21,navajowhite4:21,navi:21,navyblu:21,ndim:42,nearest:[24,38],nearli:[64,67],neater:88,necessari:[23,25,27,29,38,51,60,61,64,68,84,85,87,88,89],necessarili:63,need:[11,16,18,19,22,23,25,26,27,28,31,32,33,35,42,44,45,46,50,51,53,57,58,59,60,62,63,64,65,66,67,68,69,70,71,72,73,85,86,87,88,89],needless:[68,71,73,84],needn:[26,61,89],neg:[9,10,17,25,29,35,36,38,39,40,42,45,51,56,65,89],neglig:18,neither:[50,69,70],nest:51,net:[86,89],network:[61,86],never:[29,32,38,39,40,51,54,57,63,65,84],new_height:26,new_lay:50,new_mask:35,new_width:26,newarrai:42,newbi:16,newcom:84,newer:[23,40],newest:68,newli:[20,35],newlin:28,newpo:[58,66,87,88,89],newrect:64,newtonian:63,next:[18,27,29,30,35,36,38,41,46,58,64,68,73,85,88,89],nice:[58,61,62,65,87],nirav:57,node:44,noevent:[25,27,38,40,84],nofram:23,nois:[32,35,57],nomin:29,non:[17,19,23,24,26,29,31,33,41,42,44,50,51,56,58,64,84,86],none:[17,18,19,20,23,24,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,54,56,58,60,65,66,85,8
6,87,89],nonesound:[58,66],nonetheless:26,nonetyp:[30,35,50],nonlinear:51,nonzero:[35,45],nor:[50,70],normal:[9,12,19,20,28,29,35,36,44,45,50,51,53,55,58,64,65,71,87,89],normalize_ip:36,north:22,northeast:22,northwest:22,nosubprocess:53,notabl:[23,40,87],notat:39,notdef:29,note1:35,note2:35,note:[18,19,20,23,24,25,26,27,29,31,32,33,35,36,37,38,39,40,42,44,45,47,54,56,57,58,59,60,62,64,66,84,86,87,89],note_off:37,note_on:37,noteworthi:63,noth:[19,23,24,30,32,50,58,59,63,64,84,85,86],notic:[18,36,45,62,68,69,70,72,84,87,88,89],notifi:64,notimplementederror:[33,40],novel:74,now:[26,27,28,29,32,33,35,37,38,43,44,46,56,57,58,61,62,63,64,65,67,68,69,70,71,72,73,74,84,85,87,89],nowadai:63,nrp:57,num:[33,72,73,80,81],num_button:39,num_devic:37,num_ev:37,num_fing:25,num_threshold_pixel:56,num_track:19,number:[1,8,11,17,18,19,20,23,24,25,26,28,29,30,31,32,33,35,36,37,38,40,42,44,47,49,50,51,52,53,54,55,56,62,64,65,69,73,74,84],numbit:35,numer:[36,38],numev:[25,38],numfail:44,numlock:33,numpass:44,numpi:[1,16,26,42,43,49,51,52,63],o:[7,22,33,62],obj:[1,2,4,5,8,9,10,44],object:[1,2,4,5,6,7,9,10,12,13,16,18,19,21,22,23,24,25,26,27,28,29,30,31,32,35,38,39,40,43,44,46,47,48,49,52,54,56,57,59,61,62,63,64,65,66,68,69,70,86,89],obliqu:29,obscur:[61,84],obsolet:[23,44,51],obtain:[29,37],obviou:61,obvious:[64,85],occasion:64,occupi:85,occur:[29,33,37,38,42,68],octob:63,odd:[24,65,89],ofcod:89,off:[18,24,25,26,29,32,37,38,40,45,56,58,61,62,64,84,89],offcourt:89,offer:[22,23,25,47,57],offici:62,offset:[17,24,26,28,29,30,35,37,40,42,45,50,51,58],often:[19,31,36,51,61,63,65,69,84,86],ogg:[38,40],oh:89,ok:[65,84,85,87],okai:[69,72],old:[23,39,44,50,51,54,58,62,64,74,84,88],older:[27,32,40,44,64],oldest:68,oldlac:21,oliv:21,olivedrab1:21,olivedrab2:21,olivedrab3:21,olivedrab4:21,olivedrab:21,omit:[29,37,42,53],onc:[18,19,23,25,28,29,31,32,33,35,37,38,40,42,44,47,51,53,54,58,60,63,64,65,84,85,86,87,89],one:[17,18,20,22,23,24,25,26,27,28,29,30,31,32,33,35,38,39,40,42,43,44,45,46,47,49,50,51,52,54,56,57,58,59,60,61,62,63,64,65,67,68,69,70,71,73,84,85,86,87,88,89],ones:[29,40,51,60,64,87,88],onli:[3,17,18,19,20,23,24,25,28,29,31,32,34,35,37,38,39,40,42,43,44,45,46,47,48,49,50,51,52,53,54,56,57,58,59,60,61,62,63,64,65,67,68,72,73,74,84,85,86,87,89],ons:63,onscreen:23,onto:[12,20,27,28,29,30,35,36,50,51,58,62,63,65,68,84,85,87],ooo:84,opac:[48,84],opaqu:[20,26,29,35,43,48,51,52,65,84],open:[1,7,10,16,18,19,23,25,32,33,37,44,46,47,57,58,63,85,86],opencv:[18,44,57],opengl:[23,26],opentyp:29,oper:[15,20,22,23,24,25,27,33,36,42,44,45,48,50,51,52,56,64,65,85],operand:65,oppos:29,optim:[23,26,28,51,56,64,65],optimis:84,option:[1,17,18,20,23,24,26,28,29,30,31,32,35,37,38,39,40,43,44,50,51,53,54,56,57,58,60,62,65,84],orang:21,orange1:21,orange2:21,orange3:21,orange4:21,orangered1:21,orangered2:21,orangered3:21,orangered4:21,orchid1:21,orchid2:21,orchid3:21,orchid4:21,orchid:21,order:[1,18,23,28,29,33,37,38,40,42,44,50,51,53,54,58,62,65,68,84,89],orderedupd:50,ordinari:64,org:[39,58,59,60,62,63,64,65,70,78],organ:[16,50,64,84],organis:61,orient:[29,35,62],origin:[10,20,22,23,24,26,29,31,32,35,42,45,47,48,50,51,56,58,62,63,64,65,66],original_color:56,original_dest_color:56,orthogon:35,os:[10,18,23,26,31,32,37,44,46,58,66,84,86,89],oss:37,osx:[23,63],other:[0,18,19,20,23,24,25,26,27,28,29,30,31,32,33,35,36,38,40,42,43,44,45,46,48,49,50,51,52,53,54,56,57,58,61,62,63,64,68,70,71,84,85,86,87,88,89],otherarrai:42,othermask:35,othersurfac:35,otherwis:[1,2,6,10,13,17,18,22,23,24,26,29,31,35,36,37,38,39,43,45,46,50,51,52,56,62,72,73,89],ou
put:[68,76],our:[58,62,63,65,66,74,85],out:[18,24,29,32,35,37,38,40,57,58,59,61,62,64,65,84,85,86,87,88,89],outdat:67,outer:72,outgo:37,outlin:[24,29,35,57],outpng:26,output:[18,25,26,28,32,35,56,67,69,70,73,74,84,88],outputimag:26,outsid:[24,28,30,33,35,39,45,51,56,58,63,68],over:[22,23,29,35,38,40,42,44,47,50,57,60,62,63,64,65,66,84,86],overal:65,overboard:86,overcom:67,overflow:65,overhead:[51,64,84],overlai:64,overlap:[35,45,50,62,64,84,87,89],overlap_area:35,overlap_mask:35,overlin:29,overrid:[18,29,35,44,50,51,63,84],overridden:[23,29,35,38,45],overwrit:[17,24,26,32,47],overwritten:[24,32,47,51],own:[13,22,25,26,27,37,38,46,48,59,61,63,65,84,86],own_data_typ:46,owner:13,ownership:46,p1:48,p2:48,p:[31,33,43],pac:64,pack:[23,51,52,59],packag:[16,26,28,29,30,31,49,52,58,60,63,65],pacman:64,pad:[29,32,56],page:[33,39,46,65],pai:70,pain:84,painless:86,paint:[50,85],pair:[13,23,24,28,29,31,32,45,51],pajitnov:74,palegoldenrod:21,palegreen1:21,palegreen2:21,palegreen3:21,palegreen4:21,palegreen:21,palett:[23,28,31,35,51,52,56],palette_color:[35,56],paleturquois:21,paleturquoise1:21,paleturquoise2:21,paleturquoise3:21,paleturquoise4:21,palevioletr:21,palevioletred1:21,palevioletred2:21,palevioletred3:21,palevioletred4:21,panic:84,papayawhip:21,paper:84,parallax:84,param:24,paramet:[18,20,23,24,25,26,29,30,31,33,35,36,37,38,39,40,44,45,46,51,55,56,57,70,71],parametr:36,parent:[2,13,17,48,50,51],parenthesi:33,pars:[84,87],part:[23,24,25,29,30,35,51,53,56,62,63,64,65,68,69,70,72,86],parti:[18,63],partial:[23,24,37,51,84],particular:[18,29,40,42,49,50,53,54,59,84],particularli:37,pass:[9,10,15,17,18,19,20,22,23,24,25,26,28,29,31,32,35,37,38,39,40,41,42,44,46,50,51,53,54,56,58,62,63,64,66,86],past:[24,46,58,63,84],patch:[14,26,31,38,44,63],patel:57,path:[7,10,16,25,26,28,29,31,36,38,44,53,57,58,66,86,89],pathlib:[28,29,31,38],pathlib_path:38,pathnam:[38,58,86],pattern:[29,40],paus:[19,26,33,38,40,54,86],pbm:[31,46],pc:[17,37],pcf:29,pci:65,pcx:31,peachpuff1:21,peachpuff2:21,peachpuff3:21,peachpuff4:21,peachpuff:21,peek:25,pellet:64,penalti:[51,59,64],pend:37,peopl:[16,58,61,62,63,84],per:[16,24,25,29,30,31,32,35,49,50,51,52,53,54,58,59,63,65,69,77,84,88,89],perfect:35,perfectli:84,perform:[12,23,25,26,28,29,30,35,36,38,42,50,51,52,56,57,59,64,65,84,85,88],perhap:[62,64,84,86],period:[33,40],permiss:23,permit:[15,18,29],person:[62,63,88],perspect:[26,63],peru:21,pete:[58,59,60,62,63,64,65,84],pfr:29,pg:[22,58,66],pg_buffer:[1,2],pg_encodefilepath:10,pg_encodestr:10,pg_floatfromobj:1,pg_floatfromobjindex:1,pg_getdefaultwindow:1,pg_getdefaultwindowsurfac:1,pg_intfromobj:1,pg_intfromobjindex:1,pg_major_vers:14,pg_minor_vers:14,pg_mod_autoinit:1,pg_mod_autoquit:1,pg_patch_vers:14,pg_registerquit:1,pg_rgbafromobj:[1,11],pg_setdefaultwindow:1,pg_setdefaultwindowsurfac:1,pg_twofloatsfromobj:1,pg_twointsfromobj:1,pg_uintfromobj:1,pg_uintfromobjindex:1,pg_version_atleast:14,pg_versionnum:14,pg_view_p:1,pgbuffer_asarrayinterfac:1,pgbuffer_asarraystruct:1,pgbuffer_releas:1,pgbufproxy_check:2,pgbufproxy_getpar:2,pgbufproxy_new:2,pgbufproxy_trip:2,pgbufproxy_typ:2,pgcd_asid:3,pgcd_check:3,pgcd_new:3,pgcd_type:3,pgcdobject:3,pgchannel_asint:8,pgchannel_check:8,pgchannel_new:8,pgchannel_typ:8,pgchannelobject:8,pgcolor_check:4,pgcolor_new:4,pgcolor_newlength:4,pgcolor_typ:4,pgdict_asbuff:1,pgevent_check:6,pgevent_filluserev:6,pgevent_new2:6,pgevent_new:6,pgevent_typ:6,pgeventobject:6,pgexc_buffererror:1,pgexc_sdlerror:1,pgfont_check:7,pgfont_is_al:7,pgfont_new:7,pgfont_typ:7,pgfontobject:7,pglifetimelock_check:13,p
glifetimelock_typ:13,pglifetimelockobject:13,pgm:31,pgobject_getbuff:1,pgrect_asrect:9,pgrect_fromobject:9,pgrect_new4:9,pgrect_new:9,pgrect_norm:9,pgrect_typ:9,pgrectobject:9,pgrwops_fromfileobject:10,pgrwops_fromobject:10,pgrwops_getfileextens:10,pgrwops_isfileobject:10,pgrwops_releaseobject:10,pgsound_aschunk:8,pgsound_check:8,pgsound_new:8,pgsound_typ:8,pgsoundobject:8,pgsurface_assurfac:12,pgsurface_blit:12,pgsurface_check:12,pgsurface_lock:13,pgsurface_lockbi:13,pgsurface_locklifetim:13,pgsurface_new:12,pgsurface_prep:13,pgsurface_typ:12,pgsurface_unlock:13,pgsurface_unlockbi:13,pgsurface_unprep:13,pgsurfaceobject:[1,12,13],pgvidinfo_asvidinfo:5,pgvidinfo_check:5,pgvidinfo_new:5,pgvidinfo_typ:5,pgvidinfoobject:5,pgyam:[0,5],phase:[69,70],phi:36,photo:44,photograph:56,physic:[17,23,32,61,63,86,88,89],pi:[24,89],pick:[23,44,56,59,62,64],pictur:[31,63],pie:30,piec:[29,84],pile:26,pinch:25,pink1:21,pink2:21,pink3:21,pink4:21,pink:21,pipe:23,pitch:[37,51,62],pitch_bend:37,pixel2d:65,pixel3d:65,pixel:[16,17,18,20,22,23,24,26,28,29,30,31,35,41,45,48,51,56,58,59,63,65,85,88,89],pixel_arrai:42,pixelarrai:[16,26,42,51],pixelcopi:[26,42,43,52],pixelformat:18,pixels2d:[52,65],pixels3d:[52,65],pixels_alpha:[52,65],pixels_blu:52,pixels_green:52,pixels_r:52,pixels_within_threshold:56,place:[1,23,25,27,29,34,36,37,39,42,44,45,46,50,51,52,53,58,62,63,64,65,68,84,87,88],placehold:44,placement:44,plai:[8,16,19,23,26,32,37,40,47,58,62,63,64,66,70,74,84,86,89],plain:46,plan:[51,63],plane:[41,51],plant:65,platform:[16,18,23,24,25,30,33,35,37,38,41,44,46,53,54,57,58,59,63,84,86,88],playabl:49,playback:[19,26,38,40,63],player1:[61,89],player2:89,player:[26,50,58,61,62,64,68,70,73,74,84,88,89],playerimag:62,playerpo:62,playersprit:89,playmu:26,playstat:32,pleas:[23,44,84],plenti:61,plot:61,plu:[28,29,33,37,44,64],plug:[25,32],plum1:21,plum2:21,plum3:21,plum4:21,plum:21,pm_recommended_input_devic:37,pm_recommended_output_devic:37,pmdeviceid:37,png:[26,31,58,63,65,66,67,68,69,70,75,76,77,78,84,86,87,88,89],pnm:31,po:[25,27,35,40,50,58,62,66,72,73,80,81],point:[15,17,19,20,22,24,26,28,29,30,35,36,37,45,48,50,56,57,62,63,64,65,67,72,84,89],pointer:[9,12,58,84],polar:36,polish:[22,63],poll:[25,27,37,84],polygon:[24,30],pong:[61,86,89],poor:61,poorli:61,pop:44,popular:[26,32,66],port:[18,26,37,44,48],portabl:[33,37,63],portion:[12,23,45,48,51,56,63,64],portmidi:37,posit:[1,9,17,19,22,23,24,25,26,28,29,30,32,33,35,38,39,40,44,45,48,50,51,53,55,57,58,62,63,64,66,68,69,84,85,87,88,89],possibl:[18,23,28,29,31,32,50,51,57,58,61,63,65,67,68],post:[25,27,38,47,54],potenti:[31,64,84,88],powderblu:21,power:[18,32,33,38,64,74],ppem:29,ppm:[31,46],pr:39,practic:[84,86],pre:[20,29],pre_init:38,prealloc:51,prebuilt:16,preced:[35,68],precis:[23,38,51,65,84],predecessor:[65,84],predefin:[25,46],predomin:43,prefer:[23,30,31,33],prefix:58,prematur:84,premul_alpha:20,prepar:[26,40,66],present:[44,48,60,63,64],preserv:[20,44,45,51],preset:[22,38],press:[26,32,33,39,47,57,58,62,70,71,84],pressur:55,pretend:62,pretti:[59,62,64,65,84,85,88,89],prevar:84,prevent:[25,89],prevent_display_stretch:23,previou:[1,23,29,38,39,48,50,54,56,58,62,64,65,69,70,72,73,85,89],previoulsi:40,previous:[18,20,25,28,29,31,38,40,44,59,85],primari:39,primarili:[29,44,48],prime:48,primer:65,primit:30,principl:[88,89],print:[25,28,32,33,36,39,44,45,46,57,58,59,62,66,68,70,71,72,73,76,84,86,89],printboard:[73,81],printf:[67,75],prior:23,prioriti:18,privat:[51,58],probabl:[26,62,64,68,84,85],problem:[43,44,58,61,69,84,86],procedur:[67,68],process:[18,25,26,27,32,
46,52,53,54,60,62,67,68,70,72,73,77,84],processor:[54,56],procur:18,produc:[29,84],product:[36,61],profil:[23,84],profit:18,program:[16,19,23,25,27,32,33,37,38,41,44,45,53,54,58,61,62,63,64,65,66,67,68,69,70,71,73,74,84,85,86,87,88],programm:[63,64,84,86,87],programmat:44,progress:43,project:[26,36,61,63,67,68,69,70,71,72,73,76,77,78,79,80,81,84,86,87,89],prolog:73,promis:[58,62],prompt:[44,63,65],proper:[29,33,38,39,64,65],properli:[10,16,25,39,53,58,62,64,65,84],properti:[17,25,29,39,50,52],propos:63,protect:[26,58],protocol:[2,16,43,51],proud:62,provid:[16,18,20,23,26,27,28,29,33,35,36,38,41,42,44,45,50,51,53,54,56,57,58,67,84,86,87],proxi:[2,17],ps4:32,ps:[26,32],pseudo:87,pt:56,pull:[18,58,66],pummel:[58,66],pump:[25,27,32,88,89],punch:[58,63,66],punch_sound:[58,66],punchabl:58,punctuat:28,pure:[44,51],purpl:21,purple1:21,purple2:21,purple3:21,purple4:21,purpos:[1,18,48,57,61],push:[33,71,88],pushabl:32,put:[32,46,54,60,61,63,64,66,68,85,86,87],puyopuyo:[67,75],pxarrai:42,py:[16,23,26,44,46,53,56,61,62,63,65],py_buff:[1,17],pybuf:1,pybuffer_releaseproc:1,pycdio:19,pygam:[2,11,15,21,68,69,70,71,72,73,74,75,76,77,78,79,80,81,86,87,89],pygame_blend_add:12,pygame_blend_alpha_sdl2:[12,44],pygame_blend_max:12,pygame_blend_min:12,pygame_blend_mult:12,pygame_blend_premultipli:12,pygame_blend_rgba_add:12,pygame_blend_rgba_max:12,pygame_blend_rgba_min:12,pygame_blend_rgba_mult:12,pygame_blend_rgba_sub:12,pygame_blend_sub:12,pygame_bufferproxi:2,pygame_camera:44,pygame_displai:44,pygame_force_scal:44,pygame_freetyp:[7,28,44],pygame_hide_support_prompt:44,pygame_mix:8,pygameapi_base_numslot:11,pygamevers:44,pyobject:[1,2,3,4,5,6,7,8,9,10,12,13],pyopengl:[26,31,63],pypi:42,pyportmidi:37,pysdl:[63,84],pythagorean:[36,69],python26:26,python2:26,python3:18,python:[1,2,3,4,5,6,7,8,9,10,12,13,15,16,17,18,19,22,26,28,29,31,38,43,44,45,50,51,53,58,60,61,62,64,66,67,68,85,86,87],pytypeobject:[2,3,4,5,8,9,12,13],pyunicode_asencodedstr:[10,44],q:33,qce:18,quadrant:24,quadruplet:[24,30],quake3:63,qualiti:[26,44,58,63],quaternion:67,queri:[23,37,46,59],query_imag:[18,57],question:[33,62,84],queu:[25,38,40],queue:[6,18,23,32,33,34,37,38,39,40,47,54,58,63,84,88],quick:[25,26,32,51,53,60,64,65,84,88],quicker:[50,51,56,64],quickest:51,quickli:[31,58,59,62,63,64,65,84],quietli:25,quit:[1,19,22,23,24,25,26,28,29,32,37,38,39,44,47,51,53,57,58,62,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,85,88,89],quiz:73,quot:33,quotedbl:33,r:[9,20,26,30,32,33,36,42,43,51,56,64,65,71,72,73,79,80,81],r_margin:[72,73,80,81],radial:[36,65],radian:[24,30,36,87,89],radii:[24,30],radiu:[24,30,50],radom:74,rais:[1,2,3,4,5,6,7,8,9,10,15,17,20,22,23,24,25,26,28,29,30,31,33,35,36,37,38,40,43,44,45,46,48,49,50,51,52,56,57,60,65,86,89],ramp:23,ran:85,rand:89,randint:[73,81,89],random:[53,61,73,74,81,86,89],randomli:73,rang:[20,23,24,25,28,29,32,35,36,37,38,42,44,47,50,57,62,63,65,68,71,72,73,79,80,81],rank:84,rapid:32,rapidli:[33,62],rare:[44,51,84],rate:[38,40,50,69,84],rather:[28,29,30,33,44,50,54,56,69,85,87],ratio:[45,50],raw:[17,31,42,43,51,52,65],rb:46,re:[22,26,29,44,56,59,61,62,64,71,85,87,88],reach:63,read:[10,17,20,25,26,29,37,39,46,48,50,51,61,62,65,84],readabl:[20,38,68,86],readi:[18,22,32,48,53,57,58,62,63],readlin:22,readm:16,readonli:50,real:[25,28,29,37,38,51,57,59,62,64,70,84],realist:[86,89],realiti:59,realiz:[63,84],realli:[23,41,56,58,59,62,63,64,65,70,85,87,88],realtim:[51,63,65],reason:[23,27,28,29,38,39,56,57,61,62,64,84,87],rebel:63,rebind:47,recalcul:29,recap:61,receiv:[23,27,32,33,39,47,84],recent:[58,63,64,65]
,reciev:23,recogn:[17,25,29,43,45,56],recogniz:31,recommend:[23,25,27,28,50,51],recommended_input_devic:37,recommended_output_devic:37,recompil:89,record:[18,29,52,53,72,84],recreat:[26,50,53],rect1:45,rect2:45,rect:[0,16,24,29,30,33,35,41,45,48,50,51,56,57,58,62,63,64,66,71,72,73,79,80,81,86,87,88,89],rect_area_pt:56,rect_list:50,rect_sequ:45,rectangl:[9,16,23,24,26,28,29,30,33,45,48,50,51,56,64,68,84,85,87,89],rectangle_list:23,rectangular:[9,29,30,35,42,48,50,51,62,63,71,84],rectstyl:84,red1:21,red2:21,red3:21,red4:21,red:[1,18,20,21,23,24,31,42,50,51,52,65,68,69,70,71,72,73,76,77,78,79,80,81,85],redimg:65,redistribut:18,redraw:[41,62],redrawn:[23,41],reduc:[29,38,64,88],reentrant:53,ref:[70,78],refcount:10,refer:[1,13,17,23,24,31,33,35,42,46,47,49,51,52,57,58,59,62,63,64,65,68,69,70,71,72,73,84,88],referenc:[52,58,64,65],reflect:36,reflect_ip:36,regard:[24,68],regardless:[28,44],region:[31,51,56,86],regist:[1,44,46,87],register_quit:44,registri:37,regular:[16,23,24,28,40,41,50,51,56,64],regularli:32,reinit:[88,89],reiniti:38,reinitialis:32,rel:[25,32,37,39,40,41,48,50,61,86],relat:[1,23,29,34,39,43,47,50,84],relationship:50,relative_mous:48,releas:[1,13,17,18,23,29,30,32,33,37,39,42,44,46,51,58,63,84,86,88,89],release_buff:1,relev:[29,84],reli:[9,88],reliabl:[18,23,37,84],remain:[20,40,45,51,52,85],remap:[42,47],rememb:[25,39,50,51,59,61,63,64,65,68,73,84,86,87],remind:68,remov:[13,17,18,25,27,28,32,36,48,50,56,58,64,89],remove_intern:64,remove_sprites_of_lay:50,renam:47,render:[16,23,24,26,32,41,48,50,58,63,66,68,69,70,71,72,73,76,77,78,79,80,81,84,85,87,88],render_raw:29,render_raw_to:29,render_to:29,renderclear:[50,64],renderplain:[50,58,64,66,89],renderupd:[26,50,64],renderupdatesdraw:64,repaint:[25,27,50],repaint_rect:50,repcolor:42,repeat:[23,33,35,38,40,44,61],repeatedli:[50,51,54,56,58],replac:[1,23,27,29,38,42,43,44,51,52,54,62,65,68],report:[26,32,44,56,84],repositori:[44,84],repr:44,repres:[1,12,13,14,16,17,19,20,22,23,24,25,29,31,32,33,35,38,39,40,41,44,45,47,48,52,54,56,58,62,63,64,65,84,87],represent:[1,4,9,16,21,24,30,42,56,58],reproduc:18,request:[1,23,26,28,29,35,38,49,51,52,59],requir:[17,18,23,25,26,28,31,33,35,36,37,38,44,45,46,47,50,51,55,56,59,61,62,63,64,65,68,70,84,85,86,88],resampl:38,rescal:26,resembl:[26,43,52],reserv:[25,37,38],reset:[29,32,37,38,40,50,58,89],resist:84,resiz:[16,23,25,26,35,45,48,56,58,85],resolut:[23,29,41,51,54,56,58,59],resolv:[46,68,87],resourc:[26,38,40,44,59,61,66,87],respect:[1,22,23,24,33,35,36,46,50,51],respond:[25,27,62],respons:53,rest:[19,25,27,57,58,62,63,65,84],restart:[40,86],restor:[23,25,48],restrict:51,result:[20,24,28,29,35,36,37,40,42,51,53,56,58,62,63,64,65,68,69,70,73,76,84,89],resultscreen:[69,70,77,78],resum:[19,38,40],retail:63,retain:[18,51],retrac:23,retriev:[14,36,38,46,84,88],reus:[18,86],reusabl:86,rev:44,revers:[1,58,63,65,89],revis:[44,61,63],reward:[58,63],rewind:40,rewound:26,rgb:[18,20,23,24,28,30,31,41,43,51,57,58,63,65,84,85],rgba:[1,4,20,23,24,30,31,51],rgba_premult:31,rgbarrai:65,rgbx:31,rich:58,rid:57,ridicul:61,right:[20,23,24,25,29,30,32,33,35,38,42,44,45,47,50,51,58,62,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,87,88,89],rle:51,rleaccel:[51,58,66],rleaccelok:51,road:62,roll:[32,39,64],root:[36,53,84],rosybrown1:21,rosybrown2:21,rosybrown3:21,rosybrown4:21,rosybrown:21,rotat:[25,26,29,36,48,50,56,58,63,66],rotate_i:36,rotate_ip:36,rotate_ip_rad:36,rotate_rad:36,rotate_rad_ip:36,rotate_x:36,rotate_x_ip:36,rotate_x_ip_rad:36,rotate_x_rad:36,rotate_x_rad_ip:36,rotate_y_ip:36,rotate_y_ip_rad
:36,rotate_y_rad:36,rotate_y_rad_ip:36,rotate_z:36,rotate_z_ip:36,rotate_z_ip_rad:36,rotate_z_rad:36,rotate_z_rad_ip:36,rotozoom:56,round:[20,24,37,38,88],routin:[1,18,25,28,29,51,56,58,59,68],row1:65,row2:65,row:[35,42,51,65],royalblu:21,royalblue1:21,royalblue2:21,royalblue3:21,royalblue4:21,rr:20,rrggbb:20,rrggbbaa:20,rt:32,rudder:32,rudimentari:26,ruin:65,rule:[59,70,73,74,89],rumbl:[32,47],run:[23,25,26,31,32,38,44,53,54,56,58,59,62,63,64,65,66,67,84,85,89],run_speed_test:26,run_test:53,run_tests__test:53,rundown:26,runner:53,runtim:[23,38,54,56,63],runtimeerror:[29,44],rw:10,rwobject:0,rx:30,ry:30,s:[9,12,13,15,17,20,22,23,24,25,26,28,29,31,33,35,36,37,38,39,40,41,42,43,44,45,46,47,48,50,51,54,58,59,61,63,64,65,67,68,69,70,71,74,84,85,86,87,88,89],saddlebrown:21,safe:[19,23,25,28,29,32,37,38,42,44,47,50,51,57,60],sai:[51,61,62,64,68,84],said:[24,61,68,70,74,84,85],sake:85,salmon1:21,salmon2:21,salmon3:21,salmon4:21,salmon:21,sam:63,same:[10,18,19,20,22,23,24,25,28,29,30,31,32,33,35,36,38,39,42,43,44,45,47,50,51,52,54,56,57,58,59,62,63,64,65,67,68,70,71,72,73,84,85,89],sampl:[16,20,38,40,56,57,62,64,65,88],san:29,sandybrown:21,satisfactori:86,satisfi:23,satur:18,sauf:35,save:[16,18,31,61,63,84],save_extend:31,saw:[57,58,62],scalabl:29,scalar:[36,58],scale2x:56,scale:[18,23,24,26,29,31,35,36,44,48,50,56,58,63,65,66],scale_to_length:36,scaledown:65,scaler:26,scaletest:26,scaleup:65,scan:19,scancod:[25,33],scanf:[67,75],scanlin:62,scant:26,scene:57,school:39,scipi:65,scope:[42,65],score:[26,50,61,70,89],scoreboard:61,scoreup:61,scrap:[16,46],scrap_bmp:46,scrap_clipboard:[26,46],scrap_pbm:46,scrap_ppm:46,scrap_select:46,scrap_text:46,scratch:64,scratchi:38,screen:[1,5,16,22,24,25,26,28,31,32,33,34,39,44,45,46,48,50,51,57,58,59,61,63,64,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,87,88,89],screen_dim:26,screen_height:23,screen_rect:50,screen_width:23,screensav:[23,44,69],screenshot:58,script:[29,60,63,66],scroll:[25,26,39,51,62,63,64,84],scrollabl:26,scroller:84,scrollock:33,sdl1:[19,23,41,51],sdl2:[25,32,38,39,44,47,51,55],sdl:[1,3,6,8,10,12,13,23,25,26,27,32,33,38,41,43,44,47,48,56,59,63,84],sdl_audiodriv:44,sdl_delai:54,sdl_event:6,sdl_gfx:30,sdl_hint_video_allow_screensav:23,sdl_imag:[31,63],sdl_mixer:[38,40],sdl_rect:[9,12],sdl_rwop:10,sdl_surfac:12,sdl_ttf:[28,29,44],sdl_video:51,sdl_video_allow_screensav:44,sdl_video_cent:44,sdl_video_window_po:44,sdl_video_x11_net_wm_bypass_compositor:44,sdl_videodriv:[23,44],sdl_videoinfo:5,sdl_window:1,sdl_windowid:23,sdlerror:40,sdlversion:44,seagreen1:21,seagreen2:21,seagreen3:21,seagreen4:21,seagreen:21,search:[16,28,29,35,45,50,56],search_color:56,search_surf:56,seashel:21,seashell1:21,seashell2:21,seashell3:21,seashell4:21,second:[17,19,20,22,24,26,30,32,36,38,40,42,49,50,53,54,58,60,62,63,65,69,70,71,72,77,84,88,89],secondari:84,section:[19,51,58,61,62,64,65,67,68,86,89],secur:86,see:[18,19,20,23,24,26,28,29,30,31,32,33,35,36,37,38,39,43,44,45,47,50,51,52,56,57,58,60,61,62,63,64,65,84,85,87,88,89],seed:[53,65],seek:10,seem:[31,46,61,62,63,64,68,72,73,84,85,87],seemingli:61,seen:[23,61,65,88,89],segment:[24,30],select:[18,23,29,33,35,37,38,40,44,46,53,59,67,69,73,84],self:[20,32,35,36,50,51,56,57,58,62,64,66,87,88,89],sell:62,semi:[26,84],semicolon:33,semiton:37,send:[25,37,38,40,89],sens:[62,87,88,89],sensit:84,sent:[23,25,32,33,38,40],separ:[18,22,26,28,29,37,38,44,50,51,52,53,56,57,61,62,64,65,72,84,86],sequenc:[1,9,15,22,23,24,25,29,30,33,35,37,39,42,45,50,51,56,59,63,64,84,88],sequenti:24,seri:[37,63,70],serv:[27,63],server:[26
,39],servic:18,session:53,set:[1,7,17,18,20,22,23,24,25,28,29,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,54,56,58,60,62,65,67,68,70,71,72,73,84,85,86,87,88,89],set_allow:25,set_allow_screensav:23,set_alpha:[43,51],set_at:[35,51,65,84],set_behavior:56,set_block:[25,84],set_bold:28,set_capt:[22,23,24,32,58,66,68,69,70,71,72,73,76,77,78,79,80,81,85,89],set_clip:[50,51],set_color:56,set_colorkei:[43,51,58,66,84],set_control:[18,57],set_cursor:[22,39],set_default_resolut:29,set_endev:[38,40],set_error:44,set_eventst:47,set_fullscreen:48,set_gamma:23,set_gamma_ramp:23,set_grab:[25,33,39],set_icon:[23,48],set_instru:37,set_ital:28,set_length:20,set_loc:41,set_map:47,set_mask:51,set_mod:[1,22,23,24,32,33,34,39,44,46,48,51,57,58,59,62,63,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,89],set_modal_for:48,set_num_channel:38,set_palett:[23,51],set_palette_at:51,set_po:[39,40],set_repeat:33,set_reserv:38,set_shift:51,set_smoothscale_backend:56,set_text_input_rect:33,set_tim:54,set_timing_threshold:50,set_timing_treshold:50,set_underlin:28,set_viewport:48,set_vis:[39,58,66],set_volum:[38,40],set_window:48,setcolor:35,setsurfac:35,settabl:44,setup:[22,26],sever:[16,22,23,24,26,29,38,45,50,54,59,60,62,63,64,65,84,85,89],sf:42,sfnt:29,sg:22,shade:24,shall:18,shallow:35,shape:[16,17,42,43,51,63,65,72,84],share:[16,25,28,31,32,33,38,46,48,51,52,54],sharp:24,she:85,shell:26,shift:[23,26,33,35,42,44,51,59,65],shinner:[58,59,60,62,63,64,65,84],shoot:63,shortcut:64,shorter:[23,58,65],shortest:36,shot:64,should:[18,19,22,23,25,26,28,29,30,31,32,33,35,36,37,38,42,44,45,47,50,51,54,56,58,59,61,62,63,64,65,68,69,84,85,86,87,89],shoulder:47,shouldn:88,show:[22,23,26,29,33,39,41,48,53,56,57,58,59,62,65,88,89],show_output:53,showcas:[26,84],shown:[23,25,33,41,57,62,87],shrink:[45,84,89],shrinkag:56,shrunk:51,shtml:[86,89],shut:[23,29,32,44,69],shutdown:[8,63],side:[23,24,42,45,47,58,61,63,87,88],sienna1:21,sienna2:21,sienna3:21,sienna4:21,sienna:21,sign:[17,33,37,38,39,44],signal:[25,40,85],signific:51,silenc:53,silent:[23,60,84],silver:21,similar:[29,31,32,42,49,50,51,54,58,62,64,65,85,86,88,89],simpl:[16,22,23,24,25,26,32,42,50,51,56,57,58,60,61,62,63,64,65,66,67,69,71,72,73,74,84,85,86,89],simpler:[62,63,64],simplest:[57,71],simpli:[22,25,37,41,44,50,57,58,59,62,63,65,67,84,85,87,89],simul:[58,67],simultan:[38,67],sin:[87,89],sinc:[19,23,25,31,32,36,38,39,44,48,50,51,52,54,57,58,59,60,62,63,64,65,84],singl:[17,20,22,23,24,25,28,29,30,32,33,38,40,41,42,43,45,50,51,52,53,58,60,61,62,63,64,65,67,68,69,70,71,73,85],sit:84,site:26,situat:[23,51,58,64,65],six:[38,40,61,63],sizabl:84,size:[9,17,18,22,23,24,25,26,28,29,31,35,38,39,41,42,43,45,48,50,51,52,56,57,58,59,61,62,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85],sizeabl:23,sizeal:22,sizenesw:22,sizenws:22,sizeof:35,sizer_x_str:22,sizer_xy_str:22,sizer_y_str:22,skew:28,skip:[23,35,38,58],skyblu:21,skyblue1:21,skyblue2:21,skyblue3:21,skyblue4:21,sl:22,slash:[22,33],slateblu:21,slateblue1:21,slateblue2:21,slateblue3:21,slateblue4:21,slategrai:21,slategray1:21,slategray2:21,slategray3:21,slategray4:21,slategrei:21,sleep:[25,54],slerp:36,slice:[36,42,43,65,84],slight:[58,64],slightli:[20,25,26,39,40,51,54,58,64,84,85],slope:24,sloppi:26,slot:0,slow:[23,48,51,57,59,62,65,84,85],slower:[24,50,51,54,62,84],slowest:[44,51,85],small:[18,22,23,28,29,32,39,44,45,58,62,63,65,71,72,85,87],smaller:[18,23,35,38,45,51,71,72,84],smallest:[23,51,59],smart:[62,64],smooth:[28,58,84],smoother:62,smoothli:[56,62,84],smoothscal:[26,56],sn9c101:18,snapshot:5
7,sndarrai:[16,26,38,49,63],snow1:21,snow2:21,snow3:21,snow4:21,snow:21,so:[11,17,18,22,23,24,26,28,29,30,31,32,33,36,38,42,43,44,45,50,51,52,53,56,58,59,61,63,64,65,67,68,69,70,71,72,74,84,85,86,87,88,89],socket:[86,89],soften:65,softwar:[16,18,23,24,30,37,41,50,51,58,63,84],solarwolf:[63,84],solid:[24,28,30,50,51,52,56,65],solut:[84,85],solv:[38,61,67],some:[16,18,22,23,24,25,27,28,29,30,31,32,34,36,37,38,39,42,44,45,46,50,56,57,58,59,60,61,63,64,65,67,68,69,70,72,84,85,86,87,88,89],someimag:26,someth:[18,24,26,32,39,56,57,59,62,63,64,65,68,69,70,84,85],sometim:[22,23,27,44,64,84],somewhat:[26,64],somewher:[64,68],sonix:18,soon:[40,44,64,88],sophist:[86,87],sorri:[57,59],sort:[23,25,26,27,50,58,59,62,64,84,89],sound:[8,16,26,40,58,61,63,64,66,67,70,73,84,86],sound_array_demo:26,sour:48,sourc:[16,18,26,31,37,38,42,43,50,51,56,58,61,62,63,64,67,68,69,70,84,86,89],source_rect:50,sourcecod:[68,69,70,76,77,78],south:22,southeast:22,southwest:22,space:[20,28,29,31,33,37,42,62],sparingli:48,speak:[85,86],speaker:38,special:[18,22,23,25,33,35,38,43,50,56,58,59,62,64,65,68,84,85],special_flag:[48,50,51],specif:[23,25,28,29,33,35,38,43,46,50,51,52,57,58,59,64,65,68,70,71,72,84],specifi:[14,18,21,23,28,29,31,32,35,37,38,40,46,47,50,56,59,65,84,89],sped:24,speed:[25,26,29,44,54,62,63,64,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,87,88,89],spend:84,spent:[63,74,84],spheric:36,spin:[26,58,66,88,89],spite:50,split:[22,29,58,63,64,66,84],sport:63,spot:84,spread:84,spring:56,springgreen1:21,springgreen2:21,springgreen3:21,springgreen4:21,springgreen:21,sprite1:50,sprite2:50,sprite:[16,26,35,50,61,66,84,88,89],sprite_dict:50,sprite_list:50,spritecollid:[50,64,87],spritecollideani:50,spritedict:64,sprites_click:84,sqrt:[36,69],squar:[24,32,36,72],src:65,src_c:[0,11,14],srcalpha:[35,43,51,56],srccolorkei:51,srcobj:12,srcrect:[12,48],sse:[26,56],stabl:43,stack:25,stage:57,stai:[61,65,70],stand:26,standard:[1,22,25,27,41,43,44,46,50,57,58,59,61,63,64,65,68],star:26,starfield:[26,84],start:[1,17,18,19,24,25,26,29,30,32,33,35,38,39,40,42,44,45,47,50,51,57,58,59,61,62,63,64,65,66,67,69,74,84,85,86,88,89],start_angl:[24,30],start_index:65,start_po:24,start_text_input:33,startup:[8,38],state:[23,25,26,29,31,32,33,37,38,39,46,47,48,50,51,58,64,66,67,84,88,89],statement:[68,69,70,71,72,89],stationari:64,statu:[26,32,37,70],stderr:53,stdin:65,stdout:53,steelblu:21,steelblue1:21,steelblue2:21,steelblue3:21,steelblue4:21,steep:24,stencil:23,step:[16,30,32,42,58,65,68,70,73,84,88],stereo:[23,38,49],stick:[32,47,62,65,84],still:[23,31,38,39,43,46,47,48,51,57,58,61,62,63,64,65,71,72,73,84,85,88,89],stop:[1,18,19,24,30,32,33,38,39,40,44,47,54,57,62,64,84,88,89],stop_angl:[24,30],stop_rumbl:[32,47],stop_text_input:33,store:[9,22,23,25,29,30,35,50,51,52,58,62,64,68],str:[17,18,20,29,32,35,37,44,47,49,52,71,72,73,79,80,81],straight:[24,30,62,63,65,86],straighten:62,straightforward:[62,63],strang:[23,63],strateg:84,strategi:84,stream:[16,37,38,48,63],strength:[29,32,47],stress:63,stretch:[23,26,28,29],strict:[18,61],strictli:[30,85],stride:[17,42],strike:29,string:[10,15,17,18,19,20,22,23,25,28,29,31,32,33,37,38,41,44,46,48,49,51,52,53,56,68,84,86,87],strip:51,stripe:65,stroke:24,strong:29,strongli:[23,50],struct:[1,3,5,6,7,8,10,17,29,42,43,51,52],structur:[8,11,43,85,87,88],stuck:62,studi:[65,87],studio:63,stuff:[46,57,61,62,65],style:[25,26,29,32,42,63,86],style_default:29,style_norm:29,style_obliqu:29,style_strong:29,style_underlin:29,style_wid:29,sub:[50,84],subarrai:42,subclass:[2,3,4,5,6,7,8,12,13,17,20,35,45,50,51],
subdirectori:[26,58],subgroup:67,subject:[23,44],submask:35,submit:84,submodul:[44,53],subpackag:53,subprocess:53,subprocess_ignor:53,subscript:[36,42],subsect:62,subsequ:57,subset:[22,23,84],substanti:84,substitut:[18,22],substr:37,subsubsurfac:51,subsurfac:[13,51,56],subtract:89,subview:42,succe:[18,23],succeed:18,success:[1,2,6,9,10,12,23,44,84],successfulli:[32,47],sudden:63,sufac:42,suffix:45,suggest:[42,58,84],suit:[27,51,59],suitabl:[23,28,29,46,63,68,86],sum:[24,74],summari:64,summer:63,suppli:[18,35,37,38,57,58,65,85],support:[1,7,17,18,19,20,22,23,24,25,26,28,29,31,32,33,35,36,37,38,39,40,41,42,43,44,47,48,49,50,51,52,54,56,57,58,59,63,84,87],suppos:[56,84],sure:[11,50,54,57,58,63,64,65,69,70,84,89],surf:[22,26,29,30,42,50,51,56],surfac:[0,1,2,11,13,16,18,20,22,23,24,26,28,29,30,31,35,36,39,41,43,46,48,50,51,57,58,59,62,63,64,66,85,89],surface_dest:50,surface_to_arrai:43,surfarrai:[16,17,26,43,49,51,52,63,84],surfdemo_show:65,surflock:0,surfobj:13,surpris:[62,63],surrog:[15,29],surround:[56,85],suspend:84,svg:31,svgalib:23,swap:[23,31,42,45],swatch:20,swig:63,switch_lay:50,swizzl:36,swsurfac:[51,84],sy:[15,44,53,62,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,86,89],symbol:33,sync:[23,25],synchron:37,synonym:25,syntax:[65,84],syntaxerror:44,synthes:37,sysfont:[28,29],sysrq:33,system:[15,16,18,19,22,23,25,26,27,28,29,32,33,36,37,39,40,44,48,49,51,52,53,59,68,70,84,85,88],system_cursor_arrow:22,system_cursor_crosshair:22,system_cursor_hand:22,system_cursor_ibeam:22,system_cursor_no:22,system_cursor_sizeal:22,system_cursor_sizen:22,system_cursor_sizenesw:22,system_cursor_sizenws:22,system_cursor_sizew:22,system_cursor_wait:22,system_cursor_waitarrow:22,systemexit:[86,89],t:[18,19,23,24,26,27,28,29,32,33,36,40,44,46,48,49,50,51,52,54,56,57,58,61,62,63,64,65,68,69,70,71,72,73,74,85,86,87,88,89],ta:61,tab:33,tabl:[23,43],tag:[40,53],taka:18,takafumi:18,take:[15,22,24,25,26,29,31,32,33,35,36,37,38,40,42,45,50,51,53,56,57,58,59,63,64,65,68,84,85,86,88,89],taken:[4,21,23,26,27,38,39,87],talk:62,tan1:21,tan2:21,tan3:21,tan4:21,tan:21,tango:29,target:[29,42,43,45,48,50,58,64,66],target_textur:48,task:[59,86],tau:24,teach:62,teal:21,technic:41,techniqu:[50,84],tell:[10,38,39,44,58,59,62,64,84,88],temp:9,temporari:[56,65],temporarili:[19,24,33,38,40,51,52],tempt:84,temptat:84,ten:54,tenni:[88,89],term:[62,64,85],termin:[1,37,44,58,68],terminolog:[39,84],terrain1:62,terrain2:62,test:[16,19,25,26,28,29,31,32,33,36,38,39,41,44,45,47,50,51,56,58,59,60,65],test_threshold_dest_surf_not_chang:56,test_util:56,testin:37,testout:37,testsprit:26,tetri:74,text:[25,26,28,29,33,46,66,68,69,70,71,84,85,87],textbitmap:32,textedit:[25,33],textinput:[25,33],textmarker_str:22,textpo:[58,66,85],textprint:32,textstr:32,textur:[30,48],textured_polygon:30,textureorimag:48,tga:[31,63],than:[16,18,19,23,24,25,26,28,29,30,31,32,33,35,36,37,38,40,44,45,47,50,51,52,54,55,56,57,58,59,60,62,63,64,65,69,70,74,85,86,87,88,89],thank:88,the_arg:12,the_dirty_rectangl:84,thei:[1,11,18,19,22,23,24,25,26,28,29,32,35,36,38,39,44,45,46,47,50,51,56,58,59,62,63,64,65,68,69,71,72,86,87,88,89],them:[16,22,24,25,26,27,31,33,34,35,38,42,44,49,50,51,52,57,58,59,62,63,65,71,84,85,86,87,88,89],themselv:[51,61,64,85],theorem:36,theori:18,therefor:[24,30,35,50,64,84,87],theta:[35,36],thi:[1,2,3,4,5,6,7,9,10,11,12,13,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,84,85,86,87,88,89],thick:[24,28],thickarrow_str:22,thin:58,t
hing:[19,25,27,40,44,57,58,59,60,61,62,63,64,65,85,87],think:[18,23,62,64,65,70,74,84,89],third:[17,18,22,30,36,62,63,65,71,85],third_surfac:56,thirteen:16,thistl:21,thistle1:21,thistle2:21,thistle3:21,thistle4:21,thorough:89,those:[16,18,22,23,24,25,26,28,29,39,42,43,50,57,61,62,64,65,84,86,87,88,89],though:[18,23,30,37,38,43,53,61,65,85,86,87,88,89],thought:84,thousand:49,thread:[10,18,25,26,27,28,30,38,39,50,53,57],three:[36,38,44,51,52,64,65,68,84,85,88],threshold:[35,42,50,56],threshold_behavior_from_search_color:56,threshold_color:56,throttl:32,through:[2,18,25,27,29,35,37,38,44,50,51,58,61,62,63,64,74,84,85,88],throughout:34,thrown:[56,89],thru:63,thu:[36,42,87],thumbnail:26,ti:40,tick:[22,24,32,39,54,58,66,69,71,72,73,77,79,80,81,89],tick_busy_loop:54,tie:[57,72],tif:31,tiff:[31,46],tile:62,time:[14,16,18,19,22,23,24,25,26,28,29,32,34,37,38,39,40,44,48,49,50,51,53,56,57,58,59,61,62,63,64,65,66,67,68,69,70,71,72,73,74,77,78,79,80,81,84,85,86,87,89],time_m:50,time_out:53,time_proc:37,timeout:25,timer:[37,54],timer_resolut:[34,54],timestamp:37,tini:[23,84],tip:[16,64,84],titl:[23,48,58,63,68],tl:89,to_surfac:[35,48],todo:64,togeth:[33,38,56,61,63,65,86],toggl:[22,47],toggle_fullscreen:23,toler:32,tom:[86,89],tomato1:21,tomato2:21,tomato3:21,tomato4:21,tomato:21,tomchanc:[86,89],tompong:[61,89],tone:37,too:[25,27,29,42,45,50,51,53,58,62,65,67,68,69,73,84,85],took:63,tool:[26,64,67,87],top:[16,23,24,26,28,29,31,35,38,39,45,50,51,62,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,86,89],topic:16,topleft:[35,42,45,50,56,58,62,66,89],toplevel:38,topmost:50,topright:[45,89],tort:18,tortur:84,tostr:31,total:[38,43,44,53,62,68,86],touch:[25,32,84],touch_id:25,touchid:55,toward:[23,61,63],tp:[67,75],tprint:32,tr:89,traceback:[65,86],track:[16,19,50,54,57,62,64,84],trackbal:[25,47],tradition:22,trail:[53,63],train:84,trait:[68,74],transfer:[16,46,56,61,65],transform:[16,18,29,57,58,62,63,65,66],transform_test:56,translat:[9,15,26,29,33,42],transluc:84,transmiss:37,transpar:[23,24,26,28,29,31,35,43,48,51,52,56,58,62,63,84,86],transpos:42,travel:89,treat:[23,29,32,52,65],tree:57,trend:63,tri:[44,62,63,84,86],tri_left:22,tri_right:22,trial:[74,84],triangl:[24,26,30,32],trick:89,tricki:[64,65,84],trickier:65,trigger:[32,33,39,40,47,68,70],trigon:30,trigonometri:87,triplet:[23,24,30,58],truetyp:[16,28,58,63],truncat:[24,30,38,45,65],truth:64,ttf:[28,29,68,69,70,71,72,73,76,77,78,79,80,81],tune:[29,64],tupl:[9,17,19,20,22,23,24,28,29,30,31,32,35,36,37,38,39,42,44,45,47,49,50,51,52,53,62,65,84],turn:[29,37,56,58,62,64,66,84],turquois:21,turquoise1:21,turquoise2:21,turquoise3:21,turquoise4:21,turtl:68,tutori:[26,61,63,66,73,74,85,89],tweak:64,twice:[50,56,65],twitch:84,two:[1,16,18,19,22,23,24,25,26,28,29,30,32,33,35,36,37,38,42,43,45,47,49,50,53,54,56,58,62,63,64,65,70,71,72,73,84,85,87],tx:30,ty:30,type1:29,type42:29,type:[1,2,3,4,5,6,7,8,9,10,12,13,15,16,17,20,22,23,24,25,27,28,29,30,31,32,33,34,35,37,38,39,40,41,42,44,45,46,47,49,50,51,52,54,56,57,58,59,62,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84,85,87,88,89],typeerror:[24,28,43,45,50],typelist:25,typestr:17,typic:[18,23,37,51,58],u0001:28,u4:17,u:[15,17,33,41],u_margin:[73,81],uc:[15,28,29],ucs4:29,ucs_4:28,uffff:[15,28],ufo:63,ufunc:65,ugh:84,ui:26,uint32:1,uint8:[1,4],uint:65,uklinux:[86,89],ultim:74,unabl:[31,46,64],unaccept:84,unalt:[44,84],unari:20,unavail:[23,28,58,84],unchang:[15,20,23,29],uncommit:44,uncommon:58,uncompress:[31,38],undefin:[29,65],under:[16,37,39,40,63,84,86,89],underli:[29,32,37,44],underlin:[28,29,40],under
line_adjust:29,underneath:65,underscor:[33,58],understand:[16,41,59,61,62,63,64,65,68,69,70,71,84,85,86,88,89],understood:71,undesir:24,unencod:44,unfamiliar:[62,84],unfil:[30,35],unfilt:56,unfortun:[53,84],unicod:[15,25,28,29,33,38,44,46,51],unicode_escap:44,unicodeencodeerror:[29,44],unicodeerror:28,unind:32,uniniti:[18,19,23,28,32,37,38,44,47],union:[45,64,84],union_ip:45,unional:45,unionall_ip:45,uniqu:[32,48,50,51,62,67,72,74],unit:[28,29,44,51],uniti:[67,75],unix:[22,23,63,84],unknown:[23,25,28,32],unless:[7,19,23,29,31,41,46,51,54,56,58,62,64,68,69,84,87,88],unlik:[38,43,51,65,88],unload:40,unlock:[24,51,84],unmap:43,unmap_rgb:[20,24,43,51,52],unmodifi:18,unnorm:67,unnot:33,unpack:[20,39],unpaus:[19,38,40],unplay:[23,63],unpredict:29,unpunch:[58,66],unreal:[63,67,75],unrealist:89,unrecogn:[28,31],unrel:23,unscal:29,unset:[29,35,51,89],unsetcolor:35,unsetsurfac:35,unsign:[1,17,20,35,38,51,65],unspecifi:29,unstructur:51,unsupport:[18,40],until:[18,23,24,25,27,32,35,37,40,47,51,54,57,62,64,66,84,85,88],untransform:29,unus:[31,38],unwieldi:84,up:[18,22,23,24,25,27,32,33,37,38,39,40,42,44,47,50,51,58,60,62,64,65,71,72,73,80,81,84,85,86,87,88],updat:[12,20,23,24,25,32,36,41,44,45,47,48,50,54,59,60,61,62,63,64,66,68,69,70,71,72,73,76,77,78,79,80,81,84,85,87,88,89],update_rect:26,upon:89,upper:[25,29,32,51,57],us:[0,1,9,10,11,14,15,16,17,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,53,54,56,58,59,61,62,63,64,65,66,67,68,71,72,85,86,87,88,89],usabl:[20,31,57],usag:[23,27,37,51,53],usb:37,use_alpha:26,use_arraytyp:[49,52],use_bitmap_strik:29,use_fastrendergroup:26,use_stat:26,use_valu:45,user:[6,16,18,23,24,25,27,32,33,37,44,46,51,53,58,61,62,63,64,65,67,72,84,85,87],userev:[25,38],userevent_dropfil:25,userrect:84,usr:[26,28,66,85,86,87],usual:[23,25,28,29,32,38,44,50,51,58,59,60,62,63,64,65,70,84,86],utf8_str:46,utf:[15,29,46],util:[63,74,84],uxxxxxxxx:[15,29],uyvy_overlai:41,v1:44,v2:[1,40],v3:17,v4l2:[18,57],v:[17,20,33,36,41],val1:1,val2:1,val:[1,65],valid:[17,23,38,39,41,51,57,64],valu:[1,4,9,10,17,18,19,20,22,23,24,25,26,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,56,58,59,62,64,65,68,69,71,72,84,85,88,89],value_to_set:44,valueerror:[17,22,24,26,29,30,33,35,36,43,46,49,51,52,57,65],vanish:84,vari:71,variabl:[17,22,23,28,37,40,44,57,58,63,64,65,69,70,71,74,89],variant:22,varieti:[25,63,70,84],variou:[1,16,22,25,26,34,84,89],ve:[24,32,58,61,62,63,64,65,84,85,87,88,89],vec:36,vector2:[24,30,35,36,45],vector3:36,vector:[24,30,35,61,89],vectorelementwiseproxi:36,veloc:[37,69],ver:44,vera:[28,29],veri:[18,23,26,37,54,58,60,62,63,64,65,70,84,87,88,89],verifi:23,vernum:44,versatil:[47,56],version:[0,12,17,23,25,26,27,29,30,31,32,38,40,43,45,47,50,56,57,58,60,61,62,63,64,84,86,89],vertic:[18,23,24,26,29,30,31,39,42,56,58,65],vflip:[18,57],vgl:23,vgrade:[26,65],via:[18,23,29,35,57],vidcaptur:44,video0:[18,57],video:[23,25,26,32,44,48,51,57,59,64,84],video_mem:[23,59],videocaptur:[18,57],videoexpos:[23,25],videoinfo:[23,59],videores:[23,25],vidinfo:[23,59],view:[1,2,16,42,51],view_p:1,violet:21,violetr:21,violetred1:21,violetred2:21,violetred3:21,violetred4:21,virtual:[23,26,37,39,45],visibl:[17,22,23,39,41,50,58,59,62,63],vision:[16,18],visit:63,vista:23,visual:[70,71,72,73],visualis:87,vline:30,volatil:51,volum:[16,38,40,63],vsync:[23,48],w:[9,25,26,30,33,35,36,42,43,45,56],wa:[10,16,19,20,22,23,24,25,26,27,31,32,33,35,37,38,39,40,42,43,44,47,54,57,58,62,63,64,65,68,70,73,84,86,88],wai:[18,23,24,25,26,28,29,31,32,33
,38,39,43,44,47,48,50,53,58,60,61,62,63,64,65,71,85,87,88,89],wait:[22,23,25,27,32,37,39,54,58,84],waitarrow:22,walk:[58,66],wall:[36,57,89],want:[18,20,23,25,27,31,32,33,36,44,50,51,54,56,57,58,59,61,62,63,64,65,67,68,84,85,86,87,88,89],wargam:84,warn:[44,48,58,66],warranti:18,warrior:63,was_init:29,wasn:63,wast:[84,87],watch:[16,57,61,85],wav:[38,40,46,58,66],wave:49,wayland:[22,23],we:[22,24,26,32,56,57,58,60,62,63,64,65,67,68,69,70,71,72,73,74,84,85,86,87,88,89],weak:[13,17,84],web:[26,66],webcam:18,webp:31,websit:63,week:63,weight:[42,65],weird:33,welcom:44,well:[1,2,13,22,24,27,30,31,33,39,42,44,47,51,56,61,62,63,64,65,84,89],were:[1,11,23,25,28,33,50,53,62,63,64,69,84,86,89],west:22,what:[16,20,22,23,24,26,28,32,38,39,44,46,50,57,58,59,60,61,63,64,65,68,69,71,74,85,86,88,89],whatev:[50,57,58,61,64,84],wheat1:21,wheat2:21,wheat3:21,wheat4:21,wheat:21,wheel:39,when:[13,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,37,38,39,40,41,42,44,45,46,49,50,51,52,53,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,73,74,84,85,87,88,89],whenev:[23,39,44,65],where:[11,16,17,19,20,22,23,24,25,26,29,30,33,35,36,38,39,40,41,42,44,46,47,50,51,53,56,57,58,62,64,65,68,84,85,88,89],wherea:[23,87],wherev:65,whether:[18,23,25,26,28,29,35,38,39,45,46,48,56,61,71,72,84,85],which:[1,13,15,17,18,20,22,23,24,25,26,28,29,31,32,33,35,36,37,38,39,40,42,44,45,46,47,49,50,51,52,53,54,56,58,59,60,61,62,63,64,65,67,68,69,70,71,72,73,74,85,86,87,88,89],whiff:[58,66],whiff_sound:[58,66],whilst:23,white:[21,22,24,32,35,42,57,58,68,69,70,71,72,73,76,77,78,79,80,81,85],whitesmok:21,whitespac:61,who:[16,48,61,62,64,84,89],whole:[23,24,37,44,62,84,87],whoop:62,whose:[24,55],why:[44,63,70,74,87],wide:[24,28,29,35,44,62,63],wider:18,widget:26,width:[9,18,22,23,24,26,28,29,31,32,35,41,42,45,48,50,51,56,59,62,63,67,68,69,70,71,72,73,75,76,77,78,79,80,81,84],wiki:39,wikipedia:36,win32:37,win:[1,58,66,73],windib:23,window:[1,5,16,18,25,26,27,29,32,33,34,37,39,44,46,48,51,58,59,62,63,68,69,84,85,88],window_surfac:23,windowclos:25,windowent:25,windowev:23,windowevent_minim:23,windowexpos:25,windowfocusgain:25,windowfocuslost:25,windowhidden:25,windowhittest:25,windowleav:25,windowmaxim:25,windowminim:25,windowmov:25,windowpos_cent:48,windowpos_undefin:48,windowres:25,windowrestor:25,windowshown:25,windowsizechang:25,windowtakefocu:25,wire:32,wireless:32,wisdom:65,wise:[29,65],wish:86,within:[13,17,22,29,33,35,44,50,51,56,68,74,84],without:[18,22,23,24,29,38,42,45,48,50,51,52,58,59,62,63,64,65,69,70,72,88,89],wm:[23,59],won:[40,50,58,61,62,64,65,84,86,87,88],wonder:[57,84],word:[29,46,54,62,64,88],word_wrap:29,wordwrap:28,work:[18,19,20,22,23,24,25,26,28,29,30,31,32,34,35,37,38,42,43,46,50,51,52,53,56,57,58,59,61,62,63,64,65,68,69,85,87,89],world:[57,68,69,70,74,76,77,78,84],worri:[26,48,50,64,65,74,84],wors:[58,84],worst:61,worth:54,would:[17,22,24,25,28,35,36,38,44,50,51,56,57,58,60,61,62,63,64,65,84,85,87,88,89],wow:68,wrap:[2,5,10,17,22,30,42,51,63],wrapper:[10,84],wrestl:26,writabl:17,write:[1,10,17,26,37,51,61,63,67,84,86,87],write_short:37,write_sys_ex:37,written:[16,24,60,61,63,64,84,86,87],wrong:[38,65,84],wrote:84,www:[70,78,86,89],x00:[15,28],x10:37,x11:[23,25,37,39,44,46,48],x12:37,x13:37,x1:[24,30,45],x2:[24,30,45],x360:32,x3:[24,30],x4:84,x7d:37,x86:56,x:[2,3,5,6,7,8,9,12,13,22,23,24,25,26,28,29,30,32,33,35,36,37,39,42,44,45,46,47,48,49,50,51,52,55,58,59,62,68,69,70,72,73,76,77,78,80,81,84,85,87,89],x_offset:35,x_scale:48,xbm:22,xbox:[32,47],xf0:37,xf7:37,xfade:65,xor:22,xormask:[22,39],xpm:31,xx:22,xxx:22,xxxx:22,xxxx_
test:53,xxxxx:22,xy:[36,89],y1:[24,30,45],y2:[24,30,45],y3:[24,30],y:[9,20,22,23,24,25,28,29,30,32,33,35,36,39,41,42,44,45,47,48,49,50,51,52,55,58,62,66,68,69,70,72,73,77,78,80,81,84,85,87,88,89],y_offset:35,y_scale:48,ye:[65,67,85],yeah:71,year:[63,74,84],yellow1:21,yellow2:21,yellow3:21,yellow4:21,yellow:21,yellowgreen:21,yet:[29,48,65,84,88],you:[11,16,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,36,37,38,39,40,41,42,44,46,50,51,54,56,57,58,59,60,61,63,64,65,67,68,71,72,74,85,86,87,88,89],your:[16,18,19,22,23,25,26,27,28,30,31,33,37,38,44,46,54,56,57,58,59,60,61,63,65,67,70,85,86,87,88,89],yourself:[28,50,58,63,65,86,87],yup:84,yuv:[18,41,57],yuy2_overlai:41,yv12_overlai:41,yvyu_overlai:41,z:[33,36,87,89],zero:[19,20,24,28,29,36,37,38,59,64,65,89],zine:63,zip:84,zoom:26},titles:["pygame C API","High level API exported by pygame.base","Class BufferProxy API exported by pgyame.bufferproxy","API exported by pygame.cdrom","Class Color API exported by pygame.color","API exported by pygame.display","API exported by pygame.event","API exported by pygame._freetype","API exported by pygame.mixer","Class Rect API exported by pygame.rect","API exported by pygame.rwobject","Slots and c_api - Making functions and data available from other modules","Class Surface API exported by pygame.surface","API exported by pygame.surflock","API exported by pygame.version","File Path Function Arguments","Pygame Front Page","pygame.BufferProxy","pygame.camera","pygame.cdrom","pygame.Color","Named Colors","pygame.cursors","pygame.display","pygame.draw","pygame.event","pygame.examples","pygame.fastevent","pygame.font","pygame.freetype","pygame.gfxdraw","pygame.image","pygame.joystick","pygame.key","pygame.locals","pygame.mask","pygame.math","pygame.midi","pygame.mixer","pygame.mouse","pygame.mixer.music","pygame.Overlay","pygame.PixelArray","pygame.pixelcopy","pygame","pygame.Rect","pygame.scrap","pygame._sdl2.controller","pygame.sdl2_video","pygame.sndarray","pygame.sprite","pygame.Surface","pygame.surfarray","pygame.tests","pygame.time","pygame._sdl2.touch","pygame.transform","Pygame Tutorials - Camera Module Introduction","Pygame Tutorials - Line By Line Chimp Example","Pygame Tutorials - Setting Display Modes","Pygame Tutorials - Import and Initialize","Making Games With Pygame","Pygame Tutorials - Help! 
How Do I Move An Image?","Pygame Intro","Pygame Tutorials - Sprite Module Introduction","Pygame Tutorials - Surfarray Introduction","pygame/examples/chimp.py","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","Author: Youngwook Kim (Korean)","\ud55c\uad6d\uc5b4 \ud29c\ud1a0\ub9ac\uc5bc","A Newbie Guide to pygame","Revision: Pygame fundamentals","Kicking things off","Game object classes","User-controllable objects","Putting it all together"],titleterms:{"1":[61,85,86,87,88,89],"2":[62,85,86,87,89],"3":[85,86,88,89],"4":[85,87],"5":[85,88],"6":89,"\uadf8\ub9ac\uace0":[79,80,81],"\uae30\ubc18\uacfc":76,"\uae30\ubc18\uc73c\ub85c\uc758":76,"\uae30\ubcf8":76,"\uae30\ucd08":[76,77,78],"\ub354":81,"\ubc84\ud2bc":80,"\uc0c8\ub85c\uc6b4":78,"\uc2ec\ud654":[79,80],"\uc5d0\ud544\ub85c\uadf8":82,"\uc65c":75,"\uc6c0\uc9c1\uc774\uae30":77,"\uc704\ud55c":77,"\uc774\ubca4\ud2b8":[76,78],"\uc785\ub825":78,"\uc785\ub825\uc740":78,"\uc785\ubb38":76,"\uc870\uac74":77,"\uc870\uae08":81,"\ucc98\ub9ac":[77,79],"\ucd9c\ub825":[76,80],"\ud29c\ud1a0\ub9ac\uc5bc":83,"\ud30c\uc774\uac8c\uc784":75,"\ud504\ub864\ub85c\uadf8":75,"\ud558\ud544":75,"\ud55c\uad6d\uc5b4":83,"\ud568\uc218\ud654":79,"\ud615\uc2dd\uacfc":76,"\ud654\uba74\uc774":77,"class":[2,4,9,12,36,50,58,64,87,88],"do":[62,84],"export":[1,2,3,4,5,6,7,8,9,10,12,13,14,17],"function":[11,15,59,62,65,71,86],"import":[57,58,60,65],"new":70,"while":58,A:[61,62,84,87,88],AND:63,By:58,Into:68,It:62,NO:84,On:[58,62],The:[58,62,64,85,86,89],There:84,To:62,With:61,_freetyp:7,_sdl2:[47,55],access:[42,49,52],advanc:[64,65,71,72],all:[58,62,89],alpha:[73,84],an:[17,62],anim:[69,84],api:[0,1,2,3,4,5,6,7,8,9,10,12,13,14],ar:[62,84],argument:15,arrai:[17,43,52],audio:[19,40],author:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],avail:11,back:62,background:[58,62],ball:[87,89],base:1,basic:[50,57,59,68,69,70,85],bat:[88,89],blit:[62,85],bother:84,buffer:17,bufferproxi:2,bufferproxypygam:17,button:72,c:[0,1,2,3,4,5,6,7,8,9,10,12,13],c_api:11,camera:[18,57],camerapygam:18,captur:57,cdrom:[3,19],cdrompygam:19,center:58,chang:62,chimp:[58,66],clipboard:46,close:63,code:61,collis:[64,84],color:[4,20,21],colorkei:84,colorpygam:20,colorspac:57,com:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],comfort:84,common:64,comput:[29,57],connect:57,constant:34,contact:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],contain:44,content:61,control:[19,23,40,47,57,88],controllerpygam:47,convert:84,coordin:[45,62],copi:43,creat:[58,62],cursor:22,cursorspygam:22,da:85,data:[11,49,52],decid:59,definit:62,detect:[64,84],direct:42,dirti:84,displai:[5,23,58,59],displaypygam:23,distract:84,divers:[87,88],document:16,don:84,draw:[24,30,58],drawpygam:24,driven:68,entir:58,epilog:74,event:[6,25,27,58,68,70,84,85,88],eventpygam:25,everyth:58,exampl:[26,58,59,65,66],examplesmodul:26,extend:64,fasteventpygam:27,file:15,finish:[58,89],first:[62,86],font:[28,29],fontpygam:28,freetypeenhanc:29,friend:84,from:[11,62],front:16,fundament:85,game:[50,58,61,63,85,87],gamepad:32,gener:43,get:84,gfxdrawpygam:30,gmail:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],go:62,gr
aduat:65,graphic:41,group:64,gui:[68,76],guid:84,handl:[58,62,86],hardwar:84,help:62,here:62,hero:62,high:1,histori:[63,64],hit:89,how:[59,62],i:62,imag:[31,35,51,57,62],imagepygam:31,inform:44,init:[57,60],initi:[58,60],input:[37,55,58,62,70],interact:[25,27,32,37],interfac:52,intro:63,introduct:[57,58,59,61,63,64,65],issu:84,joystick:32,joystickpygam:32,just:62,keyboard:33,keypygam:33,kick:86,kim:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],know:84,korean:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],lesson:64,let:[62,89],level:[1,44],line:[58,86],list:[57,62],live:57,load:[28,29,38,58,86],localspygam:34,lock:65,loop:[58,85],main:58,make:[11,61,62],manag:84,mani:64,map:62,mask:[35,57],maskpygam:35,mathpygam:36,midi:37,midipygam:37,mix:64,mixer:[8,40],mixerpygam:38,mode:59,modul:[11,18,19,22,23,24,25,27,28,29,30,31,32,33,35,36,37,38,39,40,43,44,46,47,49,50,52,54,55,56,57,58,63,64,86],monitor:54,more:[65,84],mous:39,mousepygam:39,move:62,movement:62,multipl:62,musicpygam:40,mysteri:62,name:21,need:84,newbi:84,next:62,note:61,numer:65,numpi:65,object:[17,20,41,42,45,50,51,58,85,87,88],off:86,other:[11,65],output:[37,68,72],over:58,overlai:41,overlaypygam:41,overview:63,own:[62,64],packag:[44,53],page:16,part:84,path:15,perfect:84,pgyam:2,physic:87,pixel:[42,43,52,62,84],pixelarraypygam:42,pixelcopypygam:43,plai:38,plu:73,prepar:58,problem:64,process:[69,71],product:89,program:26,prolog:67,protocol:17,put:[58,62,89],py:[14,66],pygam:[0,1,3,4,5,6,7,8,9,10,12,13,14,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,84,85,88],pygameth:44,python:[63,65,84],pythoni:84,queue:[25,27],quit:60,re:84,realli:84,recogn:84,rect:[9,84],rectangular:45,rectpygam:45,refer:16,render:[28,29,64],repres:51,represent:20,resourc:[22,58,86],revis:85,rule:84,rumia0601:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],rwobject:10,s:62,sampl:49,scene:58,scrappygam:46,screen:[23,62],sdl2_video:48,set:[59,69],setup:58,shape:[24,30],side:[84,89],simpl:[87,88],singl:57,six:84,slot:11,smooth:62,sndarraypygam:49,so:62,some:62,sound:[38,49],sprite:[58,64,87],spritepygam:50,src_c:[1,2,3,4,5,6,7,8,9,10,12,13],src_py:14,step:62,store:45,stream:[40,57],style:61,subsystem:84,suit:53,support:46,surfac:[12,17,42,52,56,65,84],surfacepygam:51,surfarrai:65,surfarraypygam:52,surflock:13,t:84,ta:85,tabl:61,take:62,tast:63,templat:68,test:53,testspygam:53,text:58,than:84,thei:84,them:64,thing:[84,86],threshold:57,through:17,time:54,timepygam:54,togeth:[62,64,89],top:44,touch:55,touchpygam:55,trackbal:32,transfer:31,transform:56,transformpygam:56,transpar:65,troubl:84,tutori:[16,57,58,59,60,62,64,65],type:64,unit:53,updat:58,us:[18,52,57,84],user:88,vector:[36,87],version:[14,44],versionsmal:44,video:41,vision:57,vs:84,wai:84,what:[62,84],which:84,why:67,window:23,work:[33,39,47,55,84],worth:84,you:[62,84],youngwook:[67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82],your:[62,64,84]}}) \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/CameraIntro.html b/venv/Lib/site-packages/pygame/docs/generated/tut/CameraIntro.html deleted file mode 100644 index 400dc61..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/CameraIntro.html +++ /dev/null @@ -1,378 +0,0 @@ - - - - - - - - - Pygame Tutorials - Camera Module Introduction — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Camera Module Introduction

-
-
Author
-

by Nirav Patel

-
-
Contact
-

nrp@eclecti.cc

-
-
-

Pygame 1.9 comes with support for interfacing cameras, allowing you to capture still images, watch live streams, and do some simple computer vision. This tutorial will cover all of those use cases, providing code samples you can base your app or game on. You can refer to the reference documentation for the full API.

-
-

Note

-

As of Pygame 1.9, the camera module offers native support for cameras that use v4l2 on Linux. There is support for other platforms via Videocapture or OpenCV, but this guide will focus on the native module. Most of the code will be valid for other platforms, but certain things like controls will not work. The module is also marked as EXPERIMENTAL, meaning the API could change in subsequent versions.

-
-
-

Import and Init

-
import pygame
-import pygame.camera
-from pygame.locals import *
-
-pygame.init()
-pygame.camera.init()
-
-
-

As the camera module is optional, it needs to be imported and initialized manually as shown above.
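If you want the program to fail gracefully when camera support is missing, the same initialization can be wrapped defensively. This is an illustrative sketch, not part of the original page; the exact exception raised depends on the platform and pygame build.

import pygame
import pygame.camera

pygame.init()
try:
    pygame.camera.init()
except Exception as err:   # the exact error type depends on the backend
    raise SystemExit("camera support is unavailable: %s" % err)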

-
-
-

Capturing a Single Image

-

Now we will go over the simplest case of opening a camera and capturing a frame as a surface. In the example below, we assume that there is a camera at /dev/video0 on the computer and initialize it with a size of 640 by 480. The surface called image is whatever the camera was seeing when get_image() was called.

-
cam = pygame.camera.Camera("/dev/video0",(640,480))
-cam.start()
-image = cam.get_image()
-
-
-
-

Listing Connected Cameras¶

-

You may be wondering, what if we don't know the exact path of the camera? -We can ask the module to provide a list of cameras attached to the -computer and initialize the first camera in the list.

-
camlist = pygame.camera.list_cameras()
-if camlist:
-    cam = pygame.camera.Camera(camlist[0],(640,480))
-
-
-
-
-

Using Camera Controls¶

-

Most cameras support controls like flipping the image and changing brightness. -set_controls() and get_controls() can be used at any point after using start().

-
cam.set_controls(hflip = True, vflip = False)
-print(cam.get_controls())
-
-
-
-
-
-

Capturing a Live Stream¶

-

The rest of this tutorial will be based around capturing a live stream of -images. For this, we will be using the class below. As described, it will -simply blit a constant stream of camera frames to the screen, effectively -showing live video. It is basically what you would expect, looping get_image(), -blitting to the display surface, and flipping it. For performance reasons, -we will be supplying the camera with the same surface to use each time.

-
class Capture(object):
-    def __init__(self):
-        self.size = (640,480)
-        # create a display surface. standard pygame stuff
-        self.display = pygame.display.set_mode(self.size, 0)
-
-        # this is the same as what we saw before
-        self.clist = pygame.camera.list_cameras()
-        if not self.clist:
-            raise ValueError("Sorry, no cameras detected.")
-        self.cam = pygame.camera.Camera(self.clist[0], self.size)
-        self.cam.start()
-
-        # create a surface to capture to.  for performance purposes
-        # bit depth is the same as that of the display surface.
-        self.snapshot = pygame.surface.Surface(self.size, 0, self.display)
-
-    def get_and_flip(self):
-        # if you don't want to tie the framerate to the camera, you can check
-        # if the camera has an image ready.  note that while this works
-        # on most cameras, some will never return true.
-        if self.cam.query_image():
-            self.snapshot = self.cam.get_image(self.snapshot)
-
-        # blit it to the display surface.  simple!
-        self.display.blit(self.snapshot, (0,0))
-        pygame.display.flip()
-
-    def main(self):
-        going = True
-        while going:
-            events = pygame.event.get()
-            for e in events:
-                if e.type == QUIT or (e.type == KEYDOWN and e.key == K_ESCAPE):
-                    # close the camera safely
-                    self.cam.stop()
-                    going = False
-
-            self.get_and_flip()
-
-
-

Since get_image() is a blocking call that could take quite a bit of time on a -slow camera, this example uses query_image() to see if the camera is ready. -This allows you to separate the framerate of your game from that of your camera. -It is also possible to have the camera capturing images in a separate thread, -for approximately the same performance gain, if you find that your camera does -not support the query_image() function correctly.
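If you do go the threaded route, a minimal sketch could look like the following. Only cam.get_image(), pygame.camera, and the surface handling come from this tutorial; the thread and lock wiring (and the ThreadedCapture name) are assumptions added purely for illustration.

import threading

import pygame
import pygame.camera


class ThreadedCapture:
    """Grabs frames in a background thread so the main loop never blocks."""

    def __init__(self, size=(640, 480)):
        pygame.init()
        pygame.camera.init()
        self.display = pygame.display.set_mode(size, 0)
        self.cam = pygame.camera.Camera(pygame.camera.list_cameras()[0], size)
        self.cam.start()
        self.snapshot = pygame.surface.Surface(size, 0, self.display)
        self.lock = threading.Lock()
        self.running = True
        threading.Thread(target=self._grab, daemon=True).start()

    def _grab(self):
        # the blocking get_image() call happens here, off the main thread
        while self.running:
            frame = self.cam.get_image()
            with self.lock:
                self.snapshot = frame

    def get_and_flip(self):
        # the main loop only ever blits the most recent frame
        with self.lock:
            self.display.blit(self.snapshot, (0, 0))
        pygame.display.flip()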

-
-
-

Basic Computer Vision¶

-

By using the camera, transform, and mask modules, pygame can do some basic -computer vision.

-
-

Colorspaces¶

-

When initializing a camera, colorspace is an optional parameter, with 'RGB', -'YUV', and 'HSV' as the possible choices. YUV and HSV are both generally more -useful for computer vision than RGB, and allow you to more easily threshold by -color, something we will look at later in the tutorial.

-
self.cam = pygame.camera.Camera(self.clist[0], self.size, "RGB")
-
-
-../_images/camera_rgb.jpg -
self.cam = pygame.camera.Camera(self.clist[0], self.size, "YUV")
-
-
-../_images/camera_yuv.jpg -
self.cam = pygame.camera.Camera(self.clist[0], self.size, "HSV")
-
-
-../_images/camera_hsv.jpg -
-
-

Thresholding¶

-

Using the threshold() function from the transform module, one can do simple -green screen like effects, or isolate specifically colored objects in a scene. -In the below example, we threshold out just the green tree and make the rest -of the image black. Check the reference documentation for details on the -threshold function.

-
self.thresholded = pygame.surface.Surface(self.size, 0, self.display)
-self.snapshot = self.cam.get_image(self.snapshot)
-pygame.transform.threshold(self.thresholded,self.snapshot,(0,255,0),(90,170,170),(0,0,0),2)
-
-
-../_images/camera_thresholded.jpg -

Of course, this is only useful if you already know the exact color of the object -you are looking for. To get around this and make thresholding usable in the -real world, we need to add a calibration stage where we identify the color of an -object and use it to threshold against. We will be using the average_color() -function of the transform module to do this. Below is an example calibration -function that you could loop until an event like a key press, and an image of -what it would look like. The color inside the box will be the one that is -used for the threshold. Note that we are using the HSV colorspace in the below -images.

-
def calibrate(self):
-    # capture the image
-    self.snapshot = self.cam.get_image(self.snapshot)
-    # blit it to the display surface
-    self.display.blit(self.snapshot, (0,0))
-    # make a rect in the middle of the screen
-    crect = pygame.draw.rect(self.display, (255,0,0), (145,105,30,30), 4)
-    # get the average color of the area inside the rect
-    self.ccolor = pygame.transform.average_color(self.snapshot, crect)
-    # fill the upper left corner with that color
-    self.display.fill(self.ccolor, (0,0,50,50))
-    pygame.display.flip()
-
-
-../_images/camera_average.jpg -
pygame.transform.threshold(self.thresholded,self.snapshot,self.ccolor,(30,30,30),(0,0,0),2)
-
-
-../_images/camera_thresh.jpg -

You can use the same idea to do a simple green screen/blue screen, by first -getting a background image and then thresholding against it. The below example -just has the camera pointed at a blank white wall in HSV colorspace.

-
def calibrate(self):
-    # capture a bunch of background images
-    bg = []
-    for i in range(0,5):
-      bg.append(self.cam.get_image(self.background))
-    # average them down to one to get rid of some noise
-    pygame.transform.average_surfaces(bg,self.background)
-    # blit it to the display surface
-    self.display.blit(self.background, (0,0))
-    pygame.display.flip()
-
-
-../_images/camera_background.jpg -
pygame.transform.threshold(self.thresholded,self.snapshot,(0,255,0),(30,30,30),(0,0,0),1,self.background)
-
-
-../_images/camera_green.jpg -
-
-

Using the Mask Module¶

-

The stuff above is great if you just want to display images, but with the -mask module, you can also use a camera as an -input device for a game. For example, going back to the example of -thresholding out a specific object, we can find the position of that object and -use it to control an on screen object.

-
def get_and_flip(self):
-    self.snapshot = self.cam.get_image(self.snapshot)
-    # threshold against the color we got before
-    mask = pygame.mask.from_threshold(self.snapshot, self.ccolor, (30, 30, 30))
-    self.display.blit(self.snapshot,(0,0))
-    # keep only the largest blob of that color
-    connected = mask.connected_component()
-    # make sure the blob is big enough that it isn't just noise
-    if mask.count() > 100:
-        # find the center of the blob
-        coord = mask.centroid()
-        # draw a circle with size variable on the size of the blob
-        pygame.draw.circle(self.display, (0,255,0), coord, max(min(50,mask.count()/400),5))
-    pygame.display.flip()
-
-
-../_images/camera_mask.jpg -

This is just the most basic example. You can track multiple different colored -blobs, find the outlines of objects, have collision detection between real life -and in game objects, get the angle of an object to allow for even finer control, -and more. Have fun!
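As a rough illustration of those ideas (not code from this tutorial), the mask module already has the pieces: connected_components() for multiple blobs, outline() for object outlines, and angle() for orientation. The tolerance and minimum blob size below are just carried over from the earlier example, not required values.

import pygame.mask

def analyse_frame(snapshot, ccolor, tolerance=(30, 30, 30), min_pixels=100):
    # threshold the frame the same way as before
    mask = pygame.mask.from_threshold(snapshot, ccolor, tolerance)
    blobs = []
    # keep every blob big enough that it isn't just noise
    for blob in mask.connected_components(min_pixels):
        blobs.append({
            "centroid": blob.centroid(),   # where the blob is
            "angle": blob.angle(),         # rough orientation of its pixels
            "outline": blob.outline(),     # edge points, e.g. for drawing
        })
    return blobs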

-
-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/ChimpLineByLine.html b/venv/Lib/site-packages/pygame/docs/generated/tut/ChimpLineByLine.html deleted file mode 100644 index 6f1e21f..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/ChimpLineByLine.html +++ /dev/null @@ -1,597 +0,0 @@ - - - - - - - - - Pygame Tutorials - Line By Line Chimp Example — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Line By Line Chimp¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-
-
-
-

Introduction¶

-

In the pygame examples there is a simple example named "chimp". -This example simulates a punchable monkey moving around the screen with -promises of riches and reward. The example itself is very simple, and a -bit thin on error-checking code. This example program demonstrates many of -pygame's abilities, like creating a window, loading images and sounds, -rendering text, and basic event and mouse handling.

-

The program and images can be found inside the standard source distribution -of pygame. You can run it by running python -m pygame.examples.chimp in -your terminal.

-

This tutorial will go through the code block by block, explaining how it works. There will also be mention of how the code could be improved and what error checking could help out.

-

This is an excellent tutorial for people getting their first look at -the pygame code. Once pygame is fully installed, you can find -and run the chimp demo for yourself in the examples directory.

-
-

(no, this is not a banner ad, it's the screenshot)

-chimp game banner -

Full Source

-
-
-
-

Import Modules¶

-

This is the code that imports all the needed modules into your program. -It also checks for the availability of some of the optional pygame modules.

-
# Import Modules
-import os
-import pygame as pg
-
-if not pg.font:
-    print("Warning, fonts disabled")
-if not pg.mixer:
-    print("Warning, sound disabled")
-
-main_dir = os.path.split(os.path.abspath(__file__))[0]
-data_dir = os.path.join(main_dir, "data")
-
-
-

First, we import the standard "os" python module. This allows us to do things like create platform-independent file paths.

-

In the next line, we import the pygame package. In our case, we import -pygame as pg, so that all of the functionality of pygame is able to -be referenced from the namespace pg.

-

Some pygame modules are optional, and if they aren't found, -they evaluate to False. Because of that, we decide to print -a nice warning message if the font or -mixer modules in pygame are not available. -(Although they will only be unavailable in very uncommon situations).

-

Lastly, we prepare two paths for the rest of the code to use. -main_dir uses the os.path module and the __file__ variable provided -by Python to locate the game's python file, and extract the folder from -that path. It then prepares the variable data_dir to tell the -loading functions exactly where to look.

-
-
-

Loading Resources¶

-

Here we have two functions we can use to load images and sounds. We will -look at each function individually in this section.

-
def load_image(name, colorkey=None, scale=1):
-    fullname = os.path.join(data_dir, name)
-    image = pg.image.load(fullname)
-
-    size = image.get_size()
-    size = (size[0] * scale, size[1] * scale)
-    image = pg.transform.scale(image, size)
-
-    image = image.convert()
-    if colorkey is not None:
-        if colorkey == -1:
-            colorkey = image.get_at((0, 0))
-        image.set_colorkey(colorkey, pg.RLEACCEL)
-    return image, image.get_rect()
-
-
-

This function takes the name of an image to load. It also optionally -takes an argument it can use to set a colorkey for the image, and an argument -to scale the image. A colorkey is used in graphics to represent a color of the -image that is transparent.

-

The first thing this function does is create a full pathname to the file. -In this example all the resources are in a "data" subdirectory. By using -the os.path.join function, a pathname will be created that works for whatever -platform the game is running on.

-

Next we load the image using the pygame.image.load() function. After the image is loaded, we make an important call to the convert() function. This makes a new copy of a Surface and converts its color format and depth to match the display. This means blitting the image to the screen will happen as quickly as possible.

-

We then scale the image, using the pygame.transform.scale() function. This function takes a Surface and the size it should be scaled to. To scale by a scalar, we can get the size and scale the x and y by the scalar.

-

Last, we set the colorkey for the image. If the user supplied an argument -for the colorkey argument we use that value as the colorkey for the image. -This would usually just be a color RGB value, like (255, 255, 255) for -white. You can also pass a value of -1 as the colorkey. In this case the -function will lookup the color at the topleft pixel of the image, and use -that color for the colorkey.

-
def load_sound(name):
-    class NoneSound:
-        def play(self):
-            pass
-
-    if not pg.mixer or not pg.mixer.get_init():
-        return NoneSound()
-
-    fullname = os.path.join(data_dir, name)
-    sound = pg.mixer.Sound(fullname)
-
-    return sound
-
-
-

Next is the function to load a sound file. The first thing this function does is check to see if the pygame.mixer module was imported correctly. If not, it returns a small class instance that has a dummy play method. This will act enough like a normal Sound object for this game to run without any extra error checking.

-

This function is similar to the image loading function, but handles some different problems. First we create a full path to the sound file, and load it. Then we simply return the loaded Sound object.

-
-
-

Game Object Classes¶

-

Here we create two classes to represent the objects in our game. Almost -all the logic for the game goes into these two classes. We will look over -them one at a time here.

-
class Fist(pg.sprite.Sprite):
-    """moves a clenched fist on the screen, following the mouse"""
-
-    def __init__(self):
-        pg.sprite.Sprite.__init__(self)  # call Sprite initializer
-        self.image, self.rect = load_image("fist.png", -1)
-        self.fist_offset = (-235, -80)
-        self.punching = False
-
-    def update(self):
-        """move the fist based on the mouse position"""
-        pos = pg.mouse.get_pos()
-        self.rect.topleft = pos
-        self.rect.move_ip(self.fist_offset)
-        if self.punching:
-            self.rect.move_ip(15, 25)
-
-    def punch(self, target):
-        """returns true if the fist collides with the target"""
-        if not self.punching:
-            self.punching = True
-            hitbox = self.rect.inflate(-5, -5)
-            return hitbox.colliderect(target.rect)
-
-    def unpunch(self):
-        """called to pull the fist back"""
-        self.punching = False
-
-
-

Here we create a class to represent the player's fist. It is derived from the Sprite class included in the pygame.sprite module. The __init__ function is called when new instances of this class are created. The first thing we do is be sure to call the __init__ function for our base class. This allows the Sprite's __init__ function to prepare our object for use as a sprite. This game uses one of the sprite drawing Group classes. These classes can draw sprites that have an "image" and "rect" attribute. By simply changing these two attributes, the renderer will draw the current image at the current position.

-

All sprites have an update() method. This function is typically called -once per frame. It is where you should put code that moves and updates -the variables for the sprite. The update() method for the fist moves the -fist to the location of the mouse pointer. It also offsets the fist position -slightly if the fist is in the "punching" state.

-

The following two functions punch() and unpunch() change the punching -state for the fist. The punch() method also returns a true value if the fist -is colliding with the given target sprite.

-
class Chimp(pg.sprite.Sprite):
-    """moves a monkey critter across the screen. it can spin the
-    monkey when it is punched."""
-
-    def __init__(self):
-        pg.sprite.Sprite.__init__(self)  # call Sprite initializer
-        self.image, self.rect = load_image("chimp.png", -1, 4)
-        screen = pg.display.get_surface()
-        self.area = screen.get_rect()
-        self.rect.topleft = 10, 90
-        self.move = 18
-        self.dizzy = False
-
-    def update(self):
-        """walk or spin, depending on the monkeys state"""
-        if self.dizzy:
-            self._spin()
-        else:
-            self._walk()
-
-    def _walk(self):
-        """move the monkey across the screen, and turn at the ends"""
-        newpos = self.rect.move((self.move, 0))
-        if not self.area.contains(newpos):
-            if self.rect.left < self.area.left or self.rect.right > self.area.right:
-                self.move = -self.move
-                newpos = self.rect.move((self.move, 0))
-                self.image = pg.transform.flip(self.image, True, False)
-        self.rect = newpos
-
-    def _spin(self):
-        """spin the monkey image"""
-        center = self.rect.center
-        self.dizzy = self.dizzy + 12
-        if self.dizzy >= 360:
-            self.dizzy = False
-            self.image = self.original
-        else:
-            rotate = pg.transform.rotate
-            self.image = rotate(self.original, self.dizzy)
-        self.rect = self.image.get_rect(center=center)
-
-    def punched(self):
-        """this will cause the monkey to start spinning"""
-        if not self.dizzy:
-            self.dizzy = True
-            self.original = self.image
-
-
-

The Chimp class is doing a little more work than the fist, but nothing -more complex. This class will move the chimp back and forth across the -screen. When the monkey is punched, he will spin around to exciting effect. -This class is also derived from the base Sprite -class, and is initialized the same as the fist. While initializing, the class -also sets the attribute "area" to be the size of the display screen.

-

The update function for the chimp simply looks at the current "dizzy" -state, which is true when the monkey is spinning from a punch. It calls either -the _spin or _walk method. These functions are prefixed with an underscore. -This is just a standard python idiom which suggests these methods should -only be used by the Chimp class. We could go so far as to give them a double -underscore, which would tell python to really try to make them private -methods, but we don't need such protection. :)

-

The _walk method creates a new position for the monkey by moving the current rect by a given offset. If this new position crosses outside the display area of the screen, it reverses the movement offset. It also mirrors the image using the pygame.transform.flip() function. This is a crude effect that makes the monkey look like he's turning in the direction he is moving.

-

The _spin method is called when the monkey is currently "dizzy". The dizzy attribute is used to store the current amount of rotation. When the monkey has rotated all the way around (360 degrees) it resets the monkey image back to the original, non-rotated version. Before calling the pygame.transform.rotate() function, you'll see the code makes a local reference to the function simply named "rotate". There is no need to do that for this example, it is just done here to keep the following line's length a little shorter. Note that when calling the rotate function, we are always rotating from the original monkey image. There is a slight loss of quality with every rotation, so if we repeatedly rotated the same image, the quality would get worse each time. Also, when rotating an image, the size of the image will actually change. This is because the corners of the image will be rotated out, making the image bigger. We make sure the center of the new image matches the center of the old image, so it rotates without moving.

-

The last method is punched() which tells the sprite to enter its dizzy -state. This will cause the image to start spinning. It also makes a copy -of the current image named "original".

-
-
-

Initialize Everything¶

-

Before we can do much with pygame, we need to make sure its modules -are initialized. In this case we will also open a simple graphics window. -Now we are in the main() function of the program, which actually runs everything.

-
pg.init()
-screen = pg.display.set_mode((1280, 480), pg.SCALED)
-pg.display.set_caption("Monkey Fever")
-pg.mouse.set_visible(False)
-
-
-

The first line to initialize pygame takes care of a bit of -work for us. It checks through the imported pygame modules and attempts -to initialize each one of them. It is possible to go back and check if modules -failed to initialize, but we won't bother here. It is also possible to -take a lot more control and initialize each specific module by hand. That -type of control is generally not needed, but is available if you desire.

-

Next we set up the display graphics mode. Note that the pygame.display module is used to control all the display settings. In this case we are asking for a 1280 by 480 window, with the SCALED display flag. This automatically scales up the window for displays much larger than the window.

-

Last we set the window title and turn off the mouse cursor for our -window. Very basic to do, and now we have a small black window ready to -do our bidding. Usually the cursor defaults to visible, so there is no need -to really set the state unless we want to hide it.

-
-
-

Create The Background¶

-

Our program is going to have a text message in the background. It would be nice for us to create a single surface to represent the background and repeatedly use that. The first step is to create the surface.

-
background = pg.Surface(screen.get_size())
-background = background.convert()
-background.fill((170, 238, 187))
-
-
-

This creates a new surface for us that is the same size as the display -window. Note the extra call to convert() after creating the Surface. The -convert with no arguments will make sure our background is the same format -as the display window, which will give us the fastest results.

-

We also fill the entire background with a certain green color. The fill() function usually takes an RGB triplet as arguments, but supports many input formats. See pygame.Color for all the color formats.

-
-
-

Put Text On The Background, Centered¶

-

Now that we have a background surface, let's get the text rendered to it. We only do this if we see that the pygame.font module has imported properly. If not, we just skip this section.

-
if pg.font:
-    font = pg.font.Font(None, 64)
-    text = font.render("Pummel The Chimp, And Win $$$", True, (10, 10, 10))
-    textpos = text.get_rect(centerx=background.get_width() / 2, y=10)
-    background.blit(text, textpos)
-
-
-

As you see, there are a couple steps to getting this done. First we -must create the font object and render it into a new surface. We then find -the center of that new surface and blit (paste) it onto the background.

-

The font is created with the font module's Font() constructor. Usually -you will pass the name of a TrueType font file to this function, but we -can also pass None, which will use a default font. The Font constructor -also needs to know the size of font we want to create.

-

We then render that font into a new surface. The render function creates -a new surface that is the appropriate size for our text. In this case -we are also telling render to create antialiased text (for a nice smooth -look) and to use a dark grey color.

-

Next we need to find the centered position of the text on our display. -We create a "Rect" object from the text dimensions, which allows us to -easily assign it to the screen center.

-

Finally we blit (blit is like a copy or paste) the text onto the background -image.

-
-
-

Display The Background While Setup Finishes¶

-

We still have a black window on the screen. Lets show our background -while we wait for the other resources to load.

-
screen.blit(background, (0, 0))
-pg.display.flip()
-
-
-

This will blit our entire background onto the display window. The -blit is self explanatory, but what about this flip routine?

-

In pygame, changes to the display surface are not immediately visible. -Normally, a display must be updated in areas that have changed for them -to be visible to the user. In this case the flip() function works nicely -because it simply handles the entire window area.
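For completeness (this is not part of the chimp program), pg.display.update() is the alternative when you only want to refresh the areas that actually changed; it accepts a single rect or a list of rects. The screen, background, and pg names below are the ones from the snippet above; dirty_rect is just a placeholder.

# not part of the chimp example: refresh only the changed area
dirty_rect = pg.Rect(0, 0, 100, 100)        # pretend only this area changed
screen.blit(background, dirty_rect, dirty_rect)
pg.display.update(dirty_rect)               # update just that rectangle
# pg.display.update([rect_a, rect_b])       # or several rectangles at once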

-
-
-

Prepare Game Object¶

-

Here we create all the objects that the game is going to need.

-
whiff_sound = load_sound("whiff.wav")
-punch_sound = load_sound("punch.wav")
-chimp = Chimp()
-fist = Fist()
-allsprites = pg.sprite.RenderPlain((chimp, fist))
-clock = pg.time.Clock()
-
-
-

First we load two sound effects using the load_sound function we defined -above. Then we create an instance of each of our sprite classes. And lastly -we create a sprite Group which will contain all -our sprites.

-

We actually use a special sprite group named RenderPlain. This sprite group can draw all the sprites it -contains to the screen. It is called RenderPlain because there are actually -more advanced Render groups. But for our game, we just need simple drawing. We -create the group named "allsprites" by passing a list with all the sprites that -should belong in the group. We could later on add or remove sprites from this -group, but in this game we won't need to.

-

The clock object we create will be used to help control our game's framerate. We will use it in the main loop of our game to make sure it doesn't run too fast.

-
-
-

Main Loop¶

-

Nothing much here, just an infinite loop.

-
going = True
-while going:
-    clock.tick(60)
-
-
-

All games run in some sort of loop. The usual order of things is to -check on the state of the computer and user input, move and update the -state of all the objects, and then draw them to the screen. You'll see -that this example is no different.

-

We also make a call to our clock object, which will make sure our game -doesn't run faster than 60 frames per second.

-
-
-

Handle All Input Events¶

-

This is an extremely simple case of working the event queue.

-
for event in pg.event.get():
-    if event.type == pg.QUIT:
-        going = False
-    elif event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE:
-        going = False
-    elif event.type == pg.MOUSEBUTTONDOWN:
-        if fist.punch(chimp):
-            punch_sound.play()  # punch
-            chimp.punched()
-        else:
-            whiff_sound.play()  # miss
-    elif event.type == pg.MOUSEBUTTONUP:
-        fist.unpunch()
-
-
-

First we get all the available Events from pygame and loop through each -of them. The first two tests see if the user has quit our game, or pressed -the escape key. In these cases we just set going to False, allowing -us out of the infinite loop.

-

Next we just check to see if the mouse button was pressed or released. -If the button was pressed, we ask the fist object if it has collided with -the chimp. We play the appropriate sound effect, and if the monkey was hit, -we tell him to start spinning (by calling his punched() method).

-
-
-

Update the Sprites¶

-
allsprites.update()
-
-
-

Sprite groups have an update() method, which simply calls the update method -for all the sprites it contains. Each of the objects will move around, depending -on which state they are in. This is where the chimp will move one step side -to side, or spin a little farther if he was recently punched.

-
-
-

Draw The Entire Scene¶

-

Now that all the objects are in the right place, time to draw them.

-
screen.blit(background, (0, 0))
-allsprites.draw(screen)
-pg.display.flip()
-
-
-

The first blit call will draw the background onto the entire screen. This erases everything we saw from the previous frame (slightly inefficient, but good enough for this game). Next we call the draw() method of the sprite container. Since this sprite container is really an instance of the RenderPlain sprite group, it knows how to draw our sprites. Lastly, we flip() the contents of pygame's software double buffer to the screen. This makes everything we've drawn visible all at once.

-
-
-

Game Over¶

-

User has quit, time to clean up.

-
pg.quit()
-
-
-

Cleaning up the running game in pygame is extremely simple. -Since all variables are automatically destructed, we don't really have to do -anything, but calling pg.quit() explicitly cleans up pygame's internals.

-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/DisplayModes.html b/venv/Lib/site-packages/pygame/docs/generated/tut/DisplayModes.html deleted file mode 100644 index b0d7d82..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/DisplayModes.html +++ /dev/null @@ -1,314 +0,0 @@ - - - - - - - - - Pygame Tutorials - Setting Display Modes — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Setting Display Modes¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-
-

Introduction¶

-

Setting the display mode in pygame creates a visible image surface on the monitor. This surface can either cover the full screen, or be windowed on platforms that support a window manager. The display surface is nothing more than a standard pygame surface object. There are special functions needed in the pygame.display module to keep the image surface contents updated on the monitor.

-

Setting the display mode in pygame is an easier task than with most graphic libraries. The advantage is if your display mode is not available, pygame will emulate the display mode that you asked for. Pygame will select a display resolution and color depth that best matches the settings you have requested, then allow you to access the display with the format you have requested. In reality, since the pygame.display module is a binding around the SDL library, SDL is really doing all this work.

-

There are advantages and disadvantages to setting the display mode in this -manner. -The advantage is that if your game requires a specific display mode, -your game will run on platforms that do not support your requirements. -It also makes life easier when you're getting something started, -it is always easy to go back later and make the mode selection a little more -particular. -The disadvantage is that what you request is not always what you will get. -There is also a performance penalty when the display mode must be emulated. -This tutorial will help you understand the different methods for querying -the platforms display capabilities, and setting the display mode for your game.

-
-
-

Setting Basics¶

-

The first thing to learn about is how to actually set the current display mode. The display mode may be set at any time after the pygame.display module has been initialized. If you have previously set the display mode, setting it again will change the current mode. Setting the display mode is handled with the function pygame.display.set_mode((width, height), flags, depth). The only required argument in this function is a sequence containing the width and height of the new display mode. The depth argument is the requested bits per pixel for the surface. If the given depth is 8, pygame will create a color-mapped surface. When given a higher bit depth, pygame will use a packed color mode. Much more information about depths and color modes can be found in the documentation for the display and surface modules. The default value for depth is 0. When given an argument of 0, pygame will select the best bit depth to use, usually the same as the system's current bit depth. The flags argument lets you control extra features for the display mode. Again, more information about this is found in the pygame reference documents.

-
-
-

How to Decide¶

-

So how do you select a display mode that is going to work best with your graphic resources and the platform your game is running on? There are several methods for gathering information about the display device. All of these methods must be called after the display module has been initialized, but you likely want to call them before setting the display mode. First, pygame.display.Info() will return a special VidInfo object, which can tell you a lot about the graphics driver capabilities. The function pygame.display.list_modes(depth, flags) can be used to find the supported graphic modes of the system. pygame.display.mode_ok((width, height), flags, depth) takes the same arguments as set_mode(), but returns the closest matching bit depth to the one you request. Lastly, pygame.display.get_driver() will return the name of the graphics driver selected by pygame.

-

Just remember the golden rule. -Pygame will work with pretty much any display mode you request. -Some display modes will need to be emulated, -which will slow your game down, -since pygame will need to convert every update you make to the -"real" display mode. The best bet is to always let pygame -choose the best bit depth, -and convert all your graphic resources to that format when they are loaded. -You let pygame choose its bit depth by calling -set_mode() -with no depth argument or a depth of 0, -or you can call -mode_ok() -to find a closest matching bit depth to what you need.

-

When your display mode is windowed, -you usually must match the same bit depth as the desktop. -When you are fullscreen, some platforms can switch to any bit depth that -best suits your needs. -You can find the depth of the current desktop if you get a VidInfo object -before ever setting your display mode.

-

After setting the display mode, -you can find out information about its settings by getting a VidInfo object, -or by calling any of the Surface.get* methods on the display surface.
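As a quick illustration (not from the original page), the depth you actually got back can be read either from a VidInfo object or straight off the display surface; the (32, 32) shown below is simply what a typical modern desktop reports, not a guaranteed result.

>>> import pygame
>>> pygame.display.init()
>>> screen = pygame.display.set_mode((640, 480))
>>> info = pygame.display.Info()
>>> info.bitsize, screen.get_bitsize()   # both report the depth actually in use
(32, 32)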

-
-
-

Functions¶

-

These are the routines you can use to determine the most appropriate -display mode. -You can find more information about these functions in the display module -documentation.

-
-

pygame.display.mode_ok(size, flags, depth): Pick the best color depth for a display mode

-
-

This function takes the exact same arguments as pygame.display.set_mode(). -It returns the best available bit depth for the mode you have described. -If this returns zero, -then the desired display mode is not available without emulation.

-
-

pygame.display.list_modes(depth, flags): Get list of available fullscreen modes

-
-

Returns a list of supported display modes with the requested -depth and flags. -An empty list is returned when there are no modes. -The flags argument defaults to FULLSCREEN. -If you specify your own flags without FULLSCREEN, -you will likely get a return value of -1. -This means that any display size is fine, since the display will be windowed. -Note that the listed modes are sorted largest to smallest.

-
-

pygame.display.Info(): Create a video display information object

-
-

This function returns an object with many members describing -the display device. -Printing the VidInfo object will quickly show you all the -members and values for this object.

-
>>> import pygame.display
->>> pygame.display.init()
->>> info = pygame.display.Info()
->>> print(info)
-<VideoInfo(hw = 0, wm = 1,video_mem = 0
-        blit_hw = 0, blit_hw_CC = 0, blit_hw_A = 0,
-        blit_sw = 0, blit_sw_CC = 0, blit_sw_A = 0,
-        bitsize  = 32, bytesize = 4,
-        masks =  (16711680, 65280, 255, 0),
-        shifts = (16, 8, 0, 0),
-        losses =  (0, 0, 0, 8),
-        current_w = 1920, current_h = 1080
->
-
-
-
-
-

You can test all these flags as simply members of the VidInfo object.

-
-
-

Examples¶

-

Here are some examples of different methods to init the graphics display. -They should help you get an idea of how to go about setting your display mode.

-
>>> #give me the best depth with a 640 x 480 windowed display
->>> pygame.display.set_mode((640, 480))
-
->>> #give me the biggest 16-bit display available
->>> modes = pygame.display.list_modes(16)
->>> if not modes:
-...     print('16-bit not supported')
-... else:
-...     print('Found Resolution:', modes[0])
-...     pygame.display.set_mode(modes[0], FULLSCREEN, 16)
-
->>> #need an 8-bit surface, nothing else will do
->>> if pygame.display.mode_ok((800, 600), 0, 8) != 8:
-...     print('Can only work with an 8-bit display, sorry')
-... else:
-...     pygame.display.set_mode((800, 600), 0, 8)
-
-
-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/ImportInit.html b/venv/Lib/site-packages/pygame/docs/generated/tut/ImportInit.html deleted file mode 100644 index 2b11c40..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/ImportInit.html +++ /dev/null @@ -1,197 +0,0 @@ - - - - - - - - - Pygame Tutorials - Import and Initialize — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Import and Initialize¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-

Getting pygame imported and initialized is a very simple process. It is also -flexible enough to give you control over what is happening. Pygame is a -collection of different modules in a single python package. Some of the -modules are written in C, and some are written in python. Some modules -are also optional, and might not always be present.

-

This is just a quick introduction on what is going on when you import pygame. -For a clearer explanation definitely see the pygame examples.

-
-

Import¶

-

First we must import the pygame package. Since pygame version 1.4 this -has been updated to be much easier. Most games will import all of pygame like this.

-
import pygame
-from pygame.locals import *
-
-
-

The first line here is the only necessary one. It imports all the available pygame -modules into the pygame package. The second line is optional, and puts a limited -set of constants and functions into the global namespace of your script.

-

An important thing to keep in mind is that several pygame modules are optional. -For example, one of these is the font module. When you "import pygame", pygame -will check to see if the font module is available. If the font module is available -it will be imported as "pygame.font". If the module is not available, "pygame.font" -will be set to None. This makes it fairly easy to later on test if the font module is available.
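A small sketch of that check (illustrative only, not code from the original page; it assumes pygame is installed normally):

import pygame

pygame.init()                          # initializes whichever optional modules are present
if pygame.font:                        # the module imported, so it is safe to use
    font = pygame.font.Font(None, 32)  # default font at 32 pixels
else:
    print("Warning, fonts disabled")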

-
-
-

Init¶

-

Before you can do much with pygame, you will need to initialize it. The most common -way to do this is just make one call.

-
pygame.init()
-
-
-

This will attempt to initialize all the pygame modules for you. Not all pygame modules -need to be initialized, but this will automatically initialize the ones that do. You can -also easily initialize each pygame module by hand. For example to only initialize the -font module you would just call.

-
pygame.font.init()
-
-
-

Note that if there is an error when you initialize with "pygame.init()", it will fail silently: modules that could not be initialized are simply skipped (init() returns a count of successful and failed initializations). When hand initializing modules like this, any errors will raise an exception. Any modules that must be initialized also have a "get_init()" function, which will return True if the module has been initialized.

-

It is safe to call the init() function for any module more than once.
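For example, initializing just the font module by hand and checking it (a short illustrative sketch, not from the original page):

import pygame

pygame.font.init()                 # raises an exception if something goes wrong
print(pygame.font.get_init())      # True once the module is initialized
pygame.font.init()                 # calling init() again is harmless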

-
-
-

Quit¶

-

Modules that are initialized also usually have a quit() function that will clean up. -There is no need to explicitly call these, as pygame will cleanly quit all the -initialized modules when python finishes.

-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/MakeGames.html b/venv/Lib/site-packages/pygame/docs/generated/tut/MakeGames.html deleted file mode 100644 index b9f6ed9..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/MakeGames.html +++ /dev/null @@ -1,237 +0,0 @@ - - - - - - - - - Making Games With Pygame — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Making Games With Pygame¶

-
-
-
-

Table of Contents¶

-

1. Introduction

-
-
-

2. Revision: Pygame fundamentals

-
-
-

3. Kicking things off

-
-
-

4. Game object classes

-
-
-

5. User-controllable objects

-
-
-

6. Putting it all together

-
-
-
-
-

1. Introduction¶

-

First of all, I will assume you have read the Line By Line Chimp -tutorial, which introduces the basics of Python and pygame. Give it a read before reading this -tutorial, as I won't bother repeating what that tutorial says (or at least not in as much detail). This tutorial is aimed at those -who understand how to make a ridiculously simple little "game", and who would like to make a relatively simple game like Pong. -It introduces you to some concepts of game design, some simple mathematics to work out ball physics, and some ways to keep your -game easy to maintain and expand.

-

All the code in this tutorial works toward implementing TomPong, -a game I've written. By the end of the tutorial, you should not only have a firmer grasp of pygame, but -you should also understand how TomPong works, and how to make your own version.

-

Now, for a brief recap of the basics of pygame. A common method of organising the code for a game is to divide it into the following -six sections:

-
-
    -
  • Load modules which are required in the game. Standard stuff, except that you should -remember to import the pygame local names as well as the pygame module itself

  • -
  • Resource handling classes; define some classes to handle your most basic resources, -which will be loading images and sounds, as well as connecting and disconnecting to and from networks, loading save game -files, and any other resources you might have.

  • -
  • Game object classes; define the classes for your game object. In the pong example, -these will be one for the player's bat (which you can initialise multiple times, one for each player in the game), and one -for the ball (which can again have multiple instances). If you're going to have a nice in-game menu, it's also a good idea to make a -menu class.

  • -
  • Any other game functions; define other necessary functions, such as scoreboards, menu handling, etc. Any code that you could put into the main game logic, but that would make understanding said logic harder, should be put into its own function. Since plotting a scoreboard isn't game logic, for example, it should be moved into its own function.

  • -
  • Initialise the game, including the pygame objects themselves, the background, the game -objects (initialising instances of the classes) and any other little bits of code you might want to add in.

  • -
  • The main loop, into which you put any input handling (i.e. watching for users hitting -keys/mouse buttons), the code for updating the game objects, and finally for updating the screen.

  • -
-
-

Every game you make will have some or all of those sections, possibly with more of your own. For the purposes of this tutorial, I will write about how TomPong is laid out, and the ideas I write about can be transferred to almost any kind of game you might make. I will also assume that you want to keep all of the code in a single file, but if you're making a reasonably large game, it's often a good idea to split certain sections into module files. Putting the game object classes into a file called objects.py, for example, can help you keep game logic separate from game objects. If you have a lot of resource handling code, it can also be handy to put that into resources.py. You can then use from objects import * and from resources import * to pull in all of the classes and functions, as in the sketch below.
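As a sketch of that layout (only the file names objects.py and resources.py come from the paragraph above; the class and function bodies are hypothetical placeholders, not TomPong code):

# objects.py -- game object classes
import pygame

class Bat(pygame.sprite.Sprite):
    def __init__(self, side):
        pygame.sprite.Sprite.__init__(self)
        self.side = side              # e.g. "left" or "right" player

# resources.py -- resource handling helpers
def load_png(name):
    ...                               # image loading, error checking, etc.

# main game file
# from objects import *
# from resources import *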

-
-
-

1.1. A note on coding styles¶

-

The first thing to remember when approaching any programming project is to decide on a coding style, and stay consistent. Python -solves a lot of the problems because of its strict interpretation of whitespace and indentation, but you can still choose the size -of your indentations, whether you put each module import on a new line, how you comment code, etc. You'll see how I do all of this -in the code examples; you needn't use my style, but whatever style you adopt, use it all the way through the program code. Also try -to document all of your classes, and comment on any bits of code that seem obscure, though don't start commenting the obvious. I've -seen plenty of people do the following:

-
player1.score += scoreup        # Add scoreup to player1 score
-
-
-

The worst code is poorly laid out, with seemingly random changes in style, and poor documentation. Poor code is not only annoying -for other people, but it also makes it difficult for you to maintain.

-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/MoveIt.html b/venv/Lib/site-packages/pygame/docs/generated/tut/MoveIt.html deleted file mode 100644 index 67469fa..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/MoveIt.html +++ /dev/null @@ -1,539 +0,0 @@ - - - - - - - - - Pygame Tutorials - Help! How Do I Move An Image? — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Help! How Do I Move An Image?¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-

Many people new to programming and graphics have a hard time figuring -out how to make an image move around the screen. Without understanding -all the concepts, it can be very confusing. You're not the first person -to be stuck here, I'll do my best to take things step by step. We'll even -try to end with methods of keeping your animations efficient.

-

Note that we won't be teaching you to program with python in this article, -just introduce you to some of the basics with pygame.

-
-

Just Pixels On The Screen¶

-

Pygame has a display Surface. This is basically an image that is visible -on the screen, and the image is made up of pixels. The main way you change -these pixels is by calling the blit() function. This copies the pixels -from one image onto another.

-

This is the first thing to understand. When you blit an image onto the -screen, you are simply changing the color of the pixels on the screen. -Pixels aren't added or moved, we just change the colors of the pixels already -on the screen. These images you blit to the screen are also Surfaces in -pygame, but they are in no way connected to the display Surface. When they -are blitted to the screen they are copied into the display, but you still -have a unique copy of the original.

-

With this brief description, perhaps you can already understand what is needed to "move" an image. We don't actually move anything at all. We simply blit the image in a new position. But before we draw the image in the new position, we'll need to "erase" the old one. Otherwise the image will be visible in two places on the screen. By rapidly erasing the image and redrawing it in a new place, we achieve the "illusion" of movement.

-

Through the rest of this tutorial we will break this process down into simpler steps, even explaining the best ways to have multiple images moving around the screen. You probably already have questions, like: how do we "erase" the image before drawing it in a new position? Perhaps you're still totally lost? Well, hopefully the rest of this tutorial can straighten things out for you.

-
-
-

Let's Go Back A Step¶

-

Perhaps the concept of pixels and images is still a little foreign to -you? Well good news, for the next few sections we are going to use code that -does everything we want, it just doesn't use pixels. We're going to create -a small python list of 6 numbers, and imagine it represents some fantastic -graphics we could see on the screen. It might actually be surprising how -closely this represents exactly what we'll later be doing with real graphics.

-

So let's begin by creating our screen list and fill it with a beautiful -landscape of 1s and 2s.

-
>>> screen = [1, 1, 2, 2, 2, 1]
->>> print(screen)
-[1, 1, 2, 2, 2, 1]
-
-
-

Now we've created our background. It's not going to be very exciting -unless we also draw a player on the screen. We'll create a mighty hero -that looks like the number 8. Let's stick him near the middle of the map -and see what it looks like.

-
>>> screen[3] = 8
->>> print(screen)
-[1, 1, 2, 8, 2, 1]
-
-
-

This might have been as far as you've gotten if you jumped right in doing -some graphics programming with pygame. You've got some nice looking stuff -on the screen, but it cannot move anywhere. Perhaps now that our screen -is just a list of numbers, it's easier to see how to move him?

-
-
-

Making The Hero Move¶

-

Before we can start moving the character, we need to keep track of some sort of position for him. In the last section when we drew him, we just picked an arbitrary position. Let's do it a little more officially this time.

-
>>> playerpos = 3
->>> screen[playerpos] = 8
->>> print(screen)
-[1, 1, 2, 8, 2, 1]
-
-
-

Now it is pretty easy to move him to a new position. We simply change -the value of playerpos, and draw him on the screen again.

-
>>> playerpos = playerpos - 1
->>> screen[playerpos] = 8
->>> print(screen)
-[1, 1, 8, 8, 2, 1]
-
-
-

Whoops. Now we can see two heroes. One in the old position, and one in his new position. This is exactly the reason we need to "erase" the hero in his old position before we draw him in the new position. To erase him, we need to change that value in the list back to what it was before the hero was there. That means we need to keep track of the values on the screen before the hero replaced them. There are several ways you could do this, but the easiest is usually to keep a separate copy of the screen background. This means we need to make some changes to our little game.

-
-
-

Creating A Map¶

-

What we want to do is create a separate list we will call our background. -We will create the background so it looks like our original screen did, -with 1s and 2s. Then we will copy each item from the background to the screen. -After that we can finally draw our hero back onto the screen.

-
>>> background = [1, 1, 2, 2, 2, 1]
->>> screen = [0]*6                         #a new blank screen
->>> for i in range(6):
-...     screen[i] = background[i]
->>> print(screen)
-[1, 1, 2, 2, 2, 1]
->>> playerpos = 3
->>> screen[playerpos] = 8
->>> print(screen)
-[1, 1, 2, 8, 2, 1]
-
-
-

It may seem like a lot of extra work. We're no farther off than we were -before the last time we tried to make him move. But this time we have the -extra information we need to move him properly.

-
-
-

Making The Hero Move (Take 2)¶

-

This time it will be easy to move the hero around. First we will erase the hero from his old position. We do this by copying the correct value from the background onto the screen. Then we will draw the character in his new position on the screen.

-
>>> print(screen)
-[1, 1, 2, 8, 2, 1]
->>> screen[playerpos] = background[playerpos]
->>> playerpos = playerpos - 1
->>> screen[playerpos] = 8
->>> print(screen)
-[1, 1, 8, 2, 2, 1]
-
-
-

There it is. The hero has moved one space to the left. We can use this -same code to move him to the left again.

-
>>> screen[playerpos] = background[playerpos]
->>> playerpos = playerpos - 1
->>> screen[playerpos] = 8
->>> print(screen)
-[1, 8, 2, 2, 2, 1]
-
-
-

Excellent! This isn't exactly what you'd call smooth animation. But with -a couple small changes, we'll make this work directly with graphics on -the screen.

-
-
-

Definition: "blit"¶

-

In the next sections we will transform our program from using lists to -using real graphics on the screen. When displaying the graphics we will -use the term blit frequently. If you are new to doing graphics -work, you are probably unfamiliar with this common term.

-

BLIT: Basically, blit means to copy graphics from one image -to another. A more formal definition is to copy an array of data -to a bitmapped array destination. You can think of blit as just -"assigning" pixels. Much like setting values in our screen-list -above, blitting assigns the color of pixels in our image.

-

Other graphics libraries will use the word bitblt, or just blt, -but they are talking about the same thing. It is basically copying -memory from one place to another. Actually, it is a bit more advanced than -straight copying of memory, since it needs to handle things like pixel -formats, clipping, and scanline pitches. Advanced blitters can also -handle things like transparency and other special effects.

-
-
-

Going From The List To The Screen¶

-

To take the code we see in the above two examples and make it work with pygame is very straightforward. We'll pretend we have loaded some pretty graphics and named them "terrain1", "terrain2", and "hero". Where before we assigned numbers to a list, we now blit graphics to the screen. Another big change: instead of using positions as a single index (0 through 5), we now need a two dimensional coordinate. We'll pretend each of the graphics in our game is 10 pixels wide.

-
>>> background = [terrain1, terrain1, terrain2, terrain2, terrain2, terrain1]
->>> screen = create_graphics_screen()
->>> for i in range(6):
-...     screen.blit(background[i], (i*10, 0))
->>> playerpos = 3
->>> screen.blit(playerimage, (playerpos*10, 0))
-
-
-

Hmm, that code should seem very familiar, and hopefully more importantly; -the code above should make a little sense. Hopefully my illustration of setting -simple values in a list shows the similarity of setting pixels on the screen -(with blit). The only part that's really extra work is converting the player position -into coordinates on the screen. For now we just use a crude (playerpos*10, 0) , -but we can certainly do better than that. Now let's move the player -image over a space. This code should have no surprises.

-
>>> screen.blit(background[playerpos], (playerpos*10, 0))
->>> playerpos = playerpos - 1
->>> screen.blit(playerimage, (playerpos*10, 0))
-
-
-

There you have it. With this code we've shown how to display a simple background -with a hero's image on it. Then we've properly moved that hero one space -to the left. So where do we go from here? Well for one the code is still -a little awkward. First thing we'll want to do is find a cleaner way to represent -the background and player position. Then perhaps a bit of smoother, real -animation.

-
-
-

Screen Coordinates¶

-

To position an object on the screen, we need to tell the blit() function where to put the image. In pygame we always pass positions as an (X, Y) coordinate. This represents the number of pixels to the right, and the number of pixels down to place the image. The top-left corner of a Surface is coordinate (0, 0). Moving to the right a little would be (10, 0), and then moving down just as much would be (10, 10). When blitting, the position argument represents where the topleft corner of the source should be placed on the destination.

-

Pygame comes with a convenient container for these coordinates, the Rect. A Rect basically represents a rectangular area in these coordinates. It has a topleft corner and a size. The Rect comes with a lot of convenient methods which help you move and position it. In our next examples we will represent the positions of our objects with Rects.

-

Also know that many functions in pygame expect Rect arguments. All of these -functions can also accept a simple tuple of 4 elements (left, top, width, -height). You aren't always required to use these Rect objects, but you will -mainly want to. Also, the blit() function can accept a Rect as its position -argument, it simply uses the topleft corner of the Rect as the real position.
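A few of those Rect conveniences in action (a small sketch, not code from this tutorial; the rectangles and values are made up for illustration):

import pygame

position = pygame.Rect(30, 0, 10, 10)     # left, top, width, height
position = position.move(2, 0)            # returns a new Rect shifted 2 pixels right
position.topleft = (40, 0)                # or place the corner directly
other = pygame.Rect(35, 0, 10, 10)
print(position.colliderect(other))        # True: the two rectangles overlap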

-
-
-

Changing The Background¶

-

In all our previous sections, we've been storing the background as a list -of different types of ground. That is a good way to create a tile-based game, -but we want smooth scrolling. To make that a little easier, we're going to -change the background into a single image that covers the whole screen. This -way, when we want to "erase" our objects (before redrawing them) we only need -to blit the section of the erased background onto the screen.

-

By passing an optional third Rect argument to blit, we tell blit to only -use that subsection of the source image. You'll see that in use below as we -erase the player image.
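For instance, if position is a Rect covering where the player was just drawn, the erase step can be sketched as (assuming the full-screen background described above):

>>> screen.blit(background, position, position)   # copy only that area of the background back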

-

Also note, now when we finish drawing to the screen, we call pygame.display.update() -which will show everything we've drawn onto the screen.

-
-
-

Smooth Movement¶

-

To make something appear to move smoothly, we only want to move it a couple of pixels at a time. Here is the code to make an object move smoothly across the screen. Based on what we now know, this should look pretty simple.

-
>>> screen = create_screen()
->>> player = load_player_image()
->>> background = load_background_image()
->>> screen.blit(background, (0, 0))        #draw the background
->>> position = player.get_rect()
->>> screen.blit(player, position)          #draw the player
->>> pygame.display.update()                #and show it all
->>> for x in range(100):                   #animate 100 frames
-...     screen.blit(background, position, position) #erase
-...     position = position.move(2, 0)     #move player
-...     screen.blit(player, position)      #draw new player
-...     pygame.display.update()            #and show it all
-...     pygame.time.delay(100)             #stop the program for 1/10 second
-
-
-

There you have it. This is all the code that is needed to smoothly animate -an object across the screen. We can even use a pretty background character. -Another benefit of doing the background this way, the image for the player -can have transparency or cutout sections and it will still draw correctly -over the background (a free bonus).

-

We also throw in a call to pygame.time.delay() at the end of our loop above. This slows down our program a little; otherwise it might run so fast that you would not see it.

-
-
-

So, What Next?¶

-

Well there we have it. Hopefully this article has done everything it promised -to do. But, at this point the code really isn't ready for the next best-selling -game. How do we easily have multiple moving objects? What exactly are those -mysterious functions like load_player_image()? We also need a way to get simple -user input, and loop for more than 100 frames. We'll take the example we -have here, and turn it into an object oriented creation that would make momma -proud.

-
-
-

First, The Mystery Functions¶

-

Full information on these types of functions can be found in other tutorials and the reference documentation. The pygame.image module has a load() function which will do what we want. The lines to load the images should become this.

-
>>> player = pygame.image.load('player.bmp').convert()
->>> background = pygame.image.load('liquid.bmp').convert()
-
-
-

We can see that's pretty simple; the load function just takes a filename and returns a new Surface with the loaded image. After loading we make a call to the Surface method, convert(). Convert returns us a new Surface of the image, but now converted to the same pixel format as our display. Since the images will be the same format as the screen, they will blit very quickly. If we did not convert, the blit() function would be slower, since it would have to convert from one type of pixel to another as it goes.

-

You may also have noticed that both the load() and convert() return new -Surfaces. This means we're really creating two Surfaces on each of these -lines. In other programming languages, this results in a memory leak (not -a good thing). Fortunately Python is smart enough to handle this, and pygame -will properly clean up the Surface we end up not using.

-

The other mystery function we saw in the above example was create_screen(). -In pygame it is simple to create a new window for graphics. The code to create -a 640x480 surface is below. By passing no other arguments, pygame will just -pick the best color depth and pixel format for us.

-
>>> screen = pygame.display.set_mode((640, 480))
-
-
-
-
-

Handling Some Input¶

-

We desperately need to change the main loop to look for any user input (like when the user closes the window). We need to add "event handling" to our program. All graphical programs use this Event Based design. The program gets events like "keyboard pressed" or "mouse moved" from the computer. Then the program responds to the different events. Here's what the code should look like. Instead of looping for 100 frames, we'll keep looping until the user asks us to stop.

-
>>> while 1:
-...     for event in pygame.event.get():
-...         if event.type in (QUIT, KEYDOWN):
-...             sys.exit()
-...     move_and_draw_all_game_objects()
-
-
-

What this code does is simple: first loop forever, then check whether there are any events from the user. We exit the program if the user presses the keyboard or the close button on the window. After we've checked all the events we move and draw our game objects. (We'll also erase them before they move.)
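One note: the loop above assumes QUIT, KEYDOWN, and sys are already available in the namespace. A minimal sketch of the imports that would make it self-contained might look like this:

>>> import sys
>>> import pygame
>>> from pygame.locals import QUIT, KEYDOWN   # the event-type constants used above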

-
-
-

Moving Multiple Images¶

-

Here's the part where we're really going to change things around. Let's -say we want 10 different images moving around on the screen. A good way to -handle this is to use python's classes. We'll create a class that represents -our game object. This object will have a function to move itself, and then -we can create as many as we like. The functions to draw and move the object -need to work in a way where they only move one frame (or one step) at a time. -Here's the python code to create our class.

-
>>> class GameObject:
-...     def __init__(self, image, height, speed):
-...         self.speed = speed
-...         self.image = image
-...         self.pos = image.get_rect().move(0, height)
-...     def move(self):
-...         self.pos = self.pos.move(0, self.speed)
-...         if self.pos.right > 600:
-...             self.pos.left = 0
-
-
-

So we have two methods in our class. The __init__ method constructs our object. It positions the object and sets its speed. The move method moves the object one step. If it's gone too far, it moves the object back to the left.

-
-
-

Putting It All Together¶

-

Now with our new object class, we can put together the entire game. Here -is what the main function for our program will look like.

-
>>> screen = pygame.display.set_mode((640, 480))
->>> player = pygame.image.load('player.bmp').convert()
->>> background = pygame.image.load('background.bmp').convert()
->>> screen.blit(background, (0, 0))
->>> objects = []
->>> for x in range(10):                    #create 10 objects
-...     o = GameObject(player, x*40, x)
-...     objects.append(o)
->>> while 1:
-...     for event in pygame.event.get():
-...         if event.type in (QUIT, KEYDOWN):
-...             sys.exit()
-...     for o in objects:
-...         screen.blit(background, o.pos, o.pos)
-...     for o in objects:
-...         o.move()
-...         screen.blit(o.image, o.pos)
-...     pygame.display.update()
-...     pygame.time.delay(100)
-
-
-

And there it is. This is the code we need to animate 10 objects on the screen. -The only point that might need explaining is the two loops we use to clear -all the objects and draw all the objects. In order to do things properly, -we need to erase all the objects before drawing any of them. In our sample -here it may not matter, but when objects are overlapping, using two loops -like this becomes important.

-
-
-

You Are On Your Own From Here¶

-

So what would be next on your road to learning? Well, first play around with this example a bit. The full running version of this example is available in the pygame examples directory. It is the example named moveit.py. Take a look at the code and play with it, run it, learn it.

-

Things you may want to work on include having more than one type of object, finding a way to cleanly "delete" objects when you don't want to show them any more, and updating the display.update() call to pass a list of the areas on-screen that have changed.
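As a rough sketch of that last suggestion (reusing the objects, background, and screen names from the example above; the dirty list is just illustrative), the drawing part of the loop could collect the rectangles returned by blit() and hand them to update():

>>> dirty = []
>>> for o in objects:
...     dirty.append(screen.blit(background, o.pos, o.pos))   # erase; blit returns the changed Rect
>>> for o in objects:
...     o.move()
...     dirty.append(screen.blit(o.image, o.pos))             # draw at the new position
>>> pygame.display.update(dirty)                              # repaint only the changed areas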

-

There are also other tutorials and examples in pygame that cover these -issues. So when you're ready to keep learning, keep on reading. :-)

-

Also, feel free to come to the pygame mailing list or chatroom with any questions on this stuff. There are always folks on hand who can help you out with this sort of business.

-

Lastly, have fun, that's what games are for!

-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/PygameIntro.html b/venv/Lib/site-packages/pygame/docs/generated/tut/PygameIntro.html deleted file mode 100644 index 0331936..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/PygameIntro.html +++ /dev/null @@ -1,418 +0,0 @@ - - - - - - - - - Pygame Intro — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Python Pygame Introduction¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-

This article is an introduction to the pygame library -for Python programmers. -The original version appeared in the Py Zine, -volume 1 issue 3. This version contains minor revisions, to -create an all-around better article. Pygame is a Python extension -library that wraps the SDL library -and its helpers.

-
-

HISTORY¶

-

Pygame started in the summer of 2000. Being a C programmer of many years, I discovered both Python and SDL at about the same time. You are already familiar with Python; at the time it was at version 1.5.2. You may need an introduction to SDL, which is the Simple DirectMedia Layer. Created by Sam Lantinga, SDL is a cross-platform C library for controlling multimedia, comparable to DirectX. It has been used for hundreds of commercial and open source games. I was impressed at how clean and straightforward both projects were, and it wasn't long before I realized mixing Python and SDL was an interesting proposal.

-

I discovered a small project already underway with exactly the same idea, PySDL. Created by Mark Baker, PySDL was a straightforward implementation of SDL as a Python extension. The interface was cleaner than a generic SWIG wrapping, but I felt it forced a "C style" of code. The sudden death of PySDL prompted me to take on a new project of my own.

-

I wanted to put together a project that really took advantage of -Python. My goal was to make it easy to do the simple things, and -straightforward to do the difficult things. Pygame was started in -October, 2000. Six months later pygame version 1.0 was released.

-
-
-

TASTE¶

-

I find the best way to understand a new library is to jump straight -into an example. In the early days of pygame, I created a bouncing ball -animation with 7 lines of code. Let's take a look at a friendlier -version of that same thing. This should be simple enough to follow -along, and a complete breakdown follows.

(image: intro_ball.gif, the bouncing ball animation)
 1import sys, pygame
- 2pygame.init()
- 3
- 4size = width, height = 320, 240
- 5speed = [2, 2]
- 6black = 0, 0, 0
- 7
- 8screen = pygame.display.set_mode(size)
- 9
-10ball = pygame.image.load("intro_ball.gif")
-11ballrect = ball.get_rect()
-12
-13while 1:
-14    for event in pygame.event.get():
-15        if event.type == pygame.QUIT: sys.exit()
-16
-17    ballrect = ballrect.move(speed)
-18    if ballrect.left < 0 or ballrect.right > width:
-19        speed[0] = -speed[0]
-20    if ballrect.top < 0 or ballrect.bottom > height:
-21        speed[1] = -speed[1]
-22
-23    screen.fill(black)
-24    screen.blit(ball, ballrect)
-25    pygame.display.flip()
-
-
-

This is as simple as you can get for a bouncing animation. First we see that importing and initializing pygame is nothing noteworthy. The import pygame imports the package with all the available pygame modules. The call to pygame.init() initializes each of these modules.

-

On line 8 we create a -graphical window with the call to pygame.display.set_mode(). -Pygame and SDL make this easy by defaulting to the best graphics modes -for the graphics hardware. You can override the mode and SDL will -compensate for anything the hardware cannot do. Pygame represents -images as Surface objects. -The display.set_mode() function creates a new Surface -object that represents the actual displayed graphics. Any drawing you -do to this Surface will become visible on the monitor.

-

At line 10 we load -our ball image. Pygame supports a variety of image formats through the -SDL_image library, including BMP, JPG, PNG, TGA, and GIF. -The pygame.image.load() function -returns us a Surface with the ball data. The Surface will keep any -colorkey or alpha transparency from the file. After loading the ball -image we create a variable named ballrect. Pygame comes with a -convenient utility object type named Rect, -which represents a rectangular area. Later, in the animation part of -the code, we will see what the Rect objects can do.

-

At this point, line 13, our program is initialized and ready to run. Inside an infinite loop we check for user input, move the ball, and then draw the ball. If you are familiar with GUI programming, you have had experience with events and event loops. In pygame this is no different: we check if a QUIT event has happened. If so we simply exit the program; pygame will ensure everything is cleanly shut down.

-

It is time to update our position for the ball. Line 17 moves the ballrect variable by the current speed. Lines 18 through 21 reverse the speed if the ball has moved outside the screen. Not exactly Newtonian physics, but it is all we need.

-

On line 23 we erase -the screen by filling it with a black RGB color. If you have never -worked with animations this may seem strange. You may be asking "Why do -we need to erase anything, why don't we just move the ball on the -screen?" That is not quite the way computer animation works. Animation -is nothing more than a series of single images, which when displayed in -sequence do a very good job of fooling the human eye into seeing -motion. The screen is just a single image that the user sees. If we did -not take the time to erase the ball from the screen, we would actually -see a "trail" of the ball as we continuously draw the ball in its new -positions.

-

On line 24 we draw the ball image onto the screen. -Drawing of images is handled by the -Surface.blit() method. -A blit basically means copying pixel colors from one image to another. -We pass the blit method a source Surface -to copy from, and a position to place the source onto the destination.

-

The last thing we need to do is actually update the visible display. Pygame manages the display with a double buffer. When we are finished drawing we call the pygame.display.flip() method. This makes everything we have drawn on the screen Surface become visible. This buffering makes sure we only see completely drawn frames on the screen. Without it, the user would see the half completed parts of the screen as they are being created.

-

That concludes this short introduction to pygame. Pygame also has modules to do things like input handling for the keyboard, mouse, and joystick. It can mix audio and decode streaming music. With the Surfaces you can draw simple shapes, rotate and scale the picture, and even manipulate the pixels of an image in realtime as numpy arrays. Pygame also has the ability to act as a cross platform display layer for PyOpenGL. Most of the pygame modules are written in C; only a few are actually done in Python.

-

The pygame website has full reference documentation for every pygame -function and tutorials for all ranges of users. The pygame source comes -with many examples of things like monkey punching and UFO shooting.

-
-
-

PYTHON AND GAMING¶

-

"Is Python suitable for gaming?" The answer is, "It depends on the -game."

-

Python is actually quite capable of running games. It will likely even surprise you how much is possible in under 30 milliseconds. Still, it is not hard to reach the ceiling once your game begins to get more complex. Any game running in realtime will be making full use of the computer.

(image: intro_blade.jpg, screenshot from Severance: Blade of Darkness)

Over the past several years there has been an interesting trend in game development, -the move towards higher level languages. Usually a game is split into -two major parts. The game engine, which must be as fast as possible, -and the game logic, which makes the engine actually do something. It -wasn't long ago when the engine of a game was written in assembly, with -portions written in C. Nowadays, C has moved to the game engine, while -often the game itself is written in higher level scripting languages. -Games like Quake3 and Unreal run these scripts as portable bytecode.

-

In early 2001, developer Rebel Act Studios finished their game, Severance: Blade of Darkness. Using their own custom 3D engine, the rest of the game is written with Python. The game is a bloody action 3rd person perspective fighter. You control medieval warriors, chaining intricate decapitating combination attacks while exploring dungeons and castles. You can download third party add-ons for this game, and find they are nothing more than Python source files.

-

More recently, Python has been used in a variety of games like Freedom -Force, and Humungous' Backyard Sports Series.

(image: intro_freedom.jpg, screenshot from Freedom Force)

Pygame and SDL serve as an excellent C engine for 2D games. -Games will still find the largest part of their runtime is spent -inside SDL handling the graphics. -SDL can take advantage of graphics hardware acceleration. -Enabling this can change a game from running around 40 frames per -second to over 200 frames per second. When you see your Python game -running at 200 frames per second, you realize that Python and games can -work together.

-

It is impressive how well both Python and SDL work on multiple platforms. For example, in May of 2001 I released my own full pygame project, SolarWolf, an arcade style action game. One thing that has surprised me is that one year later there has been no need for any patches, bug fixes, or updates. The game was developed entirely on Windows, but runs on Linux, Mac OS X, and many Unixes without any extra work on my end.

-

Still, there are very clear limitations. The best way to manage hardware accelerated graphics is not always the way to get the fastest results from software rendering. Hardware support is not available on all platforms. When a game gets more complex, it often must commit to one or the other. SDL has some other design limitations; things like full screen scrolling graphics can quickly bring your game down to unplayable speeds. While SDL is not suitable for all types of games, remember companies like Loki have used SDL to run a wide variety of retail quality titles.

-

Pygame is fairly low-level when it comes to writing games. You'll -quickly find yourself needing to wrap common functions into your own -game environment. The great thing about this is there is nothing inside -pygame to get in your way. Your program is in full control of -everything. The side effect of that is you will find yourself borrowing -a lot of code to get a more advanced framework put together. You'll -need a better understanding of what you are doing.

-
-
-

CLOSING¶

-

Developing games is very rewarding, there is something exciting about -being able to see and interact with the code you've written. Pygame -currently has almost 30 other projects using it. Several of them are -ready to play now. You may be surprised to visit the pygame website, -and see what other users have been able to do with Python.

-

One thing that has caught my attention is the amount of people coming -to Python for the first time to try game development. I can see why -games are a draw for new programmers, but it can be difficult since -creating games requires a firmer understanding of the language. I've -tried to support this group of users by writing many examples and -pygame tutorials for people new to these concepts.

-

In the end, my advice is to keep it simple. I cannot stress this enough. If you are planning to create your first game, there is a lot to learn. Even a simple game will challenge your designs, and complex games don't necessarily mean fun games. When you understand Python, you can use pygame to create a simple game in only one or two weeks. From there you'll need a surprising amount of time to add the polish to make that into a full presentable game.

-
-

Pygame Modules Overview¶

cdrom        playback
cursors      load cursor images, includes standard cursors
display      control the display window or screen
draw         draw simple shapes onto a Surface
event        manage events and the event queue
font         create and render TrueType fonts
image        save and load images
joystick     manage joystick devices
key          manage the keyboard
mouse        manage the mouse
sndarray     manipulate sounds with numpy
surfarray    manipulate images with numpy
time         control timing
transform    scale, rotate, and flip images

-
-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/SpriteIntro.html b/venv/Lib/site-packages/pygame/docs/generated/tut/SpriteIntro.html deleted file mode 100644 index b691185..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/SpriteIntro.html +++ /dev/null @@ -1,498 +0,0 @@ - - - - - - - - - Pygame Tutorials - Sprite Module Introduction — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Sprite Module Introduction¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-

Pygame version 1.3 comes with a new module, pygame.sprite. This module is -written in Python and includes some higher-level classes to manage your game -objects. By using this module to its full potential, you can easily manage and -draw your game objects. The sprite classes are very optimized, so it's likely -your game will run faster with the sprite module than without.

-

The sprite module is also meant to be very generic. It turns out you can use it with nearly any type of gameplay. All this flexibility comes with a slight penalty: it needs a little understanding to use it properly. The reference documentation for the sprite module can keep you running, but you'll probably need a bit more explanation of how to use pygame.sprite in your own game.

-

Several of the pygame examples (like "chimp" and "aliens") have been updated to use the sprite module. You may want to look into those first to see what this sprite module is all about. The chimp example even has its own line-by-line tutorial, which may help you get more of an understanding of programming with python and pygame.

-

Note that this introduction will assume you have a bit of experience -programming with python, and are somewhat familiar with the different parts of -creating a simple game. In this tutorial the word "reference" is occasionally -used. This represents a python variable. Variables in python are references, -so you can have several variables all pointing to the same object.

-
-

History Lesson¶

-

The term "sprite" is a holdover from older computer and game machines. These -older boxes were unable to draw and erase normal graphics fast enough for them -to work as games. These machines had special hardware to handle game like -objects that needed to animate very quickly. These objects were called -"sprites" and had special limitations, but could be drawn and updated very -fast. They usually existed in special overlay buffers in the video. These days -computers have become generally fast enough to handle sprite like objects -without dedicated hardware. The term sprite is still used to represent just -about anything in a 2D game that is animated.

-
-
-

The Classes¶

-

The sprite module comes with two main classes. The first is Sprite, which should be used as a base class for all your game -objects. This class doesn't really do anything on its own, it just includes -several functions to help manage the game object. The other type of class is -Group. The Group class is a container for -different Sprite objects. There are actually several different types of -group classes. Some of the Groups can draw all the elements they contain, -for example.

-

This is all there really is to it. We'll start with a description of what each -type of class does, and then discuss the proper ways to use these two classes.

-
-
-

The Sprite Class¶

-

As mentioned before, the Sprite class is designed to be a base class for all -your game objects. You cannot really use it on its own, as it only has several -methods to help it work with the different Group classes. The sprite keeps -track of which groups it belongs to. -The class constructor (__init__ method) takes an argument of a -Group (or list of Groups) the Sprite instance should belong to. -You can also change the Group membership for the Sprite with the -add() and -remove() methods. -There is also a groups() method, -which returns a list of the current groups containing the sprite.

-

When using your Sprite classes it's best to think of them as "valid" or "alive" when they belong to one or more Groups. When you remove the instance from all groups, pygame will clean up the object. (Unless you have your own references to the instance somewhere else.) The kill() method removes the sprite from all groups it belongs to. This will cleanly delete the sprite object. If you've put some little games together, you'll know sometimes cleanly deleting a game object can be tricky. The sprite also comes with an alive() method, which returns true if it is still a member of any groups.
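As a minimal, illustrative sketch (the Bullet class and its numbers are made up for this example), a Sprite subclass that registers itself with groups and later removes itself with kill() could look like this:

class Bullet(pygame.sprite.Sprite):
    def __init__(self, pos, *groups):
        pygame.sprite.Sprite.__init__(self, *groups)   # join any starting groups
        self.image = pygame.Surface((4, 4))
        self.rect = self.image.get_rect(topleft=pos)

    def update(self):
        self.rect.move_ip(0, -8)        # fly upward a few pixels per frame
        if self.rect.bottom < 0:
            self.kill()                 # leave every group; sprite can be cleaned up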

-
-
-

The Group Class¶

-

The Group class is just a simple container. Similar to the sprite, it has -an add() and remove() method which can change which sprites belong to -the group. You also can pass a sprite or list of sprites to the constructor -(__init__() method) to create a Group instance that contains some -initial sprites.

-

The Group has a few other methods like empty() to remove all sprites from the group and -copy() which will return a copy of the group -with all the same members. Also the has() -method will quickly check if the Group contains a sprite or list of -sprites.

-

The other function you will use frequently is the sprites() method. This returns an object that can be looped on to access every sprite the group contains. Currently this is just a list of the sprites, but in later versions this will likely use iterators for better performance.

-

As a shortcut, the Group also has an update() method, which will call an update() method on every sprite in the group, passing the same arguments to each one. Usually in a game you need some function that updates the state of a game object. It's very easy to call your own methods using the Group.sprites() method, but this is a shortcut that's used enough to be included. Also note that the base Sprite class has a "dummy" update() method that takes any sort of arguments and does nothing.

-

Lastly, the Group has a couple other methods that allow you to use it with -the builtin len() function, getting the number of sprites it contains, and -the "truth" operator, which allows you to do "if mygroup:" to check if the -group has any sprites.
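A short interactive sketch of those Group features (reusing the hypothetical Bullet sprite from the earlier example):

>>> bullets = pygame.sprite.Group()
>>> bullets.add(Bullet((100, 200)))    # add() and remove() change membership
>>> len(bullets)                       # works with the builtin len()
1
>>> if bullets:                        # "truth" check: does the group hold any sprites?
...     bullets.update()               # calls update() on every contained sprite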

-
-
-

Mixing Them Together¶

-

At this point the two classes seem pretty basic. Not doing a lot more than you -can do with a simple list and your own class of game objects. But there are -some big advantages to using the Sprite and Group together. A sprite -can belong to as many groups as you want. Remember as soon as it belongs to no -groups, it will usually be cleared up (unless you have other "non-group" -references to that object).

-

The first big thing is a fast, simple way to categorize sprites. For example, say we had a Pacman-like game. We could make separate groups for the different types of objects in the game: Ghosts, Pac, and Pellets. When Pac eats a power pellet, we can change the state for all ghost objects by affecting everything in the Ghost group. This is quicker and simpler than looping through a list of all the game objects and checking which ones are ghosts.
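Sketched out (all of these names are hypothetical, not from any real example), that might look something like:

>>> ghosts = pygame.sprite.Group()
>>> pellets = pygame.sprite.Group()
>>> def eat_power_pellet():
...     for ghost in ghosts:           # reach every ghost without scanning all game objects
...         ghost.scared = True        # 'scared' is just an illustrative attribute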

-

Adding and removing groups and sprites from each other is a very fast operation, quicker than using lists to store everything. Therefore you can very efficiently change group memberships. Groups can be used to work like simple attributes for each game object. Instead of tracking some attribute like "close_to_player" for a bunch of enemy objects, you could add them to a separate group. Then when you need to access all the enemies that are near the player, you already have a list of them, instead of going through a list of all the enemies, checking for the "close_to_player" flag. Later on your game could add multiple players, and instead of adding more "close_to_player2", "close_to_player3" attributes, you can easily add the enemies to different groups for each player.

-

Another important benefit of using the Sprites and Groups is that the groups -cleanly handle the deleting (or killing) of game objects. In a game where many -objects are referencing other objects, sometimes deleting an object can be the -hardest part, since it can't go away until it is not referenced by anyone. Say -we have an object that is "chasing" another object. The chaser can keep a -simple Group that references the object (or objects) it is chasing. If the -object being chased happens to be destroyed, we don't need to worry about -notifying the chaser to stop chasing. The chaser can see for itself that its -group is now empty, and perhaps find a new target.

-

Again, the thing to remember is that adding and removing sprites from groups is a very cheap/fast operation. You may be best off adding many groups to contain and organize your game objects. Some could even be empty for large portions of the game; there aren't any penalties for managing your game like this.

-
-
-

The Many Group Types¶

-

The above examples and reasons to use Sprites and Groups are only the tip of the iceberg. Another advantage is that the sprite module comes with several different types of Groups. These groups all work just like a regular old Group, but they also have added functionality (or slightly different functionality). Here's a list of the Group classes included with the sprite module.

-
-

Group

-
-

This is the standard "no frills" group mainly explained above. Most of the -other Groups are derived from this one, but not all.

-
-

GroupSingle

-
-

This works exactly like the regular Group class, but it only contains the most recently added sprite. When you add a sprite to this group, it "forgets" about any previous sprites it had, so it always contains only one or zero sprites.

-
-

RenderPlain

-
-

This is a standard group derived from Group. It has a draw() method -that draws all the sprites it contains to the screen (or any Surface). For -this to work, it requires all sprites it contains to have a "image" and "rect" -attributes. It uses these to know what to blit, and where to blit it.

-
-

RenderClear

-
-

This is derived from the RenderPlain group, and adds a method named -clear(). This will erase the previous position of all drawn sprites. It -uses a background image to fill in the areas where the sprite were. It is smart -enough to handle deleted sprites and properly clear them from the screen when -the clear() method is called.

-
-

RenderUpdates

-
-

This is the Cadillac of rendering Groups. It is inherited from -RenderClear, but changes the draw() method to also return a list of -pygame Rects, which represent all the areas on screen that have been -changed.

-
-
-

That is the list of the different groups available. We'll discuss more about these rendering groups in the next section. There's nothing stopping you from creating your own Group classes as well. They are just python code, so you can inherit from one of these and add/change whatever you want. In the future I hope we can add a couple more Groups to this list: a GroupMulti which is like the GroupSingle, but can hold up to a given number of sprites (in some sort of circular buffer?), and also a super-render group that can clear the position of the old sprites without needing a background image to do it (by grabbing a copy of the screen before blitting). Who knows really, but in the future we can add more useful classes to this list.

-
-
-

The Rendering Groups¶

-

From above we can see there are three different rendering groups. We could -probably just get away with the RenderUpdates one, but it adds overhead not -really needed for something like a scrolling game. So we have a couple tools -here, pick the right one for the right job.

-

For a scrolling type game, where the background completely changes every frame, we obviously don't need to worry about pygame's update rectangles in the call to display.update(). You should definitely go with the RenderPlain group here to manage your rendering.

-

For games where the background is more stationary, you definitely don't want pygame updating the entire screen (since it doesn't need to). This type of game usually involves erasing the old position of each object, then drawing it in a new place for each frame. This way we are only changing what is necessary. Most of the time you will just want to use the RenderUpdates class here, since you will also want to pass this list of changes to the display.update() function.

-

The RenderUpdates class also does a good job at minimizing overlapping areas in the list of updated rectangles. If the previous position and current position of an object overlap, it will merge them into a single rectangle. Combined with the fact that it properly handles deleted objects, this is one powerful Group class. If you've written a game that manages the changed rectangles for the objects in a game, you know this is the cause of a lot of messy code in your game. Especially once you start to throw in objects that can be deleted at any time. All this work is reduced to a clear() and draw() method with this monster class. Plus, with the overlap checking, it is likely faster than when you did it manually.
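A hedged sketch of a per-frame loop built around RenderUpdates (assuming sprites with image and rect attributes, plus the screen and background Surfaces used in the earlier tutorial examples):

allsprites = pygame.sprite.RenderUpdates()
# ... add sprites to allsprites ...
allsprites.clear(screen, background)     # erase the old positions using the background
allsprites.update()                      # run your own per-frame logic on every sprite
dirty = allsprites.draw(screen)          # draw everything; returns the changed rectangles
pygame.display.update(dirty)             # repaint only what changed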

-

Also note that there's nothing stopping you from mixing and matching these -render groups in your game. You should definitely use multiple rendering groups -when you want to do layering with your sprites. Also if the screen is split -into multiple sections, perhaps each section of the screen should use an -appropriate render group?

-
-
-

Collision Detection¶

-

The sprite module also comes with two very generic collision detection -functions. For more complex games, these really won't work for you, but you -can easily grab the source code for them, and modify them as needed.

-

Here's a summary of what they are, and what they do.

-
-

spritecollide(sprite, group, dokill) -> list

-
-

This checks for collisions between a single sprite and the sprites in a group. -It requires a "rect" attribute for all the sprites used. It returns a list of -all the sprites that overlap with the first sprite. The "dokill" argument is a -boolean argument. If it is true, the function will call the kill() method -on all the sprites. This means the last reference to each sprite is probably in -the returned list. Once the list goes away so do the sprites. A quick example -of using this in a loop

-
>>> for bomb in sprite.spritecollide(player, bombs, 1):
-...     boom_sound.play()
-...     Explosion(bomb, 0)
-
-
-

This finds all the sprites in the "bombs" group that collide with the player. Because of the "dokill" argument it deletes all the crashed bombs. For each bomb that did collide, it plays a "boom" sound effect, and creates a new Explosion where the bomb was. (Note: the Explosion class here knows to add each instance to the appropriate groups, so we don't need to store it in a variable; that last line might feel a little "funny" to you python programmers.)

-
-

groupcollide(group1, group2, dokill1, dokill2) -> dictionary

-
-

This is similar to the spritecollide function, but a little more complex. It checks for collisions for all the sprites in one group, to the sprites in another. There is a dokill argument for the sprites in each list. When dokill1 is true, the colliding sprites in group1 will be kill()ed. When dokill2 is true, we get the same results for group2. The dictionary it returns works like this; each key in the dictionary is a sprite from group1 that had a collision. The value for that key is a list of the sprites that it collided with. Perhaps another quick code sample explains it best

-
>>> for alien in sprite.groupcollide(aliens, shots, 1, 1).keys():
-...     boom_sound.play()
-...     Explosion(alien, 0)
-...     kills += 1
-
-
-

This code checks for the collisions between player bullets and all the aliens -they might intersect. In this case we only loop over the dictionary keys, but -we could loop over the values() or items() if we wanted to do something -to the specific shots that collided with aliens. If we did loop over the -values() we would be looping through lists that contain sprites. The same -sprite may even appear more than once in these different loops, since the same -"shot" could have collided against multiple "aliens".

-
-
-

Those are the basic collision functions that come with pygame. It should be easy to roll your own that perhaps use something different than the "rect" attribute. Or maybe try to fine-tune your code a little more by directly affecting the collision object, instead of building a list of the collisions? The code in the sprite collision functions is very optimized, but you could speed it up slightly by taking out some functionality you don't need.

-
-
-

Common Problems¶

-

Currently there is one main problem that catches new users. When you derive -your new sprite class with the Sprite base, you must call the -Sprite.__init__() method from your own class __init__() method. If you -forget to call the Sprite.__init__() method, you get a cryptic error, like -this

-
AttributeError: 'mysprite' instance has no attribute '_Sprite__g'
-
-
-
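A minimal sketch of a subclass that avoids the problem (the MySprite name and sizes are just examples): call the base initializer before anything else in your own constructor.

class MySprite(pygame.sprite.Sprite):
    def __init__(self):
        pygame.sprite.Sprite.__init__(self)   # forgetting this line causes the error above
        self.image = pygame.Surface((16, 16))
        self.rect = self.image.get_rect()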
-
-

Extending Your Own Classes (Advanced)¶

-

Because of speed concerns, the current Group classes try to only do exactly -what they need, and not handle a lot of general situations. If you decide you -need extra features, you may want to create your own Group class.

-

The Sprite and Group classes were designed to be extended, so feel free -to create your own Group classes to do specialized things. The best place -to start is probably the actual python source code for the sprite module. -Looking at the current Sprite groups should be enough example on how to -create your own.

-

For example, here is the source code for a rendering Group that calls a draw() method on each sprite, instead of just blitting an "image" variable from it. Since we want it to also handle updated areas, we will start with a copy of the original RenderUpdates group; here is the code:

-
class RenderUpdatesDraw(RenderClear):
-    """call sprite.draw(surface) to render sprites"""
-    def draw(self, surface):
-        dirty = self.lostsprites
-        self.lostsprites = []
-        for s, r in self.spritedict.items():
-            newrect = s.draw(surface) #Here's the big change
-            if r == 0:                #sprite had no previous rect (first draw)
-                dirty.append(newrect)
-            else:
-                dirty.append(newrect.union(r))
-            self.spritedict[s] = newrect
-        return dirty
-
-
-

Following is more information on how you could create your own Sprite and -Group objects from scratch.

-

The Sprite objects only "require" two methods: "add_internal()" and "remove_internal()". These are called by the Group classes when they are adding or removing a sprite from themselves. The add_internal() and remove_internal() methods have a single argument, which is a group. Your Sprite will need some way to also keep track of the Groups it belongs to. You will likely want to try to match the other methods and arguments to the real Sprite class, but if you're not going to use those methods, you sure don't need them.

-

It is almost the same requirements for creating your own Group. In fact, if -you look at the source you'll see the GroupSingle isn't derived from the -Group class, it just implements the same methods so you can't really tell -the difference. Again you need an "add_internal()" and "remove_internal()" -method that the sprites call when they want to belong or remove themselves from -the group. The add_internal() and remove_internal() have a single -argument which is a sprite. The only other requirement for the Group -classes is they have a dummy attribute named "_spritegroup". It doesn't matter -what the value is, as long as the attribute is present. The Sprite classes can -look for this attribute to determine the difference between a "group" and any -ordinary python container. (This is important, because several sprite methods -can take an argument of a single group, or a sequence of groups. Since they -both look similar, this is the most flexible way to "see" the difference.)
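Putting those requirements together, a bare-bones custom Group might be sketched like this (purely illustrative; the real Group classes do quite a bit more):

class MiniGroup:
    _spritegroup = True                 # marks this container as a "group" to the Sprite methods
    def __init__(self):
        self.spritedict = {}
    def add_internal(self, sprite):     # called by sprites joining the group
        self.spritedict[sprite] = 0
    def remove_internal(self, sprite):  # called by sprites leaving the group
        del self.spritedict[sprite]
    def sprites(self):
        return list(self.spritedict)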

-

You should go through the code for the sprite module. While the code is a bit -"tuned", it's got enough comments to help you follow along. There's even a -TODO section in the source if you feel like contributing.

-
-
-
- - -

-
-
-
-
-
- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/SurfarrayIntro.html b/venv/Lib/site-packages/pygame/docs/generated/tut/SurfarrayIntro.html deleted file mode 100644 index 9bc37f4..0000000 --- a/venv/Lib/site-packages/pygame/docs/generated/tut/SurfarrayIntro.html +++ /dev/null @@ -1,661 +0,0 @@ - - - - - - - - - Pygame Tutorials - Surfarray Introduction — pygame v2.1.2 documentation - - - - - - - - - - - - - -
- -
- - - - - -
-
- -
-
- -
-
-

Surfarray Introduction¶

-
-
Author
-

Pete Shinners

-
-
Contact
-

pete@shinners.org

-
-
-
-

Introduction¶

-

This tutorial will attempt to introduce users to both NumPy and the pygame -surfarray module. To beginners, the code that uses surfarray can be quite -intimidating. But actually there are only a few concepts to understand and -you will be up and running. Using the surfarray module, it becomes possible -to perform pixel level operations from straight python code. The performance -can become quite close to the level of doing the code in C.

-

You may just want to jump down to the "Examples" section to get an -idea of what is possible with this module, then start at the beginning here -to work your way up.

-

Now I won't try to fool you into thinking everything is very easy. Getting more advanced effects by modifying pixel values is very tricky. Just mastering Numeric Python (Numeric was the original array package and the predecessor of NumPy) takes a lot of learning. In this tutorial I'll be sticking with the basics and using a lot of examples in an attempt to plant seeds of wisdom. After finishing the tutorial you should have a basic handle on how the surfarray works.

-
-
-

Numeric Python¶

-

If you do not have the python NumPy package installed, you will need to do that now. You can download the package from the NumPy Downloads Page. To make sure NumPy is working for you, you should get something like this from the interactive python prompt.

-
>>> from numpy import *                    #import numpy
->>> a = array((1,2,3,4,5))                 #create an array
->>> a                                      #display the array
-array([1, 2, 3, 4, 5])
->>> a[2]                                   #index into the array
-3
->>> a*2                                   #new array with doubled values
-array([ 2,  4,  6,  8, 10])
-
-
-

As you can see, the NumPy module gives us a new data type, the array. -This object holds an array of fixed size, and all values inside are of the same -type. The arrays can also be multidimensional, which is how we will use them -with images. There's a bit more to it than this, but it is enough to get us -started.

-

If you look at the last command above, you'll see that mathematical operations -on NumPy arrays apply to all values in the array. This is called "element-wise -operations". These arrays can also be sliced like normal lists. The slicing -syntax is the same as used on standard python objects. -(so study up if you need to :] ). -Here are some more examples of working with arrays.

-
>>> len(a)                                 #get array size
-5
->>> a[2:]                                  #elements 2 and up
-array([3, 4, 5])
->>> a[:-2]                                 #all except last 2
-array([1, 2, 3])
->>> a[2:] + a[:-2]                         #add first and last
-array([4, 6, 8])
->>> array((1,2,3)) + array((3,4))          #add arrays of wrong sizes
-Traceback (most recent call last):
-  File "<stdin>", line 1, in <module>
-ValueError: operands could not be broadcast together with shapes (3,) (2,)
-
-
-

We get an error on the last command, because we try to add together two arrays that are different sizes. In order for two arrays to operate with each other, including comparisons and assignment, they must have the same dimensions. It is very important to know that the new arrays created from slicing the original all reference the same values. So changing the values in a slice also changes the original values. Pay attention to how this is done in the example below.

-
>>> a                                      #show our starting array
-array([1, 2, 3, 4, 5])
->>> aa = a[1:3]                            #slice middle 2 elements
->>> aa                                     #show the slice
-array([2, 3])
->>> aa[1] = 13                             #change value in slice
->>> a                                      #show change in original
-array([ 1, 2, 13,  4,  5])
->>> aaa = array(a)                         #make copy of array
->>> aaa                                    #show copy
-array([ 1, 2, 13,  4,  5])
->>> aaa[1:4] = 0                           #set middle values to 0
->>> aaa                                    #show copy
-array([1, 0, 0, 0, 5])
->>> a                                      #show original again
-array([ 1, 2, 13,  4,  5])
-
-
-

Now we will look at small arrays with two dimensions. Don't be too worried; to get started, it is the same as having a two-dimensional tuple (a tuple inside a tuple). Let's get started with two dimensional arrays.

-
>>> row1 = (1,2,3)                         #create a tuple of vals
->>> row2 = (3,4,5)                         #another tuple
->>> (row1,row2)                            #show as a 2D tuple
-((1, 2, 3), (3, 4, 5))
->>> b = array((row1, row2))                #create a 2D array
->>> b                                      #show the array
-array([[1, 2, 3],
-       [3, 4, 5]])
->>> array(((1,2),(3,4),(5,6)))             #show a new 2D array
-array([[1, 2],
-       [3, 4],
-       [5, 6]])
-
-
-

Now with this two -dimensional array (from now on as "2D") we can index specific values -and do slicing on both dimensions. Simply using a comma to separate the indices -allows us to lookup/slice in multiple dimensions. Just using ":" as an -index (or not supplying enough indices) gives us all the values in -that dimension. Let's see how this works.

-
>>> b                                      #show our array from above
-array([[1, 2, 3],
-       [3, 4, 5]])
->>> b[0,1]                                 #index a single value
-2
->>> b[1,:]                                 #slice second row
-array([3, 4, 5])
->>> b[1]                                   #slice second row (same as above)
-array([3, 4, 5])
->>> b[:,2]                                 #slice last column
-array([3, 5])
->>> b[:,:2]                                #slice into a 2x2 array
-array([[1, 2],
-       [3, 4]])
-
-
-

Ok, stay with me here, this is about as hard as it gets. When using NumPy -there is one more feature to slicing. Slicing arrays also allow you to specify -a slice increment. The syntax for a slice with increment is -start_index : end_index : increment.

-
>>> c = arange(10)                         #like range, but makes an array
->>> c                                      #show the array
-array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
->>> c[1:6:2]                               #slice odd values from 1 to 6
-array([1, 3, 5])
->>> c[4::4]                                #slice every 4th val starting at 4
-array([4, 8])
->>> c[8:1:-1]                              #slice 1 to 8, reversed
-array([8, 7, 6, 5, 4, 3, 2])
-
-
-

Well that is it. There's enough information there to get you started using -NumPy with the surfarray module. There's certainly a lot more to NumPy, but -this is only an introduction. Besides, we want to get on to the fun stuff, -correct?

-
-
-

Import Surfarray¶

-

In order to use the surfarray module we need to import it. Since both surfarray -and NumPy are optional components for pygame, it is nice to make sure they -import correctly before using them. In these examples I'm going to import -NumPy into a variable named N. This will let you know which functions -I'm using are from the NumPy package. -(and is a lot shorter than typing NumPy before each function)

-
try:
-    import numpy as N
-    import pygame.surfarray as surfarray
-except ImportError:
-    raise ImportError("NumPy and Surfarray are required.")
-
-
-
-
-

Surfarray Introduction¶

-

There are two main types of functions in surfarray. One set of functions creates an array that is a copy of a surface's pixel data. The other functions create an array that directly references the surface's pixel data, so that changes to the array directly affect the original surface. There are other functions that allow you to access any per-pixel alpha values as arrays, along with a few other helpful functions. We will look at these other functions later on.

-

When working with these surface arrays, there are two ways of representing the -pixel values. First, they can be represented as mapped integers. This type of -array is a simple 2D array with a single integer representing the surface's -mapped color value. This type of array is good for moving parts of an image -around. The other type of array uses three RGB values to represent each pixel -color. This type of array makes it extremely simple to do types of effects that -change the color of each pixel. This type of array is also a little trickier to -deal with, since it is essentially a 3D numeric array. Still, once you get your -mind into the right mode, it is not much harder than using the normal 2D arrays.

-

The NumPy module uses a machine's natural number types to represent the data values, so a NumPy array can consist of integers that are 8 bits, 16 bits, or 32 bits. (The arrays can also use other types like floats and doubles, but for our image manipulation we mainly need to worry about the integer types.) Because of this limitation of integer sizes, you must take a little extra care that any array that references pixel data uses a type that can properly map to that data. The functions that create these arrays from surfaces are:

-
-
-surfarray.pixels2d(surface)
-

Creates a 2D array (integer pixel values) that references the original surface data. This will work for all surface formats except 24-bit.

-
- -
-
-surfarray.array2d(surface)
-

Creates a 2D array (integer pixel values) that is copied from any type of surface.

-
- -
-
-surfarray.pixels3d(surface)
-

Creates a 3D array (RGB pixel values) that references the original surface data. This will only work on 24-bit and 32-bit surfaces that have RGB or BGR formatting.

-
- -
-
-surfarray.array3d(surface)
-

Creates a 3D array (RGB pixel values) that is copied from any type of surface.

-
- -

Here is a small chart that might better illustrate what types of functions -should be used on which surfaces. As you can see, both the arrayXD functions -will work with any type of surface.

            32-bit   24-bit   16-bit   8-bit(c-map)
pixel2d     yes      no       yes      yes
array2d     yes      yes      yes      yes
pixel3d     yes      yes      no       no
array3d     yes      yes      yes      yes

-
-
-

Examples¶

-

With this information, we are equipped to start trying things with surface -arrays. The following are short little demonstrations that create a NumPy -array and display them in pygame. These different tests are found in the -arraydemo.py example. There is a simple function named surfdemo_show -that displays an array on the screen.

-
-
-allblack -
allblack = N.zeros((128, 128))
-surfdemo_show(allblack, 'allblack')
-
-
-

Our first example creates an all black array. Whenever you need -to create a new numeric array of a specific size, it is best to use the -zeros function. Here we create a 2D array of all zeros and display -it.

-
-
-
-
-striped -
striped = N.zeros((128, 128, 3))
-striped[:] = (255, 0, 0)
-striped[:,::3] = (0, 255, 255)
-surfdemo_show(striped, 'striped')
-
-
-

Here we are dealing with a 3D array. We start by creating an all red image. -Then we slice out every third row and assign it to a blue/green color. As you -can see, we can treat the 3D arrays almost exactly the same as 2D arrays, just -be sure to assign them 3 values instead of a single mapped integer.

-
-
-
-
-rgbarray -
imgsurface = pygame.image.load('surfarray.png')
-rgbarray = surfarray.array3d(imgsurface)
-surfdemo_show(rgbarray, 'rgbarray')
-
-
-

Here we load an image with the image module, then convert it to a 3D -array of integer RGB color elements. An RGB copy of a surface always -has the colors arranged as a[r,c,0] for the red component, -a[r,c,1] for the green component, and a[r,c,2] for blue. This can then -be used without caring how the pixels of the actual surface are configured, -unlike a 2D array which is a copy of the mapped -(raw) surface pixels. We will use this image in the rest of the samples.

-
-
-
-
-flipped -
flipped = rgbarray[:,::-1]
-surfdemo_show(flipped, 'flipped')
-
-
-

Here we flip the image vertically. All we need to do is take the original -image array and slice it using a negative increment.

-
-
-
-
-scaledown -
scaledown = rgbarray[::2,::2]
-surfdemo_show(scaledown, 'scaledown')
-
-
-

Based on the last example, scaling an image down is pretty logical. We just -slice out all the pixels using an increment of 2 vertically and horizontally.

-
-
-
-
-scaleup -
shape = rgbarray.shape
-scaleup = N.zeros((shape[0]*2, shape[1]*2, shape[2]))
-scaleup[::2,::2,:] = rgbarray
-scaleup[1::2,::2,:] = rgbarray
-scaleup[:,1::2] = scaleup[:,::2]
-surfdemo_show(scaleup, 'scaleup')
-
-
-

Scaling the image up is a little more work, but is similar to the previous scaling down; we do it all with slicing. First we create an array that is double the size of our original. Then we copy the original array into every other pixel of the new array, and do it again to fill the odd columns. At this point we have the image scaled properly going across, but every other row is black, so we simply need to copy each row to the one underneath it. Then we have an image doubled in size.

-
-
-
-
-redimg -
redimg = N.array(rgbarray)
-redimg[:,:,1:] = 0
-surfdemo_show(redimg, 'redimg')
-
-
-

Now we are using 3D arrays to change the colors. Here we -set all the values in green and blue to zero. -This leaves us with just the red channel.

-
-
-
-
-soften -
factor = N.array((8,), N.int32)
-soften = N.array(rgbarray, N.int32)
-soften[1:,:]  += rgbarray[:-1,:] * factor
-soften[:-1,:] += rgbarray[1:,:] * factor
-soften[:,1:]  += rgbarray[:,:-1] * factor
-soften[:,:-1] += rgbarray[:,1:] * factor
-soften //= 33
-surfdemo_show(soften, 'soften')
-
-
-

Here we perform a 3x3 convolution filter that will soften our image. -It looks like a lot of steps here, but what we are doing is shifting -the image 1 pixel in each direction and adding them all together (with some -multiplication for weighting). Then average all the values. It's no Gaussian, -but it's fast. One point with NumPy arrays, the precision of arithmetic -operations is determined by the array with the largest data type. -So if factor was not declared as a 1 element array of type numpy.int32, -the multiplications would be performed using numpy.int8, the 8 bit integer -type of each rgbarray element. This will cause value truncation. The soften -array must also be declared to have a larger integer size than rgbarray to -avoid truncation.

-
-
-
-
-xfade -
src = N.array(rgbarray)
-dest = N.zeros(rgbarray.shape)
-dest[:] = 20, 50, 100
-diff = (dest - src) * 0.50
-xfade = src + diff.astype(N.uint)
-surfdemo_show(xfade, 'xfade')
-
-
-

Lastly, we are cross fading between the original image and a solid bluish -image. Not exciting, but the dest image could be anything, and changing the 0.50 -multiplier will let you choose any step in a linear crossfade between two images.

-
-
-
-
-

Hopefully by this point you are starting to see how surfarray can be used to perform special effects and transformations that are only possible at the pixel level. At the very least, you can use surfarray to do a lot of Surface.set_at() / Surface.get_at() style operations very quickly. But don't think you are finished yet; there is still much to learn.

-
-
-

Surface Locking¶

-

Like the rest of pygame, surfarray will lock any Surfaces it needs to -automatically when accessing pixel data. There is one extra thing to be aware -of though. When creating the pixel arrays, the original surface will -be locked during the lifetime of that pixel array. This is important to remember. -Be sure to "del" the pixel array or let it go out of scope -(ie, when the function returns, etc).
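To see what that means in practice, here is a minimal sketch (the surface size and the colour written into it are arbitrary choices, not part of the tutorial):

import pygame
import pygame.surfarray as surfarray

pygame.init()
surf = pygame.Surface((100, 100), depth=32)

pixels = surfarray.pixels3d(surf)   # referenced array; the surface is now locked
print(surf.get_locked())            # True for as long as 'pixels' exists
pixels[:, :, 0] = 255               # direct pixel access through the array

del pixels                          # drop the array reference...
print(surf.get_locked())            # ...and the surface is unlocked again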

-

Also be aware that you really don't want to be doing much (if any) -direct pixel access on hardware surfaces (HWSURFACE). This is because -the actual surface data lives on the graphics card, and transferring pixel -changes over the PCI/AGP bus is not fast.

-
-
-

Transparency¶

-

The surfarray module has several methods for accessing a Surface's alpha/colorkey -values. None of the alpha functions are affected by overall transparency of a -Surface, just the pixel alpha values. Here's the list of those functions.

-
-
-surfarray.pixels_alpha(surface)
-

Creates a 2D array (integer pixel values) that references the original -surface alpha data. -This will only work on 32-bit images with an 8-bit alpha component.

-
- -
-
-surfarray.array_alpha(surface)
-

Creates a 2D array (integer pixel values) that is copied from any -type of surface. -If the surface has no alpha values, -the array will be fully opaque values (255).

-
- -
-
-surfarray.array_colorkey(surface)
-

Creates a 2D array (integer pixel values) that is set to transparent -(0) wherever that pixel color matches the Surface colorkey.

-
- -
-
-
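As a rough sketch of how the last two behave (the tiny surface here is generated in code rather than loaded from an image):

import pygame
import pygame.surfarray as surfarray

pygame.init()
surf = pygame.Surface((4, 4))             # plain surface with no per-pixel alpha
surf.fill((255, 0, 255))
surf.set_colorkey((255, 0, 255))

print(surfarray.array_alpha(surf))        # no alpha channel, so everything is 255
print(surfarray.array_colorkey(surf))     # every pixel matches the colorkey, so all 0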

Other Surfarray Functions¶

-

There are only a few other functions available in surfarray. You can get a better -list with more documentation on the -surfarray reference page. -There is one very useful function though.

-
-
-surfarray.blit_array(surface, array)
-

This will transfer any type of 2D or 3D surface array onto a Surface -of the same dimensions. -This surfarray blit will generally be faster than assigning an array to a -referenced pixel array. -Still, it should not be as fast as normal Surface blitting, -since those are very optimized.
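For example, a small sketch of pushing a whole 3D array onto the display in one call (the window size and the gradient are arbitrary):

import numpy as N
import pygame
import pygame.surfarray as surfarray

pygame.init()
screen = pygame.display.set_mode((320, 240))

# Build an RGB array the same size as the display and transfer it in one call.
gradient = N.zeros((320, 240, 3), N.uint8)
gradient[:, :, 2] = N.linspace(0, 255, 240, dtype=N.uint8)   # blue ramp down the screen
surfarray.blit_array(screen, gradient)
pygame.display.flip()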

-
- -
-
-

More Advanced NumPy¶

-

There are a couple of final things you should know about NumPy arrays. When dealing with very large arrays, like the kind that are 640x480 big, there are some extra things to be careful about. Mainly, while operators like + and * make the arrays easy to use, they are also very expensive on big arrays. These operators must make new temporary copies of the array, which are then usually copied into another array. This can get very time consuming. Fortunately, all the NumPy operators come with special functions that can perform the operation "in place". For example, you would want to replace screen[:] = screen + brightmap with the much faster add(screen, brightmap, screen). You'll want to read up on the NumPy UFunc documentation for more about this; it is important when dealing with the arrays.
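For illustration, a short sketch of the in-place form (screen and brightmap here are invented stand-ins, not arrays from the examples above):

import numpy as N

screen = N.zeros((640, 480, 3), N.int32)        # invented stand-in for a pixel array
brightmap = N.full((640, 480, 3), 10, N.int32)  # invented stand-in for a brightness map

screen[:] = screen + brightmap      # builds a temporary array, then copies it in
N.add(screen, brightmap, screen)    # the same addition performed in place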

-

Another thing to be aware of when working with NumPy arrays is the datatype -of the array. Some of the arrays (especially the mapped pixel type) often return -arrays with an unsigned 8-bit value. These arrays will easily overflow if you are -not careful. NumPy will use the same coercion that you find in C programs, so -mixing an operation with 8-bit numbers and 32-bit numbers will give a result as -32-bit numbers. You can convert the datatype of an array, but definitely be -aware of what types of arrays you have, if NumPy gets in a situation where -precision would be ruined, it will raise an exception.
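A tiny illustration of the 8-bit wrap-around to watch for:

import numpy as N

a = N.array([200, 250], N.uint8)
print(a + a)                    # stays 8-bit and wraps around: [144 244]
print(a.astype(N.int32) + a)    # promote first to get the real sums: [400 500]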

-

Lastly, be aware that when assigning values into the 3D arrays, they must be -between 0 and 255, or you will get some undefined truncating.

-
-
-

Graduation¶

-

Well there you have it. My quick primer on Numeric Python and surfarray. -Hopefully now you see what is possible, and even if you never use them for -yourself, you do not have to be afraid when you see code that does. Look into -the vgrade example for more numeric array action. There are also some "flame" -demos floating around that use surfarray to create a realtime fire effect.

-

Best of all, try some things on your own. Take it slow at first and build up, -I've seen some great things with surfarray already like radial gradients and -more. Good Luck.

-
-
-
- - -

\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/chimp.py.html b/venv/Lib/site-packages/pygame/docs/generated/tut/chimp.py.html
deleted file mode 100644
index 3d2925f..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/chimp.py.html
+++ /dev/null
@@ -1,342 +0,0 @@
-pygame/examples/chimp.py — pygame v2.1.2 documentation
#!/usr/bin/env python
-""" pygame.examples.chimp
-
-This simple example is used for the line-by-line tutorial
-that comes with pygame. It is based on a 'popular' web banner.
-Note there are comments here, but for the full explanation,
-follow along in the tutorial.
-"""
-
-
-# Import Modules
-import os
-import pygame as pg
-
-if not pg.font:
-    print("Warning, fonts disabled")
-if not pg.mixer:
-    print("Warning, sound disabled")
-
-main_dir = os.path.split(os.path.abspath(__file__))[0]
-data_dir = os.path.join(main_dir, "data")
-
-
-# functions to create our resources
-def load_image(name, colorkey=None, scale=1):
-    fullname = os.path.join(data_dir, name)
-    image = pg.image.load(fullname)
-    image = image.convert()
-
-    size = image.get_size()
-    size = (size[0] * scale, size[1] * scale)
-    image = pg.transform.scale(image, size)
-
-    if colorkey is not None:
-        if colorkey == -1:
-            colorkey = image.get_at((0, 0))
-        image.set_colorkey(colorkey, pg.RLEACCEL)
-    return image, image.get_rect()
-
-
-def load_sound(name):
-    class NoneSound:
-        def play(self):
-            pass
-
-    if not pg.mixer or not pg.mixer.get_init():
-        return NoneSound()
-
-    fullname = os.path.join(data_dir, name)
-    sound = pg.mixer.Sound(fullname)
-
-    return sound
-
-
-# classes for our game objects
-class Fist(pg.sprite.Sprite):
-    """moves a clenched fist on the screen, following the mouse"""
-
-    def __init__(self):
-        pg.sprite.Sprite.__init__(self)  # call Sprite initializer
-        self.image, self.rect = load_image("fist.png", -1)
-        self.fist_offset = (-235, -80)
-        self.punching = False
-
-    def update(self):
-        """move the fist based on the mouse position"""
-        pos = pg.mouse.get_pos()
-        self.rect.topleft = pos
-        self.rect.move_ip(self.fist_offset)
-        if self.punching:
-            self.rect.move_ip(15, 25)
-
-    def punch(self, target):
-        """returns true if the fist collides with the target"""
-        if not self.punching:
-            self.punching = True
-            hitbox = self.rect.inflate(-5, -5)
-            return hitbox.colliderect(target.rect)
-
-    def unpunch(self):
-        """called to pull the fist back"""
-        self.punching = False
-
-
-class Chimp(pg.sprite.Sprite):
-    """moves a monkey critter across the screen. it can spin the
-    monkey when it is punched."""
-
-    def __init__(self):
-        pg.sprite.Sprite.__init__(self)  # call Sprite initializer
-        self.image, self.rect = load_image("chimp.png", -1, 4)
-        screen = pg.display.get_surface()
-        self.area = screen.get_rect()
-        self.rect.topleft = 10, 90
-        self.move = 18
-        self.dizzy = False
-
-    def update(self):
-        """walk or spin, depending on the monkeys state"""
-        if self.dizzy:
-            self._spin()
-        else:
-            self._walk()
-
-    def _walk(self):
-        """move the monkey across the screen, and turn at the ends"""
-        newpos = self.rect.move((self.move, 0))
-        if not self.area.contains(newpos):
-            if self.rect.left < self.area.left or self.rect.right > self.area.right:
-                self.move = -self.move
-                newpos = self.rect.move((self.move, 0))
-                self.image = pg.transform.flip(self.image, True, False)
-        self.rect = newpos
-
-    def _spin(self):
-        """spin the monkey image"""
-        center = self.rect.center
-        self.dizzy = self.dizzy + 12
-        if self.dizzy >= 360:
-            self.dizzy = False
-            self.image = self.original
-        else:
-            rotate = pg.transform.rotate
-            self.image = rotate(self.original, self.dizzy)
-        self.rect = self.image.get_rect(center=center)
-
-    def punched(self):
-        """this will cause the monkey to start spinning"""
-        if not self.dizzy:
-            self.dizzy = True
-            self.original = self.image
-
-
-def main():
-    """this function is called when the program starts.
-    it initializes everything it needs, then runs in
-    a loop until the function returns."""
-    # Initialize Everything
-    pg.init()
-    screen = pg.display.set_mode((1280, 480), pg.SCALED)
-    pg.display.set_caption("Monkey Fever")
-    pg.mouse.set_visible(False)
-
-    # Create The Background
-    background = pg.Surface(screen.get_size())
-    background = background.convert()
-    background.fill((170, 238, 187))
-
-    # Put Text On The Background, Centered
-    if pg.font:
-        font = pg.font.Font(None, 64)
-        text = font.render("Pummel The Chimp, And Win $$$", True, (10, 10, 10))
-        textpos = text.get_rect(centerx=background.get_width() / 2, y=10)
-        background.blit(text, textpos)
-
-    # Display The Background
-    screen.blit(background, (0, 0))
-    pg.display.flip()
-
-    # Prepare Game Objects
-    whiff_sound = load_sound("whiff.wav")
-    punch_sound = load_sound("punch.wav")
-    chimp = Chimp()
-    fist = Fist()
-    allsprites = pg.sprite.RenderPlain((chimp, fist))
-    clock = pg.time.Clock()
-
-    # Main Loop
-    going = True
-    while going:
-        clock.tick(60)
-
-        # Handle Input Events
-        for event in pg.event.get():
-            if event.type == pg.QUIT:
-                going = False
-            elif event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE:
-                going = False
-            elif event.type == pg.MOUSEBUTTONDOWN:
-                if fist.punch(chimp):
-                    punch_sound.play()  # punch
-                    chimp.punched()
-                else:
-                    whiff_sound.play()  # miss
-            elif event.type == pg.MOUSEBUTTONUP:
-                fist.unpunch()
-
-        allsprites.update()
-
-        # Draw Everything
-        screen.blit(background, (0, 0))
-        allsprites.draw(screen)
-        pg.display.flip()
-
-    pg.quit()
-
-
-# Game Over
-
-
-# this calls the 'main' function when this script is executed
-if __name__ == "__main__":
-    main()
-
-
-
- - -

\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/newbieguide.html b/venv/Lib/site-packages/pygame/docs/generated/tut/newbieguide.html
deleted file mode 100644
index b5e36a6..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/newbieguide.html
+++ /dev/null
@@ -1,475 +0,0 @@
-A Newbie Guide to pygame — pygame v2.1.2 documentation
-

A Newbie Guide to pygame¶

-

or Things I learned by trial and error so you don't have to,

-

or How I learned to stop worrying and love the blit.

-

Pygame is a python wrapper for SDL, written by Pete Shinners. What this -means is that, using pygame, you can write games or other multimedia -applications in Python that will run unaltered on any of SDL's supported -platforms (Windows, Unix, Mac, BeOS and others).

-

Pygame may be easy to learn, but the world of graphics programming can be -pretty confusing to the newcomer. I wrote this to try to distill the practical -knowledge I've gained over the past year or so of working with pygame, and its -predecessor, PySDL. I've tried to rank these suggestions in order of -importance, but how relevant any particular hint is will depend on your own -background and the details of your project.

-
-

Get comfortable working in Python.¶

-

The most important thing is to feel confident using python. Learning something -as potentially complicated as graphics programming will be a real chore if -you're also unfamiliar with the language you're using. Write a few sizable -non-graphical programs in python -- parse some text files, write a guessing -game or a journal-entry program or something. Get comfortable with string and -list manipulation -- know how to split, slice and combine strings and lists. -Know how import works -- try writing a program that is spread across -several source files. Write your own functions, and practice manipulating -numbers and characters; know how to convert between the two. Get to the point -where the syntax for using lists and dictionaries is second-nature -- you don't -want to have to run to the documentation every time you need to slice a list or -sort a set of keys. Resist the temptation to run to a mailing list, -comp.lang.python, or IRC when you run into trouble. Instead, fire up the -interpreter and play with the problem for a few hours. Print out the Python -2.0 Quick Reference and keep it by your computer.

-

This may sound incredibly dull, but the confidence you'll gain through your -familiarity with python will work wonders when it comes time to write your -game. The time you spend making python code second-nature will be nothing -compared to the time you'll save when you're writing real code.

-
-
-

Recognize which parts of pygame you really need.¶

-

Looking at the jumble of classes at the top of the pygame Documentation index -may be confusing. The important thing is to realize that you can do a great -deal with only a tiny subset of functions. Many classes you'll probably never -use -- in a year, I haven't touched the Channel, Joystick, cursors, -Userrect, surfarray or version functions.

-
-
-

Know what a surface is.¶

-

The most important part of pygame is the surface. Just think of a surface as a -blank piece of paper. You can do a lot of things with a surface -- you can -draw lines on it, fill parts of it with color, copy images to and from it, and -set or read individual pixel colors on it. A surface can be any size (within -reason) and you can have as many of them as you like (again, within reason). -One surface is special -- the one you create with -pygame.display.set_mode(). This 'display surface' represents the screen; -whatever you do to it will appear on the user's screen. You can only have one -of these -- that's an SDL limitation, not a pygame one.

-

So how do you create surfaces? As mentioned above, you create the special -'display surface' with pygame.display.set_mode(). You can create a surface -that contains an image by using image.load(), or you can make a surface -that contains text with font.render(). You can even create a surface that -contains nothing at all with Surface().

-

Most of the surface functions are not critical. Just learn blit(), -fill(), set_at() and get_at(), and you'll be fine.

-
-
-

Use surface.convert().¶

-

When I first read the documentation for surface.convert(), I didn't think it was something I had to worry about. 'I only use PNGs, therefore everything I do will be in the same format. So I don't need convert().' It turns out I was very, very wrong.

-

The 'format' that convert() refers to isn't the file format (ie PNG, -JPEG, GIF), it's what's called the 'pixel format'. This refers to the -particular way that a surface records individual colors in a specific pixel. -If the surface format isn't the same as the display format, SDL will have to -convert it on-the-fly for every blit -- a fairly time-consuming process. Don't -worry too much about the explanation; just note that convert() is necessary -if you want to get any kind of speed out of your blits.

-

How do you use convert? Just call it after creating a surface with the -image.load() function. Instead of just doing:

-
surface = pygame.image.load('foo.png')
-
-
-

Do:

-
surface = pygame.image.load('foo.png').convert()
-
-
-

It's that easy. You just need to call it once per surface, when you load an -image off the disk. You'll be pleased with the results; I see about a 6x -increase in blitting speed by calling convert().

-

The only times you don't want to use convert() is when you really need to -have absolute control over an image's internal format -- say you were writing -an image conversion program or something, and you needed to ensure that the -output file had the same pixel format as the input file. If you're writing a -game, you need speed. Use convert().

-
-
-

Dirty rect animation.¶

-

The most common cause of inadequate frame rates in pygame programs results from -misunderstanding the pygame.display.update() function. With pygame, merely -drawing something to the display surface doesn't cause it to appear on the -screen -- you need to call pygame.display.update(). There are three ways -of calling this function:

-
-
    -
  • pygame.display.update() -- This updates the whole window (or the whole screen for fullscreen displays).

  • -
  • pygame.display.flip() -- This does the same thing, and will also do the right thing if you're using double-buffered hardware acceleration, which you're not, so on to...

  • -
  • pygame.display.update(a rectangle or some list of rectangles) -- This updates just the rectangular areas of the screen you specify.

  • -
-
-

Most people new to graphics programming use the first option -- they update the -whole screen every frame. The problem is that this is unacceptably slow for -most people. Calling update() takes 35 milliseconds on my machine, which -doesn't sound like much, until you realize that 1000 / 35 = 28 frames per -second maximum. And that's with no game logic, no blits, no input, no AI, -nothing. I'm just sitting there updating the screen, and 28 fps is my maximum -framerate. Ugh.

-

The solution is called 'dirty rect animation'. Instead of updating the whole -screen every frame, only the parts that changed since the last frame are -updated. I do this by keeping track of those rectangles in a list, then -calling update(the_dirty_rectangles) at the end of the frame. In detail -for a moving sprite, I:

-
-
    -
  • Blit a piece of the background over the sprite's current location, erasing it.

  • -
  • Append the sprite's current location rectangle to a list called dirty_rects.

  • -
  • Move the sprite.

  • -
  • Draw the sprite at its new location.

  • -
  • Append the sprite's new location to my dirty_rects list.

  • -
  • Call display.update(dirty_rects)

  • -
-
-
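Put together, a minimal runnable sketch of those steps could look like this (a black background and a white square stand in for real artwork):

import pygame

pygame.init()
screen = pygame.display.set_mode((320, 240))
background = pygame.Surface(screen.get_size())
background.fill((0, 0, 0))
ball = pygame.Surface((16, 16))
ball.fill((255, 255, 255))
ball_rect = ball.get_rect(topleft=(10, 100))

screen.blit(background, (0, 0))
pygame.display.flip()

for _ in range(60):                                # a few frames of movement
    dirty_rects = []
    screen.blit(background, ball_rect, ball_rect)  # erase the sprite's old position
    dirty_rects.append(ball_rect.copy())           # remember the old area
    ball_rect.move_ip(4, 0)                        # move the sprite
    screen.blit(ball, ball_rect)                   # draw it at the new position
    dirty_rects.append(ball_rect.copy())           # remember the new area
    pygame.display.update(dirty_rects)             # update only those two areas
    pygame.time.delay(20)

The same pattern scales to many sprites; a pygame.sprite.RenderUpdates group can also collect the changed rectangles for you when it draws.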

The difference in speed is astonishing. Consider that SolarWolf has dozens of -constantly moving sprites updating smoothly, and still has enough time left -over to display a parallax starfield in the background, and update that too.

-

There are two cases where this technique just won't work. The first is where -the whole window or screen really is being updated every frame -- think of a -smooth-scrolling engine like an overhead real-time strategy game or a -side-scroller. So what do you do in this case? Well, the short answer is -- -don't write this kind of game in pygame. The long answer is to scroll in steps -of several pixels at a time; don't try to make scrolling perfectly smooth. -Your player will appreciate a game that scrolls quickly, and won't notice the -background jumping along too much.

-

A final note -- not every game requires high framerates. A strategic wargame -could easily get by on just a few updates per second -- in this case, the added -complexity of dirty rect animation may not be necessary.

-
-
-

There is NO rule six.¶

-
-
-

Hardware surfaces are more trouble than they're worth.¶

-

Especially in pygame 2, because HWSURFACE now does nothing

-

If you've been looking at the various flags you can use with -pygame.display.set_mode(), you may have thought like this: Hey, -HWSURFACE! Well, I want that -- who doesn't like hardware acceleration. Ooo... -DOUBLEBUF; well, that sounds fast, I guess I want that too!. It's not -your fault; we've been trained by years of 3-d gaming to believe that hardware -acceleration is good, and software rendering is slow.

-

Unfortunately, hardware rendering comes with a long list of drawbacks:

-
-
    -
  • It only works on some platforms. Windows machines can usually get hardware surfaces if you ask for them. Most other platforms can't. Linux, for example, may be able to provide a hardware surface if X4 is installed, if DGA2 is working properly, and if the moons are aligned correctly. If a hardware surface is unavailable, SDL will silently give you a software surface instead.

  • -
  • It only works fullscreen.

  • -
  • It complicates per-pixel access. If you have a hardware surface, you need to Lock the surface before writing or reading individual pixel values on it. If you don't, Bad Things Happen. Then you need to quickly Unlock the surface again, before the OS gets all confused and starts to panic. Most of this process is automated for you in pygame, but it's something else to take into account.

  • -
  • You lose the mouse pointer. If you specify HWSURFACE (and actually get it), your pointer will usually just vanish (or worse, hang around in a half-there, half-not flickery state). You'll need to create a sprite to act as a manual mouse pointer, and you'll need to worry about pointer acceleration and sensitivity. What a pain.

  • -
  • It might be slower anyway. Many drivers are not accelerated for the types of drawing that we do, and since everything has to be blitted across the video bus (unless you can cram your source surface into video memory as well), it might end up being slower than software access anyway.

  • -
-
-

Hardware rendering has its place. It works pretty reliably under Windows, so -if you're not interested in cross-platform performance, it may provide you with -a substantial speed increase. However, it comes at a cost -- increased -headaches and complexity. It's best to stick with good old reliable -SWSURFACE until you're sure you know what you're doing.

-
-
-

Don't get distracted by side issues.¶

-

Sometimes, new game programmers spend too much time worrying about issues that -aren't really critical to their game's success. The desire to get secondary -issues 'right' is understandable, but early in the process of creating a game, -you cannot even know what the important questions are, let alone what answers -you should choose. The result can be a lot of needless prevarication.

-

For example, consider the question of how to organize your graphics files. -Should each frame have its own graphics file, or each sprite? Perhaps all the -graphics should be zipped up into one archive? A great deal of time has been -wasted on a lot of projects, asking these questions on mailing lists, debating -the answers, profiling, etc, etc. This is a secondary issue; any time spent -discussing it should have been spent coding the actual game.

-

The insight here is that it is far better to have a 'pretty good' solution that -was actually implemented, than a perfect solution that you never got around to -writing.

-
-
-

Rects are your friends.¶

-

Pete Shinners' wrapper may have cool alpha effects and fast blitting speeds, -but I have to admit my favorite part of pygame is the lowly Rect class. A -rect is simply a rectangle -- defined only by the position of its top left -corner, its width, and its height. Many pygame functions take rects as -arguments, and they also take 'rectstyles', a sequence that has the same values -as a rect. So if I need a rectangle that defines the area between 10, 20 and -40, 50, I can do any of the following:

-
rect = pygame.Rect(10, 20, 30, 30)
-rect = pygame.Rect((10, 20, 30, 30))
-rect = pygame.Rect((10, 20), (30, 30))
-rect = (10, 20, 30, 30)
-rect = ((10, 20, 30, 30))
-
-
-

If you use any of the first three versions, however, you get access to Rect's -utility functions. These include functions to move, shrink and inflate rects, -find the union of two rects, and a variety of collision-detection functions.

-

For example, suppose I'd like to get a list of all the sprites that contain a -point (x, y) -- maybe the player clicked there, or maybe that's the current -location of a bullet. It's simple if each sprite has a .rect member -- I just -do:

-
sprites_clicked = [sprite for sprite in all_my_sprites_list if sprite.rect.collidepoint(x, y)]
-
-
-

Rects have no other relation to surfaces or graphics functions, other than the -fact that you can use them as arguments. You can also use them in places that -have nothing to do with graphics, but still need to be defined as rectangles. -Every project I discover a few new places to use rects where I never thought -I'd need them.

-
-
-

Don't bother with pixel-perfect collision detection.¶

-

So you've got your sprites moving around, and you need to know whether or not they're bumping into one another. It's tempting to write something like the following:

-
-
    -
  • Check to see if the rects are in collision. If they aren't, ignore them.

  • -
  • For each pixel in the overlapping area, see if the corresponding pixels from both sprites are opaque. If so, there's a collision.

  • -
-
-

There are other ways to do this, with ANDing sprite masks and so on, but any -way you do it in pygame, it's probably going to be too slow. For most games, -it's probably better just to do 'sub-rect collision' -- create a rect for each -sprite that's a little smaller than the actual image, and use that for -collisions instead. It will be much faster, and in most cases the player won't -notice the imprecision.
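For example, a small sketch of 'sub-rect collision' using plain Rects (the sizes are arbitrary):

import pygame

rect_a = pygame.Rect(10, 10, 32, 32)
rect_b = pygame.Rect(36, 36, 32, 32)

print(rect_a.colliderect(rect_b))      # True -- the full-size rects overlap slightly
hit_a = rect_a.inflate(-8, -8)         # shrink both hitboxes a little
hit_b = rect_b.inflate(-8, -8)
print(hit_a.colliderect(hit_b))        # False -- the shrunken hitboxes miss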

-
-
-

Managing the event subsystem.¶

-

Pygame's event system is kind of tricky. There are actually two different ways -to find out what an input device (keyboard, mouse or joystick) is doing.

-

The first is by directly checking the state of the device. You do this by -calling, say, pygame.mouse.get_pos() or pygame.key.get_pressed(). -This will tell you the state of that device at the moment you call the -function.

-

The second method uses the SDL event queue. This queue is a list of events -- -events are added to the list as they're detected, and they're deleted from the -queue as they're read off.

-

There are advantages and disadvantages to each system. State-checking (system 1) gives you precision -- you know exactly when a given input was made -- if mouse.get_pressed()[0] is true, that means that the left mouse button is down right at this moment. The event queue merely reports that the mouse was down at some time in the past; if you check the queue fairly often, that can be ok, but if you're delayed from checking it by other code, input latency can grow. Another advantage of the state-checking system is that it detects "chording" easily; that is, several states at the same time. If you want to know whether the t and f keys are down at the same time, just check:

-
if key.get_pressed()[K_t] and key.get_pressed()[K_f]:
-    print("Yup!")
-
-
-

In the queue system, however, each keypress arrives in the queue as a -completely separate event, so you'd need to remember that the t key was -down, and hadn't come up yet, while checking for the f key. A little more -complicated.

-

The state system has one great weakness, however. It only reports what the -state of the device is at the moment it's called; if the user hits a mouse -button then releases it just before a call to mouse.get_pressed(), the -mouse button will return 0 -- get_pressed() missed the mouse button press -completely. The two events, MOUSEBUTTONDOWN and MOUSEBUTTONUP, will -still be sitting in the event queue, however, waiting to be retrieved and -processed.

-

The lesson is: choose the system that meets your requirements. If you don't -have much going on in your loop -- say you're just sitting in a while 1 -loop, waiting for input, use get_pressed() or another state function; the -latency will be lower. On the other hand, if every keypress is crucial, but -latency isn't as important -- say your user is typing something in an editbox, -use the event queue. Some keypresses may be slightly late, but at least you'll -get them all.

-

A note about event.poll() vs. wait() -- poll() may seem better, -since it doesn't block your program from doing anything while it's waiting for -input -- wait() suspends the program until an event is received. -However, poll() will consume 100% of available CPU time while it runs, -and it will fill the event queue with NOEVENTS. Use set_blocked() to -select just those event types you're interested in -- your queue will be much -more manageable.

-
-
-

Colorkey vs. Alpha.¶

-

There's a lot of confusion around these two techniques, and much of it comes from the terminology used.

-

'Colorkey blitting' involves telling pygame that all pixels of a certain color -in a certain image are transparent instead of whatever color they happen to be. -These transparent pixels are not blitted when the rest of the image is blitted, -and so don't obscure the background. This is how we make sprites that aren't -rectangular in shape. Simply call surface.set_colorkey(color), where -color is an RGB tuple -- say (0,0,0). This would make every pixel in the source -image transparent instead of black.

-

'Alpha' is different, and it comes in two flavors. 'Image alpha' applies to the -whole image, and is probably what you want. Properly known as 'translucency', -alpha causes each pixel in the source image to be only partially opaque. -For example, if you set a surface's alpha to 192 and then blitted it onto a -background, 3/4 of each pixel's color would come from the source image, and 1/4 -from the background. Alpha is measured from 255 to 0, where 0 is completely -transparent, and 255 is completely opaque. Note that colorkey and alpha -blitting can be combined -- this produces an image that is fully transparent in -some spots, and semi-transparent in others.
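A short sketch combining the two calls (the circle sprite is generated in code rather than loaded from a file):

import pygame

pygame.init()
screen = pygame.display.set_mode((200, 200))

sprite = pygame.Surface((32, 32))
sprite.fill((0, 0, 0))                          # black stands in for the 'empty' corners
pygame.draw.circle(sprite, (0, 200, 0), (16, 16), 14)

sprite.set_colorkey((0, 0, 0))                  # colorkey: black pixels are not blitted
sprite.set_alpha(192)                           # image alpha: the rest is about 3/4 opaque

screen.fill((30, 30, 120))
screen.blit(sprite, (84, 84))
pygame.display.flip()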

-

'Per-pixel alpha' is the other flavor of alpha, and it's more complicated. -Basically, each pixel in the source image has its own alpha value, from 0 to -255. Each pixel, therefore, can have a different opacity when blitted onto a -background. This type of alpha can't be mixed with colorkey blitting, -and it overrides per-image alpha. Per-pixel alpha is rarely used in -games, and to use it you have to save your source image in a graphic -editor with a special alpha channel. It's complicated -- don't use it -yet.

-
-
-

Do things the pythony way.¶

-

A final note (this isn't the least important one; it just comes at the end). -Pygame is a pretty lightweight wrapper around SDL, which is in turn a pretty -lightweight wrapper around your native OS graphics calls. Chances are pretty -good that if your code is still slow, and you've done the things I've mentioned -above, then the problem lies in the way you're addressing your data in python. -Certain idioms are just going to be slow in python no matter what you do. -Luckily, python is a very clear language -- if a piece of code looks awkward or -unwieldy, chances are its speed can be improved, too. Read over Python -Performance Tips for some great advice on how you can improve the speed of -your code. That said, premature optimisation is the root of all evil; if it's -just not fast enough, don't torture the code trying to make it faster. Some -things are just not meant to be :)

-

There you go. Now you know practically everything I know about using pygame. -Now, go write that game!

-
-

David Clark is an avid pygame user and the editor of the Pygame Code -Repository, a showcase for community-submitted python game code. He is also -the author of Twitch, an entirely average pygame arcade game.

-
-
-
- - -

\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games2.html b/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games2.html
deleted file mode 100644
index 5d54038..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games2.html
+++ /dev/null
@@ -1,240 +0,0 @@
-Revision: Pygame fundamentals — pygame v2.1.2 documentation
-

2. Revision: Pygame fundamentals¶

-
-

2.1. The basic Pygame game¶

-

For the sake of revision, and to ensure that you are familiar with the basic structure of a Pygame program, I'll briefly run through -a basic Pygame program, which will display no more than a window with some text in it, that should, by the end, look something like -this (though of course the window decoration will probably be different on your system):

-../_images/tom_basic.png -

The full code for this example looks like this:

-
#!/usr/bin/python
-
-import pygame
-from pygame.locals import *
-
-def main():
-    # Initialise screen
-    pygame.init()
-    screen = pygame.display.set_mode((150, 50))
-    pygame.display.set_caption('Basic Pygame program')
-
-    # Fill background
-    background = pygame.Surface(screen.get_size())
-    background = background.convert()
-    background.fill((250, 250, 250))
-
-    # Display some text
-    font = pygame.font.Font(None, 36)
-    text = font.render("Hello There", 1, (10, 10, 10))
-    textpos = text.get_rect()
-    textpos.centerx = background.get_rect().centerx
-    background.blit(text, textpos)
-
-    # Blit everything to the screen
-    screen.blit(background, (0, 0))
-    pygame.display.flip()
-
-    # Event loop
-    while 1:
-        for event in pygame.event.get():
-            if event.type == QUIT:
-                return
-
-        screen.blit(background, (0, 0))
-        pygame.display.flip()
-
-
-if __name__ == '__main__': main()
-
-
-
-
-

2.2. Basic Pygame objects¶

-

As you can see, the code consists of three main objects: the screen, the background, and the text. Each of these objects is created -by first calling an instance of an in-built Pygame object, and then modifying it to fit our needs. The screen is a slightly special -case, because we still modify the display through Pygame calls, rather than calling methods belonging to the screen object. But for -all other Pygame objects, we first create the object as a copy of a Pygame object, giving it some attributes, and build our game -objects from them.

-

With the background, we first create a Pygame Surface object, and make it the size of the screen. We then perform the convert() operation to convert the Surface to a single pixel format. This is more obviously necessary when we have several images and surfaces, all of different pixel formats, which makes rendering them quite slow. By converting all the surfaces, we can drastically speed up rendering times. Finally, we fill the background surface with an off-white colour (250, 250, 250). These values are RGB (Red Green Blue), and can be worked out from any good paint program.

-

With the text, we require more than one object. First, we create a font object, which defines which font to use, and the size of the -font. Then we create a text object, by using the render method that belongs to our font object, supplying three arguments: -the text to be rendered, whether or not it should be anti-aliased (1=yes, 0=no), and the color of the text (again in RGB format). Next -we create a third text object, which gets the rectangle for the text. The easiest way to understand this is to imagine drawing a -rectangle that will surround all of the text; you can then use this rectangle to get/set the position of the text on the screen. So -in this example we get the rectangle, set its centerx attribute to be the centerx attribute of the -background (so the text's center will be the same as the background's center, i.e. the text will be centered on the screen on the x -axis). We could also set the y coordinate, but it's not any different so I left the text at the top of the screen. As the screen is -small anyway, it didn't seem necessary.

-
-
-

2.3. Blitting¶

-

Now we have created our game objects, we need to actually render them. If we didn't and we ran the program, we'd just see a -blank window, and the objects would remain invisible. The term used for rendering objects is blitting, which is where -you copy the pixels belonging to said object onto the destination object. So to render the background object, you blit it onto the -screen. In this example, to make things simple, we blit the text onto the background (so the background will now have a copy of the -text on it), and then blit the background onto the screen.

-

Blitting is one of the slowest operations in any game, so you need to be careful not to blit too much onto the screen in every frame. -If you have a background image, and a ball flying around the screen, then you could blit the background and then the ball in every -frame, which would cover up the ball's previous position and render the new ball, but this would be pretty slow. A better solution is -to blit the background onto the area that the ball previously occupied, which can be found by the ball's previous rectangle, and then -blitting the ball, so that you are only blitting two small areas.

-
-
-

2.4. The event loop¶

-

Once you've set the game up, you need to put it into a loop so that it will continuously run until the user signals that he/she wants to exit. So you start an open while loop, and then for each iteration of the loop, which will be each frame of the game, update the game. The first thing is to check for any Pygame events, which will be the user hitting the keyboard, clicking a mouse button, moving a joystick, resizing the window, or trying to close it. In this case, we simply want to watch out for the user trying to quit the game by closing the window, in which case the game should return, which will end the while loop. Then we simply need to re-blit the background, and flip (update) the display to have everything drawn. OK, as nothing moves or happens in this example, we don't strictly speaking need to re-blit the background in every iteration, but I put it in because when things are moving around on the screen, you will need to do all your blitting here.

-
-
-

2.5. Ta-da!¶

-

And that's it - your most basic Pygame game! All games will take a form similar to this, but with lots more code for the actual game functions themselves, which are more to do with your own programming, and less guided in structure by the workings of Pygame. This is what this tutorial is really about, and what we will now go on to.

-
-
-
- - -

\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games3.html b/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games3.html
deleted file mode 100644
index a17fb21..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games3.html
+++ /dev/null
@@ -1,220 +0,0 @@
-Kicking things off — pygame v2.1.2 documentation
-

3. Kicking things off¶

-

The first sections of code are relatively simple, and, once written, can usually be reused in every game you consequently make. They -will do all of the boring, generic tasks like loading modules, loading images, opening networking connections, playing music, and so -on. They will also include some simple but effective error handling, and any customisation you wish to provide on top of functions -provided by modules like sys and pygame.

-
-

3.1. The first lines, and loading modules¶

-

First off, you need to start your game and load up your modules. It's always a good idea to set a few things straight at the top of the main source file, such as the name of the file, what it contains, the license it is under, and any other helpful info you might want to give those who will be looking at it. Then you can load modules, with some error checking so that Python doesn't print out a nasty traceback, which non-programmers won't understand. The code is fairly simple, so I won't bother explaining any of it:

-
#!/usr/bin/env python
-#
-# Tom's Pong
-# A simple pong game with realistic physics and AI
-# http://www.tomchance.uklinux.net/projects/pong.shtml
-#
-# Released under the GNU General Public License
-
-VERSION = "0.4"
-
-try:
-    import sys
-    import random
-    import math
-    import os
-    import getopt
-    import pygame
-    from socket import *
-    from pygame.locals import *
-except ImportError as err:
-    print("couldn't load module. %s" % (err))
-    sys.exit(2)
-
-
-
-
-

3.2. Resource handling functions¶

-

In the Line By Line Chimp example, the first code to be written was for loading images and sounds. As these -were totally independent of any game logic or game objects, they were written as separate functions, and were written first so -that later code could make use of them. I generally put all my code of this nature first, in their own, classless functions; these -will, generally speaking, be resource handling functions. You can of course create classes for these, so that you can group them -together, and maybe have an object with which you can control all of your resources. As with any good programming environment, it's up -to you to develop your own best practice and style.

-

It's always a good idea to write your own resource handling functions, -because although Pygame has methods for opening images and sounds, and other modules will have their methods of opening other -resources, those methods can take up more than one line, they can require consistent modification by yourself, and they often don't -provide satisfactory error handling. Writing resource handling functions gives you sophisticated, reusable code, and gives you more -control over your resources. Take this example of an image loading function:

-
def load_png(name):
-    """ Load image and return image object"""
-    fullname = os.path.join('data', name)
-    try:
-        image = pygame.image.load(fullname)
-        if image.get_alpha() is None:
-            image = image.convert()
-        else:
-            image = image.convert_alpha()
-    except pygame.error as message:
-        print('Cannot load image:', fullname)
-        raise SystemExit(message)
-    return image, image.get_rect()
-
-
-

Here we make a more sophisticated image loading function than the one provided by pygame.image.load(). Note that the first line of the function is a documentation string describing what the function does, and what object(s) it returns. The function assumes that all of your images are in a directory called data, and so it takes the filename and creates the full pathname, for example data/ball.png, using the os module to ensure cross-platform compatibility. Then it tries to load the image, and convert any alpha regions so you can achieve transparency, and it returns a more human-readable error if there's a problem. Finally it returns the image object, and its rect.

-

You can make similar functions for loading any other resources, such as loading sounds. You can also make resource handling classes, -to give you more flexibility with more complex resources. For example, you could make a music class, with an __init__ -function that loads the sound (perhaps borrowing from a load_sound() function), a function to pause the music, and a -function to restart. Another handy resource handling class is for network connections. Functions to open sockets, pass data with -suitable security and error checking, close sockets, finger addresses, and other network tasks, can make writing a game with network -capabilities relatively painless.
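As a rough sketch of what such a music class might look like, using pygame.mixer.music (the 'data/theme.ogg' path in the comment is only a placeholder):

import pygame

class Music:
    """Minimal wrapper around pygame.mixer.music for one background track."""

    def __init__(self, filename):
        pygame.mixer.init()
        pygame.mixer.music.load(filename)     # e.g. Music('data/theme.ogg')

    def play(self):
        pygame.mixer.music.play(-1)           # -1 means loop forever

    def pause(self):
        pygame.mixer.music.pause()

    def restart(self):
        pygame.mixer.music.rewind()
        pygame.mixer.music.unpause()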

-

Remember the chief task of these functions/classes is to ensure that by the time you get around to writing game object classes, -and the main loop, there's almost nothing left to do. Class inheritance can make these basic classes especially handy. Don't go -overboard though; functions which will only be used by one class should be written as part of that class, not as a global -function.

-
-
-
- - -

\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games4.html b/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games4.html
deleted file mode 100644
index 0504256..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games4.html
+++ /dev/null
@@ -1,249 +0,0 @@
-Game object classes — pygame v2.1.2 documentation
-

4. Game object classes¶

-

Once you've loaded your modules, and written your resource handling functions, you'll want to get on to writing some game objects. -The way this is done is fairly simple, though it can seem complex at first. You write a class for each type of object in the game, -and then create an instance of those classes for the objects. You can then use those classes' methods to manipulate the objects, -giving objects some motion and interactive capabilities. So your game, in pseudo-code, will look like this:

-
#!/usr/bin/python
-
-# [load modules here]
-
-# [resource handling functions here]
-
-class Ball:
-    # [ball functions (methods) here]
-    # [e.g. a function to calculate new position]
-    # [and a function to check if it hits the side]
-
-def main():
-    # [initiate game environment here]
-
-    # [create new object as instance of ball class]
-    ball = Ball()
-
-    while 1:
-        # [check for user input]
-
-        # [call ball's update function]
-        ball.update()
-
-
-

This is, of course, a very simple example, and you'd need to put in all the code, instead of those little bracketed comments. But you should get the basic idea. You create a class, into which you put all the functions for a ball, including __init__, which would create all the ball's attributes, and update, which would move the ball to its new position, before blitting it onto the screen in this position.

-

You can then create more classes for all of your other game objects, and then create instances of them so that you can handle them -easily in the main function and the main program loop. Contrast this with initiating the ball in the main -function, and then having lots of classless functions to manipulate a set ball object, and you'll hopefully see why using classes is -an advantage: It allows you to put all of the code for each object in one place; it makes using objects easier; it makes adding new -objects, and manipulating them, more flexible. Rather than adding more code for each new ball object, you could simply create new -instances of the Ball class for each new ball object. Magic!

-
-

4.1. A simple ball class¶

-

Here is a simple class with the functions necessary for creating a ball object that will, if the update function is called -in the main loop, move across the screen:

-
class Ball(pygame.sprite.Sprite):
-    """A ball that will move across the screen
-    Returns: ball object
-    Functions: update, calcnewpos
-    Attributes: area, vector"""
-
-    def __init__(self, vector):
-        pygame.sprite.Sprite.__init__(self)
-        self.image, self.rect = load_png('ball.png')
-        screen = pygame.display.get_surface()
-        self.area = screen.get_rect()
-        self.vector = vector
-
-    def update(self):
-        newpos = self.calcnewpos(self.rect,self.vector)
-        self.rect = newpos
-
-    def calcnewpos(self,rect,vector):
-        (angle,z) = vector
-        (dx,dy) = (z*math.cos(angle),z*math.sin(angle))
-        return rect.move(dx,dy)
-
-
-

Here we have the Ball class, with an __init__ function that sets the ball up, an update -function that changes the ball's rectangle to be in the new position, and a calcnewpos function to calculate the ball's -new position based on its current position, and the vector by which it is moving. I'll explain the physics in a moment. The one other -thing to note is the documentation string, which is a little bit longer this time, and explains the basics of the class. These strings -are handy not only to yourself and other programmers looking at the code, but also for tools to parse your code and document it. They -won't make much of a difference in small programs, but with large ones they're invaluable, so it's a good habit to get into.

-
-

4.1.1. Diversion 1: Sprites¶

-

The other reason for creating a class for each object is sprites. Each image you render in your game will be a sprite object, and so -to begin with, the class for each object should inherit the Sprite class. -This is a really nice feature of Python - class -inheritance. Now the Ball class has all of the functions that come with the Sprite class, and any object -instances of the Ball class will be registered by Pygame as sprites. Whereas with text and the background, which don't -move, it's OK to blit the object onto the background, Pygame handles sprite objects in a different manner, which you'll see when we -look at the whole program's code.

-

Basically, you create both a ball object, and a sprite object for that ball, and you then call the ball's update function on the -sprite object, thus updating the sprite. Sprites also give you sophisticated ways of determining if two objects have collided. -Normally you might just check in the main loop to see if their rectangles overlap, but that would involve a lot of code, which would -be a waste because the Sprite class provides two functions (spritecollide and groupcollide) -to do this for you.
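As a small self-contained sketch of spritecollide (the Block class here is invented purely for the example):

import pygame

class Block(pygame.sprite.Sprite):
    """Tiny stand-in sprite with just an image and a rect."""
    def __init__(self, topleft):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.Surface((20, 20))
        self.rect = self.image.get_rect(topleft=topleft)

player = Block((0, 0))
others = pygame.sprite.Group(Block((10, 10)), Block((100, 100)))

hits = pygame.sprite.spritecollide(player, others, False)   # False: don't remove hit sprites
print(len(hits))                                             # 1 -- only the overlapping block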

-
-
-

4.1.2. Diversion 2: Vector physics¶

-

Other than the structure of the Ball class, the notable thing about this code is the vector physics, used to calculate -the ball's movement. With any game involving angular movement, you won't get very far unless you're comfortable with trigonometry, so -I'll just introduce the basics you need to know to make sense of the calcnewpos function.

-

To begin with, you'll notice that the ball has an attribute vector, which is made up of angle and z. -The angle is measured in radians, and will give you the direction in which the ball is moving. Z is the speed at which the ball -moves. So by using this vector, we can determine the direction and speed of the ball, and therefore how much it will move on the x and -y axes:

-../_images/tom_radians.png -

The diagram above illustrates the basic maths behind vectors. In the left hand diagram, you can see the ball's projected movement -represented by the blue line. The length of that line (z) represents its speed, and the angle is the direction in which -it will move. The angle for the ball's movement will always be taken from the x axis on the right, and it is measured clockwise from -that line, as shown in the diagram.

-

From the angle and speed of the ball, we can then work out how much it has moved along the x and y axes. We need to do this because -Pygame doesn't support vectors itself, and we can only move the ball by moving its rectangle along the two axes. So we need to -resolve the angle and speed into its movement on the x axis (dx) and on the y axis (dy). This is a simple matter of -trigonometry, and can be done with the formulae shown in the diagram.
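In code, that resolution is simply (the values for angle and z are just examples):

import math

angle, z = 0.6, 5            # direction in radians and speed, example values only
dx = z * math.cos(angle)     # movement along the x axis
dy = z * math.sin(angle)     # movement along the y axis
print(dx, dy)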

-

If you've studied elementary trigonometry before, none of this should be news to you. But just in case you're forgetful, here are some -useful formulae to remember, that will help you visualise the angles (I find it easier to visualise angles in degrees than in radians!)

-../_images/tom_formulae.png -
-
-
-
- - -

\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games5.html b/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games5.html
deleted file mode 100644
index 97f8f87..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games5.html
+++ /dev/null
@@ -1,238 +0,0 @@
-User-controllable objects — pygame v2.1.2 documentation
-

5. User-controllable objects¶

-

So far you can create a Pygame window, and render a ball that will fly across the screen. The next step is to make some bats which -the user can control. This is potentially far more simple than the ball, because it requires no physics (unless your user-controlled -object will move in ways more complex than up and down, e.g. a platform character like Mario, in which case you'll need more physics). -User-controllable objects are pretty easy to create, thanks to Pygame's event queue system, as you'll see.

-
-

5.1. A simple bat class¶

-

The principle behind the bat class is similar to that of the ball class. You need an __init__ function to initialise the bat (so you can create object instances for each bat), an update function to perform per-frame changes on the bat before it is blitted to the screen, and the functions that will define what this class will actually do. Here's some sample code:

-
class Bat(pygame.sprite.Sprite):
-    """Movable tennis 'bat' with which one hits the ball
-    Returns: bat object
-    Functions: reinit, update, moveup, movedown
-    Attributes: which, speed"""
-
-    def __init__(self, side):
-        pygame.sprite.Sprite.__init__(self)
-        self.image, self.rect = load_png('bat.png')
-        screen = pygame.display.get_surface()
-        self.area = screen.get_rect()
-        self.side = side
-        self.speed = 10
-        self.state = "still"
-        self.reinit()
-
-    def reinit(self):
-        self.state = "still"
-        self.movepos = [0,0]
-        if self.side == "left":
-            self.rect.midleft = self.area.midleft
-        elif self.side == "right":
-            self.rect.midright = self.area.midright
-
-    def update(self):
-        newpos = self.rect.move(self.movepos)
-        if self.area.contains(newpos):
-            self.rect = newpos
-        pygame.event.pump()
-
-    def moveup(self):
-        self.movepos[1] = self.movepos[1] - (self.speed)
-        self.state = "moveup"
-
-    def movedown(self):
-        self.movepos[1] = self.movepos[1] + (self.speed)
-        self.state = "movedown"
-
-
-

As you can see, this class is very similar to the ball class in its structure. But there are differences in what each function does. First of all, there is a reinit function, which is used when a round ends, and the bat needs to be set back in its starting place, with any attributes set back to their necessary values. Next, the way in which the bat is moved is a little more complex than with the ball, because here its movement is simple (up/down), but it relies on the user telling it to move, unlike the ball which just keeps moving in every frame. To make sense of how the bat moves, it is helpful to look at a quick diagram showing the sequence of events:

-../_images/tom_event-flowchart.png -

What happens here is that the person controlling the bat pushes down on the key that moves the bat up. For each iteration of the main game loop (for every frame), if the key is still held down, then the state attribute of that bat object will be set to "moveup", and the moveup function will be called, causing the bat's y position to be reduced by the value of the speed attribute (in this example, 10). In other words, so long as the key is held down, the bat will move up the screen by 10 pixels per frame. The state attribute isn't used here yet, but it's useful to know if you're dealing with spin, or would like some useful debugging output.

-

As soon as the player lets go of that key, the second set of boxes is invoked, and the state attribute of the bat object -will be set back to "still", and the movepos attribute will be set back to [0,0], meaning that when the update function is called, it won't move the bat any more. So when the player lets go of the key, the bat stops moving. Simple!

-
-

5.1.1. Diversion 3: Pygame events

-

So how do we know when the player is pushing keys down, and then releasing them? With the Pygame event queue system, dummy! It's a really easy system to use and understand, so this shouldn't take long :) You've already seen the event queue in action in the basic Pygame program, where it was used to check if the user was quitting the application. The code for moving the bat is about as simple as that:

-
for event in pygame.event.get():
-    if event.type == QUIT:
-        return
-    elif event.type == KEYDOWN:
-        if event.key == K_UP:
-            player.moveup()
-        if event.key == K_DOWN:
-            player.movedown()
-    elif event.type == KEYUP:
-        if event.key == K_UP or event.key == K_DOWN:
-            player.movepos = [0,0]
-            player.state = "still"
-
-
-

Here we assume that you've already created an instance of a bat and called the object player. You can see the familiar layout of the for structure, which iterates through each event found in the Pygame event queue, retrieved with the event.get() function. As the user hits keys, pushes mouse buttons and moves the joystick about, those actions are pumped into the Pygame event queue and left there until dealt with. So in each iteration of the main game loop, you go through these events, checking if they're ones you want to deal with, and then dealing with them appropriately. The event.pump() call in the Bat.update function then runs in every iteration to pump out old events and keep the queue current.
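If you want to poke at the queue itself, here is a tiny standalone sketch (it opens a minimal window, since the event queue needs the display initialised) showing that events simply wait in the queue until event.get() drains them:

# Standalone sketch: events sit in the queue until event.get() drains them.
import pygame

pygame.init()
pygame.display.set_mode((200, 100))      # the event queue needs a display

pygame.event.post(pygame.event.Event(pygame.USEREVENT, note="first"))
pygame.event.post(pygame.event.Event(pygame.USEREVENT, note="second"))

for event in pygame.event.get():         # drains everything queued so far
    if event.type == pygame.USEREVENT:
        print(event.note)                # prints "first" then "second"

pygame.quit()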

-

First we check if the user is quitting the program, and quit it if they are. Then we check if any keys are being pushed down, and if they are, we check if they're the designated keys for moving the bat up and down. If they are, we call the appropriate moving function and set the player state appropriately (the moveup and movedown states are actually set inside the moveup() and movedown() functions themselves, which makes for neater code and doesn't break encapsulation: the object assigns its own attributes, rather than the event loop poking at the internals of that particular instance). Notice here we have three states: still, moveup, and movedown. Again, these come in handy if you want to debug or calculate spin. We also check if any keys have been "let go" (i.e. are no longer being held down), and again, if they're the right keys, we stop the bat from moving.
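As an aside - this is not what the tutorial's code does, just an alternative worth knowing about - you can skip the KEYDOWN/KEYUP bookkeeping entirely and poll the keyboard once per frame with pygame.key.get_pressed(), which reports the current held/not-held state of every key. A fragment that would sit inside the main loop (assuming a Bat instance called player and the display surface screen) might look like:

# Alternative sketch (not used in this tutorial): poll held keys once per frame.
keys = pygame.key.get_pressed()
dy = 0
if keys[K_UP]:
    dy = -10
elif keys[K_DOWN]:
    dy = 10
player.rect.move_ip(0, dy)                   # move the bat directly this frame
player.rect.clamp_ip(screen.get_rect())      # keep the bat on the screen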

-
-
-
-

-
\ No newline at end of file
diff --git a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games6.html b/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games6.html
deleted file mode 100644
index 3ab1389..0000000
--- a/venv/Lib/site-packages/pygame/docs/generated/tut/tom_games6.html
+++ /dev/null
@@ -1,436 +0,0 @@
-Putting it all together — pygame v2.1.2 documentation

6. Putting it all together

-

So far you've learnt all the basics necessary to build a simple game. You should understand how to create Pygame objects, how Pygame displays objects, how it handles events, and how you can use physics to introduce some motion into your game. Now I'll just show how you can take all those chunks of code and put them together into a working game. What we need first is to let the ball hit the sides of the screen, and for the bat to be able to hit the ball, otherwise there's not going to be much game play involved. We do this using Pygame's collision methods.

-
-

6.1. Let the ball hit sides

-

The basic principle behind making it bounce off the sides is easy to grasp. You grab the coordinates of the four corners of the ball, and check to see if they correspond with the x or y coordinate of the edge of the screen. So if the top-right and top-left corners both have a y coordinate of zero, you know that the ball is currently on the top edge of the screen. We do all this in the update function, after we've worked out the new position of the ball.

-
if not self.area.contains(newpos):
-    tl = not self.area.collidepoint(newpos.topleft)
-    tr = not self.area.collidepoint(newpos.topright)
-    bl = not self.area.collidepoint(newpos.bottomleft)
-    br = not self.area.collidepoint(newpos.bottomright)
-    if tr and tl or (br and bl):
-        angle = -angle
-    if tl and bl:
-        self.offcourt(player=2)
-    if tr and br:
-        self.offcourt(player=1)
-
-self.vector = (angle,z)
-
-
-

Here we check to see if the area contains the new position of the ball (it always should, so we needn't have an else clause, though in other circumstances you might want to consider one). We then check whether the coordinates of the four corners are colliding with the area's edges, and create an object for each result: the value will be True (1) for a corner that has gone past an edge, and False (0) otherwise. We then see if the ball has hit the top or bottom, and if it has we change the ball's direction. Handily, using radians we can do this by simply reversing the angle's sign. We also check to see if the ball has gone off the sides, and if it has we call the offcourt function. This, in my game, resets the ball, adds 1 point to the score of the player specified when calling the function, and displays the new score.
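To see why flipping the sign works, recall from calcnewpos that the per-frame movement is dx = z*cos(angle) and dy = z*sin(angle); cos(-angle) equals cos(angle) while sin(-angle) equals -sin(angle), so negating the angle keeps the horizontal speed and reverses the vertical one. A quick numeric check (illustrative values only):

# Quick check of the bounce-off-top/bottom trick (illustrative values only).
import math

angle, z = 0.47, 13                          # the vector the ball starts with
print(round(z * math.cos(angle), 2), round(z * math.sin(angle), 2))     # 11.59 5.89
print(round(z * math.cos(-angle), 2), round(z * math.sin(-angle), 2))   # 11.59 -5.89
# Same horizontal speed, vertical component reversed - the ball has bounced.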

-

Finally, we recompile the vector based on the new angle. And that is it. The ball will now merrily bounce off the walls and go offcourt with good grace.

-
-
-

6.2. Let the ball hit bats

-

Making the ball hit the bats is very similar to making it hit the sides of the screen. We still use the collide method, but this time we check to see if the rectangles for the ball and either bat collide. In this code I've also put in some extra code to avoid various glitches. You'll find that you'll have to put all sorts of extra code in to avoid glitches and bugs, so it's good to get used to seeing it.

-
else:
-    # Deflate (copies of) the rectangles so you can't catch a ball behind the bat.
-    # Note Rect.inflate() returns a new rectangle rather than changing the original,
-    # so we keep the shrunken copies and test against those.
-    bat1 = player1.rect.inflate(-3, -3)
-    bat2 = player2.rect.inflate(-3, -3)
-
-    # Do ball and bat collide?
-    # Note I put in an odd rule that sets self.hit to 1 when they collide, and unsets it in the next
-    # iteration. This is to stop odd ball behaviour where it finds a collision *inside* the
-    # bat, the ball reverses, and is still inside the bat, so bounces around inside.
-    # This way, the ball can always escape and bounce away cleanly
-    if self.rect.colliderect(bat1) and not self.hit:
-        angle = math.pi - angle
-        self.hit = not self.hit
-    elif self.rect.colliderect(bat2) and not self.hit:
-        angle = math.pi - angle
-        self.hit = not self.hit
-    elif self.hit:
-        self.hit = not self.hit
-self.vector = (angle,z)
-
-
-

We start this section with an else statement, because this carries on from the previous chunk of code that checks if the ball hits the sides. It makes sense that if it doesn't hit the sides, it might hit a bat, so we carry on the conditional statement. The first glitch to fix is to test against copies of the players' rectangles shrunk by 3 pixels in each dimension, to stop a bat catching a ball that goes behind it (if you imagine you move the bat just as the ball travels behind it, the rectangles would overlap, and the ball would normally count as "hit" - this prevents that).

-

Next we check if the rectangles collide, with one more glitch fix. Notice that I've commented on these odd bits of code - it's always good to explain bits of code that are abnormal, both for others who look at your code, and so you understand it when you come back to it. Without the fix, the ball might hit a corner of the bat, change direction, and one frame later still find itself inside the bat. Then it would again think it had been hit, and change its direction. This can happen several times, making the ball's motion completely unrealistic. So we have a variable, self.hit, which we set to True when the ball has been hit and back to False one frame later. When we check if the rectangles have collided, we also check whether self.hit is True, to stop this internal bouncing.
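To see the glitch and the fix in isolation, here is a small display-free sketch (illustrative numbers) of a ball that starts overlapping a bat: with the self.hit cooldown it reverses exactly once and escapes, whereas flipping direction on every overlapping frame would leave it rattling back and forth inside the bat.

# Display-free sketch of the self.hit cooldown (illustrative values only).
import pygame

bat = pygame.Rect(300, 200, 16, 64)
ball = pygame.Rect(296, 220, 16, 16)     # already overlapping the bat by 12 pixels
direction, speed, hit = 1, 8, False      # direction: +1 right, -1 left

for frame in range(4):
    if ball.colliderect(bat) and not hit:
        direction = -direction           # bounce once...
        hit = True
    elif hit:
        hit = False                      # ...then one frame of grace to escape
    ball.move_ip(direction * speed, 0)
    print(frame, ball.left, direction)   # direction flips only once; ball moves away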

-

The important code here is pretty easy to understand. All rectangles have a colliderect function, into which you feed the rectangle of another object; it returns True if the rectangles overlap, and False if not. If they do, we can change the direction by subtracting the current angle from pi (again, a handy trick you can do with radians: since cos(pi - angle) = -cos(angle) while sin(pi - angle) = sin(angle), this reflects the angle about the vertical axis, reversing the ball's horizontal direction while leaving its vertical motion alone; you might find at this point that a thorough understanding of radians is in order!). Just to finish the glitch checking, we switch self.hit back to False on the frame after the collision.

-

We also then recompile the vector. You would of course want to remove the same line from the previous chunk of code, so that you only do this once, after the if-else conditional statement. And that's it! The combined code will now allow the ball to hit sides and bats.

-
-
-

6.3. The Finished product

-

The final product, with all the bits of code thrown together, as well as some other bits of code to glue it all together, will look like this:

-
#
-# Tom's Pong
-# A simple pong game with realistic physics and AI
-# http://www.tomchance.uklinux.net/projects/pong.shtml
-#
-# Released under the GNU General Public License
-
-VERSION = "0.4"
-
-try:
-    import sys
-    import random
-    import math
-    import os
-    import getopt
-    import pygame
-    from socket import *
-    from pygame.locals import *
-except ImportError as err:
-    print("couldn't load module. %s" % (err))
-    sys.exit(2)
-
-def load_png(name):
-    """ Load image and return image object"""
-    fullname = os.path.join('data', name)
-    try:
-        image = pygame.image.load(fullname)
-        if image.get_alpha is None:
-            image = image.convert()
-        else:
-            image = image.convert_alpha()
-    except pygame.error as message:
-        print('Cannot load image:', fullname)
-        raise SystemExit(message)
-    return image, image.get_rect()
-
-class Ball(pygame.sprite.Sprite):
-    """A ball that will move across the screen
-    Returns: ball object
-    Functions: update, calcnewpos
-    Attributes: area, vector"""
-
-    def __init__(self, xy, vector):  # xy is accepted but not used; the rect comes from the image
-        pygame.sprite.Sprite.__init__(self)
-        self.image, self.rect = load_png('ball.png')
-        screen = pygame.display.get_surface()
-        self.area = screen.get_rect()
-        self.vector = vector
-        self.hit = 0
-
-    def update(self):
-        newpos = self.calcnewpos(self.rect,self.vector)
-        self.rect = newpos
-        (angle,z) = self.vector
-
-        if not self.area.contains(newpos):
-            tl = not self.area.collidepoint(newpos.topleft)
-            tr = not self.area.collidepoint(newpos.topright)
-            bl = not self.area.collidepoint(newpos.bottomleft)
-            br = not self.area.collidepoint(newpos.bottomright)
-            if tr and tl or (br and bl):
-                angle = -angle
-            if tl and bl:
-                #self.offcourt()
-                angle = math.pi - angle
-            if tr and br:
-                angle = math.pi - angle
-                #self.offcourt()
-        else:
-            # Deflate (copies of) the rectangles so you can't catch a ball behind the bat.
-            # Note Rect.inflate() returns a new rectangle rather than changing the original,
-            # so we keep the shrunken copies and test against those.
-            bat1 = player1.rect.inflate(-3, -3)
-            bat2 = player2.rect.inflate(-3, -3)
-
-            # Do ball and bat collide?
-            # Note I put in an odd rule that sets self.hit to 1 when they collide, and unsets it in the next
-            # iteration. This is to stop odd ball behaviour where it finds a collision *inside* the
-            # bat, the ball reverses, and is still inside the bat, so bounces around inside.
-            # This way, the ball can always escape and bounce away cleanly
-            if self.rect.colliderect(bat1) and not self.hit:
-                angle = math.pi - angle
-                self.hit = not self.hit
-            elif self.rect.colliderect(bat2) and not self.hit:
-                angle = math.pi - angle
-                self.hit = not self.hit
-            elif self.hit:
-                self.hit = not self.hit
-        self.vector = (angle,z)
-
-    def calcnewpos(self,rect,vector):
-        (angle,z) = vector
-        (dx,dy) = (z*math.cos(angle),z*math.sin(angle))
-        return rect.move(dx,dy)
-
-class Bat(pygame.sprite.Sprite):
-    """Movable tennis 'bat' with which one hits the ball
-    Returns: bat object
-    Functions: reinit, update, moveup, movedown
-    Attributes: side, speed, state"""
-
-    def __init__(self, side):
-        pygame.sprite.Sprite.__init__(self)
-        self.image, self.rect = load_png('bat.png')
-        screen = pygame.display.get_surface()
-        self.area = screen.get_rect()
-        self.side = side
-        self.speed = 10
-        self.state = "still"
-        self.reinit()
-
-    def reinit(self):
-        self.state = "still"
-        self.movepos = [0,0]
-        if self.side == "left":
-            self.rect.midleft = self.area.midleft
-        elif self.side == "right":
-            self.rect.midright = self.area.midright
-
-    def update(self):
-        newpos = self.rect.move(self.movepos)
-        if self.area.contains(newpos):
-            self.rect = newpos
-        pygame.event.pump()
-
-    def moveup(self):
-        self.movepos[1] = self.movepos[1] - (self.speed)
-        self.state = "moveup"
-
-    def movedown(self):
-        self.movepos[1] = self.movepos[1] + (self.speed)
-        self.state = "movedown"
-
-
-def main():
-    # Initialise screen
-    pygame.init()
-    screen = pygame.display.set_mode((640, 480))
-    pygame.display.set_caption('Basic Pong')
-
-    # Fill background
-    background = pygame.Surface(screen.get_size())
-    background = background.convert()
-    background.fill((0, 0, 0))
-
-    # Initialise players
-    global player1
-    global player2
-    player1 = Bat("left")
-    player2 = Bat("right")
-
-    # Initialise ball
-    speed = 13
-    rand = ((0.1 * (random.randint(5,8))))  # note: not actually used below
-    ball = Ball((0,0),(0.47,speed))
-
-    # Initialise sprites
-    playersprites = pygame.sprite.RenderPlain((player1, player2))
-    ballsprite = pygame.sprite.RenderPlain(ball)
-
-    # Blit everything to the screen
-    screen.blit(background, (0, 0))
-    pygame.display.flip()
-
-    # Initialise clock
-    clock = pygame.time.Clock()
-
-    # Event loop
-    while 1:
-        # Make sure game doesn't run at more than 60 frames per second
-        clock.tick(60)
-
-        for event in pygame.event.get():
-            if event.type == QUIT:
-                return
-            elif event.type == KEYDOWN:
-                if event.key == K_a:
-                    player1.moveup()
-                if event.key == K_z:
-                    player1.movedown()
-                if event.key == K_UP:
-                    player2.moveup()
-                if event.key == K_DOWN:
-                    player2.movedown()
-            elif event.type == KEYUP:
-                if event.key == K_a or event.key == K_z:
-                    player1.movepos = [0,0]
-                    player1.state = "still"
-                if event.key == K_UP or event.key == K_DOWN:
-                    player2.movepos = [0,0]
-                    player2.state = "still"
-
-        screen.blit(background, ball.rect, ball.rect)
-        screen.blit(background, player1.rect, player1.rect)
-        screen.blit(background, player2.rect, player2.rect)
-        ballsprite.update()
-        playersprites.update()
-        ballsprite.draw(screen)
-        playersprites.draw(screen)
-        pygame.display.flip()
-
-
-if __name__ == '__main__': main()
-
-
-

As well as showing you the final product, I'll point you back to TomPong, upon which all of this is based. Download it, have a look at the source code, and you'll see a full implementation of pong using all of the code you've seen in this tutorial, as well as lots of other code I've added in various versions, such as some extra physics for spinning, and various other bug and glitch fixes.

-

Oh, find TomPong at http://www.tomchance.uklinux.net/projects/pong.shtml.

-
-
-

- - - - \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/draw.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/draw.cp39-win_amd64.pyd deleted file mode 100644 index 70528da..0000000 Binary files a/venv/Lib/site-packages/pygame/draw.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/draw.pyi b/venv/Lib/site-packages/pygame/draw.pyi deleted file mode 100644 index 8a24089..0000000 --- a/venv/Lib/site-packages/pygame/draw.pyi +++ /dev/null @@ -1,74 +0,0 @@ -from typing import Optional, Sequence - -from pygame.rect import Rect -from pygame.surface import Surface - -from ._common import _ColorValue, _Coordinate, _RectValue - -def rect( - surface: Surface, - color: _ColorValue, - rect: _RectValue, - width: int = 0, - border_radius: int = -1, - border_top_left_radius: int = -1, - border_top_right_radius: int = -1, - border_bottom_left_radius: int = -1, - border_bottom_right_radius: int = -1, -) -> Rect: ... -def polygon( - surface: Surface, - color: _ColorValue, - points: Sequence[_Coordinate], - width: int = 0, -) -> Rect: ... -def circle( - surface: Surface, - color: _ColorValue, - center: _Coordinate, - radius: float, - width: int = 0, - draw_top_right: Optional[bool] = None, - draw_top_left: Optional[bool] = None, - draw_bottom_left: Optional[bool] = None, - draw_bottom_right: Optional[bool] = None, -) -> Rect: ... -def ellipse( - surface: Surface, color: _ColorValue, rect: _RectValue, width: int = 0 -) -> Rect: ... -def arc( - surface: Surface, - color: _ColorValue, - rect: _RectValue, - start_angle: float, - stop_angle: float, - width: int = 1, -) -> Rect: ... -def line( - surface: Surface, - color: _ColorValue, - start_pos: _Coordinate, - end_pos: _Coordinate, - width: int = 1, -) -> Rect: ... -def lines( - surface: Surface, - color: _ColorValue, - closed: bool, - points: Sequence[_Coordinate], - width: int = 1, -) -> Rect: ... -def aaline( - surface: Surface, - color: _ColorValue, - start_pos: _Coordinate, - end_pos: _Coordinate, - blend: int = 1, -) -> Rect: ... -def aalines( - surface: Surface, - color: _ColorValue, - closed: bool, - points: Sequence[_Coordinate], - blend: int = 1, -) -> Rect: ... diff --git a/venv/Lib/site-packages/pygame/draw_py.py b/venv/Lib/site-packages/pygame/draw_py.py deleted file mode 100644 index edd1c0c..0000000 --- a/venv/Lib/site-packages/pygame/draw_py.py +++ /dev/null @@ -1,564 +0,0 @@ -# -*- coding: utf-8 -*- -"""Pygame Drawing algorithms written in Python. (Work in Progress) - -Implement Pygame's Drawing Algorithms in a Python version for testing -and debugging. -""" - -from collections import namedtuple -from math import floor, ceil - - -# H E L P E R F U N C T I O N S # - -# fractional part of x - - -def frac(value): - """return fractional part of x""" - return value - floor(value) - - -def inv_frac(value): - """return inverse fractional part of x""" - return 1 - (value - floor(value)) # eg, 1 - frac(x) - - -BoundingBox = namedtuple("BoundingBox", ["left", "top", "right", "bottom"]) -Point = namedtuple("Point", ["x", "y"]) - - -# L O W L E V E L D R A W F U N C T I O N S # -# (They are too low-level to be translated into python, right?) 
- - -def set_at(surf, in_x, in_y, color): - """Set the color of a pixel in a surface""" - surf.set_at((in_x, in_y), color) - - -def draw_pixel(surf, pos, color, bright, blend=True): - """draw one blended pixel with given brightness.""" - try: - other_col = surf.get_at(pos) if blend else (0, 0, 0, 0) - except IndexError: # pixel outside the surface - return - new_color = tuple( - (bright * col + (1 - bright) * pix) for col, pix in zip(color, other_col) - ) - # FIXME what should happen if only one, color or surf_col, has alpha? - surf.set_at(pos, new_color) - - -def _drawhorzline(surf, color, x_from, in_y, x_to): - if x_from == x_to: - surf.set_at((x_from, in_y), color) - return - - start, end = (x_from, x_to) if x_from <= x_to else (x_to, x_from) - for line_x in range(start, end + 1): - surf.set_at((line_x, in_y), color) - - -def _drawvertline(surf, color, in_x, y_from, y_to): - if y_from == y_to: - surf.set_at((in_x, y_from), color) - return - - start, end = (y_from, y_to) if y_from <= y_to else (y_to, y_from) - for line_y in range(start, end + 1): - surf.set_at((in_x, line_y), color) - - -# I N T E R N A L D R A W L I N E F U N C T I O N S # - - -def _clip_and_draw_horizline(surf, color, x_from, in_y, x_to): - """draw clipped horizontal line.""" - # check Y inside surf - clip = surf.get_clip() - if in_y < clip.y or in_y >= clip.y + clip.h: - return - - x_from = max(x_from, clip.x) - x_to = min(x_to, clip.x + clip.w - 1) - - # check any x inside surf - if x_to < clip.x or x_from >= clip.x + clip.w: - return - - _drawhorzline(surf, color, x_from, in_y, x_to) - - -def _clip_and_draw_vertline(surf, color, in_x, y_from, y_to): - """draw clipped vertical line.""" - # check X inside surf - clip = surf.get_clip() - - if in_x < clip.x or in_x >= clip.x + clip.w: - return - - y_from = max(y_from, clip.y) - y_to = min(y_to, clip.y + clip.h - 1) - - # check any y inside surf - if y_to < clip.y or y_from >= clip.y + clip.h: - return - - _drawvertline(surf, color, in_x, y_from, y_to) - - -# These constants xxx_EDGE are "outside-the-bounding-box"-flags -LEFT_EDGE = 0x1 -RIGHT_EDGE = 0x2 -BOTTOM_EDGE = 0x4 -TOP_EDGE = 0x8 - - -def encode(pos, b_box): - """returns a code that defines position with respect to a bounding box""" - # we use the fact that python interprets booleans (the inequalities) - # as 0/1, and then multiply them with the xxx_EDGE flags - return ( - (pos[0] < b_box.left) * LEFT_EDGE - + (pos[0] > b_box.right) * RIGHT_EDGE - + (pos[1] < b_box.top) * TOP_EDGE - + (pos[1] > b_box.bottom) * BOTTOM_EDGE - ) - - -def clip_line(line, b_box, use_float=False): - """Algorithm to calculate the clipped line. - - We calculate the coordinates of the part of the line segment within the - bounding box (defined by left, top, right, bottom). The we write - the coordinates of the line segment into "line", much like the C-algorithm. - With `use_float` True, clip_line is usable for float-clipping. - - Returns: true if the line segment cuts the bounding box (false otherwise) - """ - - def inside(code): - return not code - - def accept(code_a, code_b): - return not (code_a or code_b) - - def reject(code_a, code_b): - return code_a and code_b - - assert isinstance(line, list) - x_1, y_1, x_2, y_2 = line - dtype = float if use_float else int - - while True: - # the coordinates are progressively modified with the codes, - # until they are either rejected or correspond to the final result. 
- code1 = encode((x_1, y_1), b_box) - code2 = encode((x_2, y_2), b_box) - - if accept(code1, code2): - # write coordinates into "line" ! - line[:] = x_1, y_1, x_2, y_2 - return True - if reject(code1, code2): - return False - - # We operate on the (x_1, y_1) point, - # and swap if it is inside the bbox: - if inside(code1): - x_1, x_2 = x_2, x_1 - y_1, y_2 = y_2, y_1 - code1, code2 = code2, code1 - slope = (y_2 - y_1) / float(x_2 - x_1) if (x_2 != x_1) else 1.0 - # Each case, if true, means that we are outside the border: - # calculate x_1 and y_1 to be the "first point" inside the bbox... - if code1 & LEFT_EDGE: - y_1 += dtype((b_box.left - x_1) * slope) - x_1 = b_box.left - elif code1 & RIGHT_EDGE: - y_1 += dtype((b_box.right - x_1) * slope) - x_1 = b_box.right - elif code1 & BOTTOM_EDGE: - if x_2 != x_1: - x_1 += dtype((b_box.bottom - y_1) / slope) - y_1 = b_box.bottom - elif code1 & TOP_EDGE: - if x_2 != x_1: - x_1 += dtype((b_box.top - y_1) / slope) - y_1 = b_box.top - - -def _draw_line(surf, color, start, end): - """draw a non-horizontal line (without anti-aliasing).""" - # Variant of https://en.wikipedia.org/wiki/Bresenham's_line_algorithm - # - # This strongly differs from craw.c implementation, because we use a - # "slope" variable (instead of delta_x and delta_y) and a "error" variable. - # And we can not do pointer-arithmetic with "BytesPerPixel", like in - # the C-algorithm. - if start.x == end.x: - # This case should not happen... - raise ValueError - - slope = abs((end.y - start.y) / (end.x - start.x)) - error = 0.0 - - if slope < 1: - # Here, it's a rather horizontal line - - # 1. check in which octants we are & set init values - if end.x < start.x: - start.x, end.x = end.x, start.x - start.y, end.y = end.y, start.y - line_y = start.y - dy_sign = 1 if (start.y < end.y) else -1 - - # 2. step along x coordinate - for line_x in range(start.x, end.x + 1): - set_at(surf, line_x, line_y, color) - error += slope - if error >= 0.5: - line_y += dy_sign - error -= 1 - else: - # Case of a rather vertical line - - # 1. check in which octants we are & set init values - if start.y > end.y: - start.x, end.x = end.x, start.x - start.y, end.y = end.y, start.y - line_x = start.x - slope = 1 / slope - dx_sign = 1 if (start.x < end.x) else -1 - - # 2. step along y coordinate - for line_y in range(start.y, end.y + 1): - set_at(surf, line_x, line_y, color) - error += slope - if error >= 0.5: - line_x += dx_sign - error -= 1 - - -def _draw_aaline(surf, color, start, end, blend): - """draw an anti-aliased line. - - The algorithm yields identical results with _draw_line for horizontal, - vertical or diagonal lines, and results changes smoothly when changing - any of the endpoint coordinates. - - Note that this yields strange results for very short lines, eg - a line from (0, 0) to (0, 1) will draw 2 pixels, and a line from - (0, 0) to (0, 1.1) will blend 10 % on the pixel (0, 2). - """ - # The different requirements that we have on an antialiasing algorithm - # implies to make some compromises: - # 1. We want smooth evolution wrt to the 4 endpoint coordinates - # (this means also that we want a smooth evolution when the angle - # passes +/- 45° - # 2. We want the same behavior when swapping the endpoints - # 3. 
We want understandable results for the endpoint values - # (eg we want to avoid half-integer values to draw a simple plain - # horizontal or vertical line between two integer l endpoints) - # - # This implies to somehow make the line artificially 1 pixel longer - # and to draw a full pixel when we have the endpoints are identical. - d_x = end.x - start.x - d_y = end.y - start.y - - if d_x == 0 and d_y == 0: - # For smoothness reasons, we could also do some blending here, - # but it seems overshoot... - set_at(surf, int(start.x), int(start.y), color) - return - - if start.x > end.x or start.y > end.y: - start.x, end.x = end.x, start.x - start.y, end.y = end.y, start.y - d_x = -d_x - d_y = -d_y - - if abs(d_x) >= abs(d_y): - slope = d_y / d_x - - def draw_two_pixel(in_x, float_y, factor): - flr_y = floor(float_y) - draw_pixel(surf, (in_x, flr_y), color, factor * inv_frac(float_y), blend) - draw_pixel(surf, (in_x, flr_y + 1), color, factor * frac(float_y), blend) - - _draw_aaline_dx(d_x, slope, end, start, draw_two_pixel) - else: - slope = d_x / d_y - - def draw_two_pixel(float_x, in_y, factor): - fl_x = floor(float_x) - draw_pixel(surf, (fl_x, in_y), color, factor * inv_frac(float_x), blend) - draw_pixel(surf, (fl_x + 1, in_y), color, factor * frac(float_x), blend) - - _draw_aaline_dy(d_y, slope, end, start, draw_two_pixel) - - -def _draw_aaline_dy(d_y, slope, end, start, draw_two_pixel): - g_y = ceil(start.y) - g_x = start.x + (g_y - start.y) * slope - # 1. Draw start of the segment - if start.y < g_y: - draw_two_pixel(g_x - slope, floor(start.y), inv_frac(start.y)) - # 2. Draw end of the segment - rest = frac(end.y) - s_y = ceil(end.y) - if rest > 0: - s_x = start.x + slope * (d_y + 1 - rest) - draw_two_pixel(s_x, s_y, rest) - else: - s_y += 1 - # 3. loop for other points - for line_y in range(g_y, s_y): - line_x = g_x + slope * (line_y - g_y) - draw_two_pixel(line_x, line_y, 1) - - -def _draw_aaline_dx(d_x, slope, end, start, draw_two_pixel): - # A and G are respectively left and right to the "from" point, but - # with integer-x-coordinate, (and only if from_x is not integer). - # Hence they appear in following order on the line in general case: - # A from-pt G . . . to-pt S - # |------*-------|--- . . . ---|-----*------|- - g_x = ceil(start.x) - g_y = start.y + (g_x - start.x) * slope - # 1. Draw start of the segment if we have a non-integer-part - if start.x < g_x: - # this corresponds to the point "A" - draw_two_pixel(floor(start.x), g_y - slope, inv_frac(start.x)) - # 2. Draw end of the segment: we add one pixel for homogeneity reasons - rest = frac(end.x) - s_x = ceil(end.x) - if rest > 0: - # Again we draw only if we have a non-integer-part - s_y = start.y + slope * (d_x + 1 - rest) - draw_two_pixel(s_x, s_y, rest) - else: - s_x += 1 - # 3. loop for other points - for line_x in range(g_x, s_x): - line_y = g_y + slope * (line_x - g_x) - draw_two_pixel(line_x, line_y, 1) - - -# C L I P A N D D R A W L I N E F U N C T I O N S # - - -def _clip_and_draw_line(surf, rect, color, pts): - """clip the line into the rectangle and draw if needed. - - Returns true if anything has been drawn, else false.""" - # "pts" is a list with the four coordinates of the two endpoints - # of the line to be drawn : pts = x1, y1, x2, y2. - # The data format is like that to stay closer to the C-algorithm. 
- if not clip_line( - pts, BoundingBox(rect.x, rect.y, rect.x + rect.w - 1, rect.y + rect.h - 1) - ): - # The line segment defined by "pts" is not crossing the rectangle - return 0 - if pts[1] == pts[3]: # eg y1 == y2 - _drawhorzline(surf, color, pts[0], pts[1], pts[2]) - elif pts[0] == pts[2]: # eg x1 == x2 - _drawvertline(surf, color, pts[0], pts[1], pts[3]) - else: - _draw_line(surf, color, Point(pts[0], pts[1]), Point(pts[2], pts[3])) - return 1 - - -def _clip_and_draw_line_width(surf, rect, color, line, width): - yinc = xinc = 0 - if abs(line[0] - line[2]) > abs(line[1] - line[3]): - yinc = 1 - else: - xinc = 1 - newpts = line[:] - if _clip_and_draw_line(surf, rect, color, newpts): - anydrawn = 1 - frame = newpts[:] - else: - anydrawn = 0 - frame = [10000, 10000, -10000, -10000] - - for loop in range(1, width // 2 + 1): - newpts[0] = line[0] + xinc * loop - newpts[1] = line[1] + yinc * loop - newpts[2] = line[2] + xinc * loop - newpts[3] = line[3] + yinc * loop - if _clip_and_draw_line(surf, rect, color, newpts): - anydrawn = 1 - frame[0] = min(newpts[0], frame[0]) - frame[1] = min(newpts[1], frame[1]) - frame[2] = max(newpts[2], frame[2]) - frame[3] = max(newpts[3], frame[3]) - - if loop * 2 < width: - newpts[0] = line[0] - xinc * loop - newpts[1] = line[1] - yinc * loop - newpts[2] = line[2] - xinc * loop - newpts[3] = line[3] - yinc * loop - if _clip_and_draw_line(surf, rect, color, newpts): - anydrawn = 1 - frame[0] = min(newpts[0], frame[0]) - frame[1] = min(newpts[1], frame[1]) - frame[2] = max(newpts[2], frame[2]) - frame[3] = max(newpts[3], frame[3]) - - return anydrawn - - -def _clip_and_draw_aaline(surf, rect, color, line, blend): - """draw anti-aliased line between two endpoints.""" - if not clip_line( - line, - BoundingBox(rect.x - 1, rect.y - 1, rect.x + rect.w, rect.y + rect.h), - use_float=True, - ): - return # TODO Rect(rect.x, rect.y, 0, 0) - _draw_aaline(surf, color, Point(line[0], line[1]), Point(line[2], line[3]), blend) - return # TODO Rect(-- affected area --) - - -# D R A W L I N E F U N C T I O N S # - - -def draw_aaline(surf, color, from_point, to_point, blend=True): - """draw anti-aliased line between two endpoints.""" - line = [from_point[0], from_point[1], to_point[0], to_point[1]] - return _clip_and_draw_aaline(surf, surf.get_clip(), color, line, blend) - - -def draw_line(surf, color, from_point, to_point, width=1): - """draw anti-aliased line between two endpoints.""" - line = [from_point[0], from_point[1], to_point[0], to_point[1]] - return _clip_and_draw_line_width(surf, surf.get_clip(), color, line, width) - - -# M U L T I L I N E F U N C T I O N S # - - -def _multi_lines( - surf, - color, - closed, # pylint: disable=too-many-arguments - points, - width=1, - blend=False, - aaline=False, -): - """draw several lines, either anti-aliased or not.""" - # The code for anti-aliased or not is almost identical, so it's factorized - if len(points) <= 2: - raise TypeError - line = [0] * 4 # store x1, y1 & x2, y2 of the lines to be drawn - - xlist = [pt[0] for pt in points] - ylist = [pt[1] for pt in points] - line[0] = xlist[0] - line[1] = ylist[0] - b_box = BoundingBox(left=xlist[0], right=xlist[0], top=ylist[0], bottom=ylist[0]) - - for line_x, line_y in points[1:]: - b_box.left = min(b_box.left, line_x) - b_box.right = max(b_box.right, line_x) - b_box.top = min(b_box.top, line_y) - b_box.bottom = max(b_box.bottom, line_y) - - rect = surf.get_clip() - for loop in range(1, len(points)): - - line[0] = xlist[loop - 1] - line[1] = ylist[loop - 1] - line[2] = 
xlist[loop] - line[3] = ylist[loop] - if aaline: - _clip_and_draw_aaline(surf, rect, color, line, blend) - else: - _clip_and_draw_line_width(surf, rect, color, line, width) - - if closed: - line[0] = xlist[len(points) - 1] - line[1] = ylist[len(points) - 1] - line[2] = xlist[0] - line[3] = ylist[0] - if aaline: - _clip_and_draw_aaline(surf, rect, color, line, blend) - else: - _clip_and_draw_line_width(surf, rect, color, line, width) - - # TODO Rect(...) - - -def draw_lines(surf, color, closed, points, width=1): - """draw several lines connected through the points.""" - return _multi_lines(surf, color, closed, points, width, aaline=False) - - -def draw_aalines(surf, color, closed, points, blend=True): - """draw several anti-aliased lines connected through the points.""" - return _multi_lines(surf, color, closed, points, blend=blend, aaline=True) - - -def draw_polygon(surface, color, points, width): - """Draw a polygon""" - if width: - draw_lines(surface, color, 1, points, width) - return # TODO Rect(...) - num_points = len(points) - point_x = [x for x, y in points] - point_y = [y for x, y in points] - - miny = min(point_y) - maxy = max(point_y) - - if miny == maxy: - minx = min(point_x) - maxx = max(point_x) - _clip_and_draw_horizline(surface, color, minx, miny, maxx) - return # TODO Rect(...) - - for y_coord in range(miny, maxy + 1): - x_intersect = [] - for i in range(num_points): - _draw_polygon_inner_loop(i, point_x, point_y, y_coord, x_intersect) - - x_intersect.sort() - for i in range(0, len(x_intersect), 2): - _clip_and_draw_horizline( - surface, color, x_intersect[i], y_coord, x_intersect[i + 1] - ) - - # special case : horizontal border lines - for i in range(num_points): - i_prev = i - 1 if i else num_points - 1 - if miny < point_y[i] == point_y[i_prev] < maxy: - _clip_and_draw_horizline( - surface, color, point_x[i], point_y[i], point_x[i_prev] - ) - - return # TODO Rect(...) - - -def _draw_polygon_inner_loop(index, point_x, point_y, y_coord, x_intersect): - i_prev = index - 1 if index else len(point_x) - 1 - - y_1 = point_y[i_prev] - y_2 = point_y[index] - - if y_1 < y_2: - x_1 = point_x[i_prev] - x_2 = point_x[index] - elif y_1 > y_2: - y_2 = point_y[i_prev] - y_1 = point_y[index] - x_2 = point_x[i_prev] - x_1 = point_x[index] - else: # special case handled below - return - - if (y_2 > y_coord >= y_1) or ((y_coord == max(point_y)) and (y_coord <= y_2)): - x_intersect.append((y_coord - y_1) * (x_2 - x_1) // (y_2 - y_1) + x_1) diff --git a/venv/Lib/site-packages/pygame/event.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/event.cp39-win_amd64.pyd deleted file mode 100644 index 03cf7a6..0000000 Binary files a/venv/Lib/site-packages/pygame/event.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/event.pyi b/venv/Lib/site-packages/pygame/event.pyi deleted file mode 100644 index 85d7a5d..0000000 --- a/venv/Lib/site-packages/pygame/event.pyi +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Any, Dict, List, Optional, SupportsInt, Tuple, Union, overload - -class Event: - type: int - __dict__: Dict[str, Any] - __hash__: None # type: ignore - @overload - def __init__(self, type: int, dict: Dict[str, Any]) -> None: ... - @overload - def __init__(self, type: int, **attributes: Any) -> None: ... - def __getattr__(self, name: str) -> Any: ... - -_EventTypes = Union[SupportsInt, Tuple[SupportsInt, ...], List[SupportsInt]] - -def pump() -> None: ... 
-def get( - eventtype: Optional[_EventTypes] = None, - pump: Any = True, - exclude: Optional[_EventTypes] = None, -) -> List[Event]: ... -def poll() -> Event: ... -def wait(timeout: int = 0) -> Event: ... -def peek(eventtype: Optional[_EventTypes] = None, pump: Any = True) -> bool: ... -def clear(eventtype: Optional[_EventTypes] = None, pump: Any = True) -> None: ... -def event_name(type: int) -> str: ... -def set_blocked(type: Optional[_EventTypes]) -> None: ... -def set_allowed(type: Optional[_EventTypes]) -> None: ... -def get_blocked(type: _EventTypes) -> bool: ... -def set_grab(grab: bool) -> None: ... -def get_grab() -> bool: ... -def post(event: Event) -> bool: ... -def custom_type() -> int: ... diff --git a/venv/Lib/site-packages/pygame/examples/README.rst b/venv/Lib/site-packages/pygame/examples/README.rst deleted file mode 100644 index a319922..0000000 --- a/venv/Lib/site-packages/pygame/examples/README.rst +++ /dev/null @@ -1,142 +0,0 @@ -These examples should help get you started with pygame. Here is a -brief rundown of what you get. The source code for all examples -is in the public domain. Feel free to use for your own projects. - -aliens.py - This started off as a port of the SDL demonstration, Aliens. - Now it has evolved into something sort of resembling fun. - This demonstrates a lot of different uses of sprites and - optimized blitting. Also transparancy, colorkeys, fonts, sound, - music, joystick, and more. (PS, my high score is 117! goodluck) - -arraydemo.py - Another example filled with various surfarray effects. - It requires the surfarray and image modules to be installed. - This little demo can also make a good starting point for any of - your own tests with surfarray - -audiocapture.py - Record sound from a microphone, and play back the recorded sound. - -blend_fill.py - BLEND_ing colors in different ways with Surface.fill(). - -blit_blends.py - BLEND_ing colors Surface.blit(). - -camera.py - Basic image capturing and display using pygame.camera - -cursors.py - Make custom cursors :) - -dropevent.py - Drag and drop files. Using the following events. - DROPBEGIN, DROPCOMPLETE, DROPTEXT, DROPFILE - -eventlist.py - Learn about pygame events and input. - Watch the events fly by. Click the mouse, and see the mouse - event come up. Press a keyboard key, and see the key up event. - -font_viewer.py - Display all available fonts in a scrolling window. - -fonty.py - Super quick, super simple application demonstrating - the different ways to render fonts with the font module - -freetype_misc.py - FreeType is a world famous font project. - -glcube.py - Using PyOpenGL and Pygame, this creates a spinning 3D multicolored cube. - -headless_no_windows_needed.py - For using pygame in scripts. - -liquid.py - This example was created in a quick comparison with the - BlitzBasic gaming language. Nonetheless, it demonstrates a quick - 8-bit setup (with colormap). - -mask.py - Single bit pixel manipulation. Fast for collision detection, - and also good for computer vision. - -midi.py - For connecting pygame to musical equipment. - -moveit.py - A very simple example of moving stuff. - -music_drop_fade.py - Fade in and play music from a list while observing - several events. Uses fade_ms added in pygame2, as well as set_endevent, - set_volume, drag and drop events, and the scrap module. - -overlay.py - An old way of displaying video content. - -pixelarray.py - Process whole arrays of pixels at a time. - Like numpy, but for pixels, and also built into pygame. 
- -playmus.py - Simple music playing example. - -prevent_display_stretching.py - A windows specific example. - -scaletest.py - Showing how to scale Surfaces. - -scrap_clipboard.py - A simple demonstration example for the clipboard support. - -setmodescale.py - SCALED allows you to work in 320x200 and have it show up big. - It handles mouse scaling and selection of a good sized window depending - on the display. - -sound.py - Extremely basic testing of the mixer module. Load a - sound and play it. All from the command shell, no graphics. - -sound_array_demos.py - Echo, delay and other array based processing of sounds. - -sprite_texture.py - Shows how to use hardware Image Textures with pygame.sprite. - -stars.py - A simple starfield example. You can change the center of - perspective by leftclicking the mouse on the screen. - -testsprite.py - More of a test example. If you're interested in how to use sprites, - then check out the aliens.py example instead. - -textinput.py - A little "console" where you can write in text. - Shows how to use the TEXTEDITING and TEXTINPUT events. - -vgrade.py - Demonstrates creating a vertical gradient with - Numpy. The app will create a new gradient every half - second and report the time needed to create and display the - image. If you're not prepared to start working with the - Numpy arrays, don't worry about the source for this one :] - -video.py - It explores some new video APIs in pygame 2. - Including multiple windows, Textures, and such. - -data/ - Directory with the resources for the examples. - -There's LOTS of examples on the pygame website, and on places like github. - -We're always on the lookout for more examples and/or example -requests. Code like this is probably the best way to start -getting involved with Python gaming. 
diff --git a/venv/Lib/site-packages/pygame/examples/__init__.py b/venv/Lib/site-packages/pygame/examples/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index bc6994f..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/aacircle.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/aacircle.cpython-39.pyc deleted file mode 100644 index f05abe0..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/aacircle.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/aliens.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/aliens.cpython-39.pyc deleted file mode 100644 index 9e31170..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/aliens.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/arraydemo.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/arraydemo.cpython-39.pyc deleted file mode 100644 index 6060dd0..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/arraydemo.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/audiocapture.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/audiocapture.cpython-39.pyc deleted file mode 100644 index ad88641..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/audiocapture.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/blend_fill.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/blend_fill.cpython-39.pyc deleted file mode 100644 index 8eb9d1c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/blend_fill.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/blit_blends.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/blit_blends.cpython-39.pyc deleted file mode 100644 index a7c51b8..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/blit_blends.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/camera.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/camera.cpython-39.pyc deleted file mode 100644 index c623bcc..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/camera.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/chimp.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/chimp.cpython-39.pyc deleted file mode 100644 index e2a1d12..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/chimp.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/cursors.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/cursors.cpython-39.pyc deleted file mode 100644 index b9eec4b..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/cursors.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/dropevent.cpython-39.pyc 
b/venv/Lib/site-packages/pygame/examples/__pycache__/dropevent.cpython-39.pyc deleted file mode 100644 index c2dbfac..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/dropevent.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/eventlist.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/eventlist.cpython-39.pyc deleted file mode 100644 index 6a24b69..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/eventlist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/font_viewer.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/font_viewer.cpython-39.pyc deleted file mode 100644 index fab7c9e..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/font_viewer.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/fonty.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/fonty.cpython-39.pyc deleted file mode 100644 index a0ba814..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/fonty.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/freetype_misc.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/freetype_misc.cpython-39.pyc deleted file mode 100644 index d543878..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/freetype_misc.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/glcube.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/glcube.cpython-39.pyc deleted file mode 100644 index d2545e7..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/glcube.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/headless_no_windows_needed.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/headless_no_windows_needed.cpython-39.pyc deleted file mode 100644 index 839d7d9..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/headless_no_windows_needed.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/joystick.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/joystick.cpython-39.pyc deleted file mode 100644 index f557d6c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/joystick.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/liquid.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/liquid.cpython-39.pyc deleted file mode 100644 index 4019b2c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/liquid.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/mask.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/mask.cpython-39.pyc deleted file mode 100644 index 5eaac2f..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/mask.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/midi.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/midi.cpython-39.pyc deleted file mode 100644 index c77a77b..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/midi.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/pygame/examples/__pycache__/moveit.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/moveit.cpython-39.pyc deleted file mode 100644 index 1199c8d..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/moveit.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/music_drop_fade.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/music_drop_fade.cpython-39.pyc deleted file mode 100644 index e2df202..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/music_drop_fade.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/pixelarray.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/pixelarray.cpython-39.pyc deleted file mode 100644 index 0219d97..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/pixelarray.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/playmus.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/playmus.cpython-39.pyc deleted file mode 100644 index 04a5f73..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/playmus.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/prevent_display_stretching.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/prevent_display_stretching.cpython-39.pyc deleted file mode 100644 index 9969fd4..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/prevent_display_stretching.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/resizing_new.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/resizing_new.cpython-39.pyc deleted file mode 100644 index 7257e3d..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/resizing_new.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/scaletest.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/scaletest.cpython-39.pyc deleted file mode 100644 index 839ebf7..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/scaletest.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/scrap_clipboard.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/scrap_clipboard.cpython-39.pyc deleted file mode 100644 index 51e7417..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/scrap_clipboard.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/scroll.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/scroll.cpython-39.pyc deleted file mode 100644 index 32e43ae..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/scroll.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/setmodescale.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/setmodescale.cpython-39.pyc deleted file mode 100644 index ca2b331..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/setmodescale.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/sound.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/sound.cpython-39.pyc deleted file mode 100644 index 68fbb9f..0000000 
Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/sound.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/sound_array_demos.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/sound_array_demos.cpython-39.pyc deleted file mode 100644 index f4be815..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/sound_array_demos.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/sprite_texture.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/sprite_texture.cpython-39.pyc deleted file mode 100644 index 4488170..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/sprite_texture.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/stars.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/stars.cpython-39.pyc deleted file mode 100644 index 035036c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/stars.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/testsprite.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/testsprite.cpython-39.pyc deleted file mode 100644 index 791060c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/testsprite.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/textinput.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/textinput.cpython-39.pyc deleted file mode 100644 index f1d4283..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/textinput.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/vgrade.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/vgrade.cpython-39.pyc deleted file mode 100644 index e1a0267..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/vgrade.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/__pycache__/video.cpython-39.pyc b/venv/Lib/site-packages/pygame/examples/__pycache__/video.cpython-39.pyc deleted file mode 100644 index 6e16fc3..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/__pycache__/video.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/aacircle.py b/venv/Lib/site-packages/pygame/examples/aacircle.py deleted file mode 100644 index de3733d..0000000 --- a/venv/Lib/site-packages/pygame/examples/aacircle.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -"""Proof of concept gfxdraw example""" - -import pygame -import pygame.gfxdraw - - -def main(): - pygame.init() - screen = pygame.display.set_mode((500, 500)) - screen.fill((255, 0, 0)) - s = pygame.Surface(screen.get_size(), pygame.SRCALPHA, 32) - pygame.draw.line(s, (0, 0, 0), (250, 250), (250 + 200, 250)) - - width = 1 - for a_radius in range(width): - radius = 200 - pygame.gfxdraw.aacircle(s, 250, 250, radius - a_radius, (0, 0, 0)) - - screen.blit(s, (0, 0)) - - pygame.draw.circle(screen, "green", (50, 100), 10) - pygame.draw.circle(screen, "black", (50, 100), 10, 1) - - pygame.display.flip() - try: - while 1: - event = pygame.event.wait() - if event.type == pygame.QUIT: - break - if event.type == pygame.KEYDOWN: - if event.key == pygame.K_ESCAPE or event.unicode == "q": - break - pygame.display.flip() - finally: - pygame.quit() - - -if __name__ == 
"__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/aliens.py b/venv/Lib/site-packages/pygame/examples/aliens.py deleted file mode 100644 index 9fecb3f..0000000 --- a/venv/Lib/site-packages/pygame/examples/aliens.py +++ /dev/null @@ -1,401 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.aliens - -Shows a mini game where you have to defend against aliens. - -What does it show you about pygame? - -* pg.sprite, the difference between Sprite and Group. -* dirty rectangle optimization for processing for speed. -* music with pg.mixer.music, including fadeout -* sound effects with pg.Sound -* event processing, keyboard handling, QUIT handling. -* a main loop frame limited with a game clock from pg.time.Clock -* fullscreen switching. - - -Controls --------- - -* Left and right arrows to move. -* Space bar to shoot -* f key to toggle between fullscreen. - -""" - -import random -import os - -# import basic pygame modules -import pygame as pg - -# see if we can load more than standard BMP -if not pg.image.get_extended(): - raise SystemExit("Sorry, extended image module required") - - -# game constants -MAX_SHOTS = 2 # most player bullets onscreen -ALIEN_ODDS = 22 # chances a new alien appears -BOMB_ODDS = 60 # chances a new bomb will drop -ALIEN_RELOAD = 12 # frames between new aliens -SCREENRECT = pg.Rect(0, 0, 640, 480) -SCORE = 0 - -main_dir = os.path.split(os.path.abspath(__file__))[0] - - -def load_image(file): - """loads an image, prepares it for play""" - file = os.path.join(main_dir, "data", file) - try: - surface = pg.image.load(file) - except pg.error: - raise SystemExit('Could not load image "%s" %s' % (file, pg.get_error())) - return surface.convert() - - -def load_sound(file): - """because pygame can be be compiled without mixer.""" - if not pg.mixer: - return None - file = os.path.join(main_dir, "data", file) - try: - sound = pg.mixer.Sound(file) - return sound - except pg.error: - print("Warning, unable to load, %s" % file) - return None - - -# Each type of game object gets an init and an update function. -# The update function is called once per frame, and it is when each object should -# change its current position and state. -# -# The Player object actually gets a "move" function instead of update, -# since it is passed extra information about the keyboard. - - -class Player(pg.sprite.Sprite): - """Representing the player as a moon buggy type car.""" - - speed = 10 - bounce = 24 - gun_offset = -11 - images = [] - - def __init__(self): - pg.sprite.Sprite.__init__(self, self.containers) - self.image = self.images[0] - self.rect = self.image.get_rect(midbottom=SCREENRECT.midbottom) - self.reloading = 0 - self.origtop = self.rect.top - self.facing = -1 - - def move(self, direction): - if direction: - self.facing = direction - self.rect.move_ip(direction * self.speed, 0) - self.rect = self.rect.clamp(SCREENRECT) - if direction < 0: - self.image = self.images[0] - elif direction > 0: - self.image = self.images[1] - self.rect.top = self.origtop - (self.rect.left // self.bounce % 2) - - def gunpos(self): - pos = self.facing * self.gun_offset + self.rect.centerx - return pos, self.rect.top - - -class Alien(pg.sprite.Sprite): - """An alien space ship. 
That slowly moves down the screen.""" - - speed = 13 - animcycle = 12 - images = [] - - def __init__(self): - pg.sprite.Sprite.__init__(self, self.containers) - self.image = self.images[0] - self.rect = self.image.get_rect() - self.facing = random.choice((-1, 1)) * Alien.speed - self.frame = 0 - if self.facing < 0: - self.rect.right = SCREENRECT.right - - def update(self): - self.rect.move_ip(self.facing, 0) - if not SCREENRECT.contains(self.rect): - self.facing = -self.facing - self.rect.top = self.rect.bottom + 1 - self.rect = self.rect.clamp(SCREENRECT) - self.frame = self.frame + 1 - self.image = self.images[self.frame // self.animcycle % 3] - - -class Explosion(pg.sprite.Sprite): - """An explosion. Hopefully the Alien and not the player!""" - - defaultlife = 12 - animcycle = 3 - images = [] - - def __init__(self, actor): - pg.sprite.Sprite.__init__(self, self.containers) - self.image = self.images[0] - self.rect = self.image.get_rect(center=actor.rect.center) - self.life = self.defaultlife - - def update(self): - """called every time around the game loop. - - Show the explosion surface for 'defaultlife'. - Every game tick(update), we decrease the 'life'. - - Also we animate the explosion. - """ - self.life = self.life - 1 - self.image = self.images[self.life // self.animcycle % 2] - if self.life <= 0: - self.kill() - - -class Shot(pg.sprite.Sprite): - """a bullet the Player sprite fires.""" - - speed = -11 - images = [] - - def __init__(self, pos): - pg.sprite.Sprite.__init__(self, self.containers) - self.image = self.images[0] - self.rect = self.image.get_rect(midbottom=pos) - - def update(self): - """called every time around the game loop. - - Every tick we move the shot upwards. - """ - self.rect.move_ip(0, self.speed) - if self.rect.top <= 0: - self.kill() - - -class Bomb(pg.sprite.Sprite): - """A bomb the aliens drop.""" - - speed = 9 - images = [] - - def __init__(self, alien): - pg.sprite.Sprite.__init__(self, self.containers) - self.image = self.images[0] - self.rect = self.image.get_rect(midbottom=alien.rect.move(0, 5).midbottom) - - def update(self): - """called every time around the game loop. - - Every frame we move the sprite 'rect' down. - When it reaches the bottom we: - - - make an explosion. - - remove the Bomb. 
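Shot and Bomb above share one per-frame pattern: update() shifts self.rect by a fixed speed and calls kill() once the sprite leaves the play area, so every Group containing it drops it automatically. A minimal sketch of that pattern (the FallingDebris name, the colours and the 480-pixel floor are illustrative, not taken from aliens.py):

    import pygame as pg

    class FallingDebris(pg.sprite.Sprite):
        """Illustrative sprite: drifts downwards each frame, removes itself off-screen."""

        speed = 9  # pixels moved per update() call

        def __init__(self, pos, *groups):
            super().__init__(*groups)            # register in the given Groups right away
            self.image = pg.Surface((8, 8))
            self.image.fill((200, 200, 0))
            self.rect = self.image.get_rect(midtop=pos)

        def update(self):
            self.rect.move_ip(0, self.speed)     # Group.update() calls this once per frame
            if self.rect.top >= 480:             # assumed screen height
                self.kill()                      # drop out of every Group automatically

    debris = pg.sprite.Group()
    FallingDebris((100, 0), debris)              # lands in `debris`; debris.update() moves it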
- """ - self.rect.move_ip(0, self.speed) - if self.rect.bottom >= 470: - Explosion(self) - self.kill() - - -class Score(pg.sprite.Sprite): - """to keep track of the score.""" - - def __init__(self): - pg.sprite.Sprite.__init__(self) - self.font = pg.font.Font(None, 20) - self.font.set_italic(1) - self.color = "white" - self.lastscore = -1 - self.update() - self.rect = self.image.get_rect().move(10, 450) - - def update(self): - """We only update the score in update() when it has changed.""" - if SCORE != self.lastscore: - self.lastscore = SCORE - msg = "Score: %d" % SCORE - self.image = self.font.render(msg, 0, self.color) - - -def main(winstyle=0): - # Initialize pygame - if pg.get_sdl_version()[0] == 2: - pg.mixer.pre_init(44100, 32, 2, 1024) - pg.init() - if pg.mixer and not pg.mixer.get_init(): - print("Warning, no sound") - pg.mixer = None - - fullscreen = False - # Set the display mode - winstyle = 0 # |FULLSCREEN - bestdepth = pg.display.mode_ok(SCREENRECT.size, winstyle, 32) - screen = pg.display.set_mode(SCREENRECT.size, winstyle, bestdepth) - - # Load images, assign to sprite classes - # (do this before the classes are used, after screen setup) - img = load_image("player1.gif") - Player.images = [img, pg.transform.flip(img, 1, 0)] - img = load_image("explosion1.gif") - Explosion.images = [img, pg.transform.flip(img, 1, 1)] - Alien.images = [load_image(im) for im in ("alien1.gif", "alien2.gif", "alien3.gif")] - Bomb.images = [load_image("bomb.gif")] - Shot.images = [load_image("shot.gif")] - - # decorate the game window - icon = pg.transform.scale(Alien.images[0], (32, 32)) - pg.display.set_icon(icon) - pg.display.set_caption("Pygame Aliens") - pg.mouse.set_visible(0) - - # create the background, tile the bgd image - bgdtile = load_image("background.gif") - background = pg.Surface(SCREENRECT.size) - for x in range(0, SCREENRECT.width, bgdtile.get_width()): - background.blit(bgdtile, (x, 0)) - screen.blit(background, (0, 0)) - pg.display.flip() - - # load the sound effects - boom_sound = load_sound("boom.wav") - shoot_sound = load_sound("car_door.wav") - if pg.mixer: - music = os.path.join(main_dir, "data", "house_lo.wav") - pg.mixer.music.load(music) - pg.mixer.music.play(-1) - - # Initialize Game Groups - aliens = pg.sprite.Group() - shots = pg.sprite.Group() - bombs = pg.sprite.Group() - all = pg.sprite.RenderUpdates() - lastalien = pg.sprite.GroupSingle() - - # assign default groups to each sprite class - Player.containers = all - Alien.containers = aliens, all, lastalien - Shot.containers = shots, all - Bomb.containers = bombs, all - Explosion.containers = all - Score.containers = all - - # Create Some Starting Values - global score - alienreload = ALIEN_RELOAD - clock = pg.time.Clock() - - # initialize our starting sprites - global SCORE - player = Player() - Alien() # note, this 'lives' because it goes into a sprite group - if pg.font: - all.add(Score()) - - # Run our main loop whilst the player is alive. 
- while player.alive(): - - # get input - for event in pg.event.get(): - if event.type == pg.QUIT: - return - if event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE: - return - elif event.type == pg.KEYDOWN: - if event.key == pg.K_f: - if not fullscreen: - print("Changing to FULLSCREEN") - screen_backup = screen.copy() - screen = pg.display.set_mode( - SCREENRECT.size, winstyle | pg.FULLSCREEN, bestdepth - ) - screen.blit(screen_backup, (0, 0)) - else: - print("Changing to windowed mode") - screen_backup = screen.copy() - screen = pg.display.set_mode( - SCREENRECT.size, winstyle, bestdepth - ) - screen.blit(screen_backup, (0, 0)) - pg.display.flip() - fullscreen = not fullscreen - - keystate = pg.key.get_pressed() - - # clear/erase the last drawn sprites - all.clear(screen, background) - - # update all the sprites - all.update() - - # handle player input - direction = keystate[pg.K_RIGHT] - keystate[pg.K_LEFT] - player.move(direction) - firing = keystate[pg.K_SPACE] - if not player.reloading and firing and len(shots) < MAX_SHOTS: - Shot(player.gunpos()) - if pg.mixer: - shoot_sound.play() - player.reloading = firing - - # Create new alien - if alienreload: - alienreload = alienreload - 1 - elif not int(random.random() * ALIEN_ODDS): - Alien() - alienreload = ALIEN_RELOAD - - # Drop bombs - if lastalien and not int(random.random() * BOMB_ODDS): - Bomb(lastalien.sprite) - - # Detect collisions between aliens and players. - for alien in pg.sprite.spritecollide(player, aliens, 1): - if pg.mixer: - boom_sound.play() - Explosion(alien) - Explosion(player) - SCORE = SCORE + 1 - player.kill() - - # See if shots hit the aliens. - for alien in pg.sprite.groupcollide(aliens, shots, 1, 1).keys(): - if pg.mixer: - boom_sound.play() - Explosion(alien) - SCORE = SCORE + 1 - - # See if alien boms hit the player. - for bomb in pg.sprite.spritecollide(player, bombs, 1): - if pg.mixer: - boom_sound.play() - Explosion(player) - Explosion(bomb) - player.kill() - - # draw the scene - dirty = all.draw(screen) - pg.display.update(dirty) - - # cap the framerate at 40fps. Also called 40HZ or 40 times per second. - clock.tick(40) - - if pg.mixer: - pg.mixer.music.fadeout(1000) - pg.time.wait(1000) - - -# call the "main" function if running this script -if __name__ == "__main__": - main() - pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/arraydemo.py b/venv/Lib/site-packages/pygame/examples/arraydemo.py deleted file mode 100644 index c35f376..0000000 --- a/venv/Lib/site-packages/pygame/examples/arraydemo.py +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.arraydemo - -Welcome to the arraydemo! - -Use the numpy array package to manipulate pixels. - -This demo will show you a few things: - -* scale up, scale down, flip, -* cross fade -* soften -* put stripes on it! 
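The "put stripes on it" bullet above comes down to a single numpy slice assignment; this is a condensed sketch of what arraydemo.py does further down, assuming numpy is installed (window size and colours mirror the example):

    import numpy as np
    import pygame as pg
    from pygame import surfarray

    pg.init()
    screen = pg.display.set_mode((128, 128), 0, 32)

    striped = np.zeros((128, 128, 3), dtype=np.int32)   # surfarray layout: (width, height, rgb)
    striped[:] = (255, 0, 0)                             # red everywhere
    striped[:, ::3] = (0, 255, 255)                      # every third horizontal line -> cyan
    surfarray.blit_array(screen, striped)                # copy the array into the display surface
    pg.display.flip()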
- -""" - - -import os - -import pygame as pg -from pygame import surfarray - -main_dir = os.path.split(os.path.abspath(__file__))[0] - - -def surfdemo_show(array_img, name): - "displays a surface, waits for user to continue" - screen = pg.display.set_mode(array_img.shape[:2], 0, 32) - surfarray.blit_array(screen, array_img) - pg.display.flip() - pg.display.set_caption(name) - while 1: - e = pg.event.wait() - if e.type == pg.MOUSEBUTTONDOWN: - break - elif e.type == pg.KEYDOWN and e.key == pg.K_s: - # pg.image.save(screen, name+'.bmp') - # s = pg.Surface(screen.get_size(), 0, 32) - # s = s.convert_alpha() - # s.fill((0,0,0,255)) - # s.blit(screen, (0,0)) - # s.fill((222,0,0,50), (0,0,40,40)) - # pg.image.save_extended(s, name+'.png') - # pg.image.save(s, name+'.png') - # pg.image.save(screen, name+'_screen.png') - # pg.image.save(s, name+'.tga') - pg.image.save(screen, name + ".png") - elif e.type == pg.QUIT: - pg.quit() - raise SystemExit() - - -def main(): - """show various surfarray effects""" - import numpy as N - from numpy import int32, uint8, uint - - pg.init() - print("Using %s" % surfarray.get_arraytype().capitalize()) - print("Press the mouse button to advance image.") - print('Press the "s" key to save the current image.') - - # allblack - allblack = N.zeros((128, 128), int32) - surfdemo_show(allblack, "allblack") - - # striped - # the element type is required for N.zeros in numpy else - # an array of float is returned. - striped = N.zeros((128, 128, 3), int32) - striped[:] = (255, 0, 0) - striped[:, ::3] = (0, 255, 255) - surfdemo_show(striped, "striped") - - # rgbarray - imagename = os.path.join(main_dir, "data", "arraydemo.bmp") - imgsurface = pg.image.load(imagename) - rgbarray = surfarray.array3d(imgsurface) - surfdemo_show(rgbarray, "rgbarray") - - # flipped - flipped = rgbarray[:, ::-1] - surfdemo_show(flipped, "flipped") - - # scaledown - scaledown = rgbarray[::2, ::2] - surfdemo_show(scaledown, "scaledown") - - # scaleup - # the element type is required for N.zeros in numpy else - # an #array of floats is returned. - shape = rgbarray.shape - scaleup = N.zeros((shape[0] * 2, shape[1] * 2, shape[2]), int32) - scaleup[::2, ::2, :] = rgbarray - scaleup[1::2, ::2, :] = rgbarray - scaleup[:, 1::2] = scaleup[:, ::2] - surfdemo_show(scaleup, "scaleup") - - # redimg - redimg = N.array(rgbarray) - redimg[:, :, 1:] = 0 - surfdemo_show(redimg, "redimg") - - # soften - # having factor as an array forces integer upgrade during multiplication - # of rgbarray, even for numpy. - factor = N.array((8,), int32) - soften = N.array(rgbarray, int32) - soften[1:, :] += rgbarray[:-1, :] * factor - soften[:-1, :] += rgbarray[1:, :] * factor - soften[:, 1:] += rgbarray[:, :-1] * factor - soften[:, :-1] += rgbarray[:, 1:] * factor - soften //= 33 - surfdemo_show(soften, "soften") - - # crossfade (50%) - src = N.array(rgbarray) - dest = N.zeros(rgbarray.shape) # dest is float64 by default. - dest[:] = 20, 50, 100 - diff = (dest - src) * 0.50 - xfade = src + diff.astype(uint) - surfdemo_show(xfade, "xfade") - - # alldone - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/audiocapture.py b/venv/Lib/site-packages/pygame/examples/audiocapture.py deleted file mode 100644 index aa50fcf..0000000 --- a/venv/Lib/site-packages/pygame/examples/audiocapture.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.audiocapture - -A pygame 2 experiment. 
- -* record sound from a microphone -* play back the recorded sound -""" -import pygame as pg -import time - -from pygame._sdl2 import ( - get_audio_device_names, - AudioDevice, - AUDIO_F32, - AUDIO_ALLOW_FORMAT_CHANGE, -) -from pygame._sdl2.mixer import set_post_mix - - -pg.mixer.pre_init(44100, 32, 2, 512) -pg.init() - -# init_subsystem(INIT_AUDIO) -names = get_audio_device_names(True) -print(names) - -sounds = [] -sound_chunks = [] - - -def callback(audiodevice, audiomemoryview): - """This is called in the sound thread. - - Note, that the frequency and such you request may not be what you get. - """ - # print(type(audiomemoryview), len(audiomemoryview)) - # print(audiodevice) - sound_chunks.append(bytes(audiomemoryview)) - - -def postmix_callback(postmix, audiomemoryview): - """This is called in the sound thread. - - At the end of mixing we get this data. - """ - print(type(audiomemoryview), len(audiomemoryview)) - print(postmix) - - -set_post_mix(postmix_callback) - -audio = AudioDevice( - devicename=names[0], - iscapture=True, - frequency=44100, - audioformat=AUDIO_F32, - numchannels=2, - chunksize=512, - allowed_changes=AUDIO_ALLOW_FORMAT_CHANGE, - callback=callback, -) -# start recording. -audio.pause(0) - -print(audio) - -print("recording with '%s'" % names[0]) -time.sleep(5) - - -print("Turning data into a pg.mixer.Sound") -sound = pg.mixer.Sound(buffer=b"".join(sound_chunks)) - -print("playing back recorded sound") -sound.play() -time.sleep(5) -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/blend_fill.py b/venv/Lib/site-packages/pygame/examples/blend_fill.py deleted file mode 100644 index 301888b..0000000 --- a/venv/Lib/site-packages/pygame/examples/blend_fill.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.blend_fill - -BLEND_ing colors in different ways with Surface.fill(). 
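The point of blend_fill.py is that Surface.fill() accepts a blend mode as its third argument, so a fill can add to or subtract from what is already on the surface instead of overwriting it. A minimal sketch (the colour values are illustrative):

    import pygame as pg

    pg.init()
    screen = pg.display.set_mode((200, 200))

    tile = pg.Surface((200, 200))
    tile.fill((100, 100, 100))                     # mid grey
    tile.fill((40, 0, 0), None, pg.BLEND_ADD)      # add 40 to every red channel
    tile.fill((0, 0, 60), None, pg.BLEND_SUB)      # subtract 60 from every blue channel
    print(tile.get_at((0, 0)))                     # -> (140, 100, 40, 255)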
- -Keyboard Controls: - -* Press R, G, B to increase the color channel values, -* 1-9 to set the step range for the increment, -* A - ADD, S- SUB, M- MULT, - MIN, + MAX to change the blend modes - -""" -import os -import pygame as pg -from pygame import K_1, K_2, K_3, K_4, K_5, K_6, K_7, K_8, K_9 - - -def usage(): - print("Press R, G, B to increase the color channel values,") - print("1-9 to set the step range for the increment,") - print("A - ADD, S- SUB, M- MULT, - MIN, + MAX") - print(" to change the blend modes") - - -main_dir = os.path.split(os.path.abspath(__file__))[0] -data_dir = os.path.join(main_dir, "data") - - -def main(): - color = [0, 0, 0] - changed = False - blendtype = 0 - step = 5 - - pg.init() - screen = pg.display.set_mode((640, 480), 0, 32) - screen.fill((100, 100, 100)) - - image = pg.image.load(os.path.join(data_dir, "liquid.bmp")).convert() - blendimage = pg.image.load(os.path.join(data_dir, "liquid.bmp")).convert() - screen.blit(image, (10, 10)) - screen.blit(blendimage, (200, 10)) - - pg.display.flip() - pg.key.set_repeat(500, 30) - usage() - - going = True - while going: - for event in pg.event.get(): - if event.type == pg.QUIT: - going = False - - if event.type == pg.KEYDOWN: - usage() - - if event.key == pg.K_ESCAPE: - going = False - - if event.key == pg.K_r: - color[0] += step - if color[0] > 255: - color[0] = 0 - changed = True - - elif event.key == pg.K_g: - color[1] += step - if color[1] > 255: - color[1] = 0 - changed = True - - elif event.key == pg.K_b: - color[2] += step - if color[2] > 255: - color[2] = 0 - changed = True - - elif event.key == pg.K_a: - blendtype = pg.BLEND_ADD - changed = True - elif event.key == pg.K_s: - blendtype = pg.BLEND_SUB - changed = True - elif event.key == pg.K_m: - blendtype = pg.BLEND_MULT - changed = True - elif event.key == pg.K_PLUS: - blendtype = pg.BLEND_MAX - changed = True - elif event.key == pg.K_MINUS: - blendtype = pg.BLEND_MIN - changed = True - - elif event.key in (K_1, K_2, K_3, K_4, K_5, K_6, K_7, K_8, K_9): - step = int(event.unicode) - - if changed: - screen.fill((100, 100, 100)) - screen.blit(image, (10, 10)) - blendimage.blit(image, (0, 0)) - # blendimage.fill (color, (0, 0, 20, 20), blendtype) - blendimage.fill(color, None, blendtype) - screen.blit(blendimage, (200, 10)) - print( - "Color: %s, Pixel (0,0): %s" - % (tuple(color), [blendimage.get_at((0, 0))]) - ) - changed = False - pg.display.flip() - - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/blit_blends.py b/venv/Lib/site-packages/pygame/examples/blit_blends.py deleted file mode 100644 index 8bc8978..0000000 --- a/venv/Lib/site-packages/pygame/examples/blit_blends.py +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.blit_blends - -Blending colors in different ways with different blend modes. - -It also shows some tricks with the surfarray. -Including how to do additive blending. - - -Keyboard Controls ------------------ - -* R, G, B - add a bit of Red, Green, or Blue. -* A - Add blend mode -* S - Subtractive blend mode -* M - Multiply blend mode -* = key BLEND_MAX blend mode. -* - key BLEND_MIN blend mode. -* 1, 2, 3, 4 - use different images. - -""" -import os -import pygame as pg -import time - -main_dir = os.path.split(os.path.abspath(__file__))[0] -data_dir = os.path.join(main_dir, "data") - -try: - import pygame.surfarray - import numpy -except ImportError: - print("no surfarray for you! 
install numpy") - - -def main(): - pg.init() - pg.mixer.quit() # remove ALSA underflow messages for Debian squeeze - screen = pg.display.set_mode((640, 480)) - - im1 = pg.Surface(screen.get_size()) - # im1= im1.convert() - im1.fill((100, 0, 0)) - - im2 = pg.Surface(screen.get_size()) - im2.fill((0, 50, 0)) - # we make a srcalpha copy of it. - # im3= im2.convert(SRCALPHA) - im3 = im2 - im3.set_alpha(127) - - images = {} - images[pg.K_1] = im2 - images[pg.K_2] = pg.image.load(os.path.join(data_dir, "chimp.png")) - images[pg.K_3] = pg.image.load(os.path.join(data_dir, "alien3.gif")) - images[pg.K_4] = pg.image.load(os.path.join(data_dir, "liquid.bmp")) - img_to_blit = im2.convert() - iaa = img_to_blit.convert_alpha() - - blits = {} - blits[pg.K_a] = pg.BLEND_ADD - blits[pg.K_s] = pg.BLEND_SUB - blits[pg.K_m] = pg.BLEND_MULT - blits[pg.K_EQUALS] = pg.BLEND_MAX - blits[pg.K_MINUS] = pg.BLEND_MIN - - blitsn = {} - blitsn[pg.K_a] = "BLEND_ADD" - blitsn[pg.K_s] = "BLEND_SUB" - blitsn[pg.K_m] = "BLEND_MULT" - blitsn[pg.K_EQUALS] = "BLEND_MAX" - blitsn[pg.K_MINUS] = "BLEND_MIN" - - screen.blit(im1, (0, 0)) - pg.display.flip() - clock = pg.time.Clock() - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - - going = True - while going: - clock.tick(60) - - for event in pg.event.get(): - if event.type == pg.QUIT: - going = False - if event.type == pg.KEYDOWN: - usage() - - if event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE: - going = False - - elif event.type == pg.KEYDOWN and event.key in images.keys(): - img_to_blit = images[event.key] - iaa = img_to_blit.convert_alpha() - - elif event.type == pg.KEYDOWN and event.key in blits.keys(): - t1 = time.time() - # blits is a dict keyed with key -> blit flag. eg BLEND_ADD. - im1.blit(img_to_blit, (0, 0), None, blits[event.key]) - t2 = time.time() - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - print("time to do:%s:" % (t2 - t1)) - - elif event.type == pg.KEYDOWN and event.key in [pg.K_t]: - - for bkey in blits.keys(): - t1 = time.time() - - for x in range(300): - im1.blit(img_to_blit, (0, 0), None, blits[bkey]) - - t2 = time.time() - - # show which key we're doing... - onedoing = blitsn[bkey] - print("time to do :%s: is :%s:" % (onedoing, t2 - t1)) - - elif event.type == pg.KEYDOWN and event.key in [pg.K_o]: - t1 = time.time() - # blits is a dict keyed with key -> blit flag. eg BLEND_ADD. - im1.blit(iaa, (0, 0)) - t2 = time.time() - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - print("time to do:%s:" % (t2 - t1)) - - elif event.type == pg.KEYDOWN and event.key == pg.K_SPACE: - # this additive blend without clamp two surfaces. - # im1.set_alpha(127) - # im1.blit(im1, (0,0)) - # im1.set_alpha(255) - t1 = time.time() - - im1p = pygame.surfarray.pixels2d(im1) - im2p = pygame.surfarray.pixels2d(im2) - im1p += im2p - del im1p - del im2p - t2 = time.time() - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - print("time to do:%s:" % (t2 - t1)) - - elif event.type == pg.KEYDOWN and event.key in [pg.K_z]: - t1 = time.time() - im1p = pygame.surfarray.pixels3d(im1) - im2p = pygame.surfarray.pixels3d(im2) - im1p16 = im1p.astype(numpy.uint16) - im2p16 = im1p.astype(numpy.uint16) - im1p16 += im2p16 - im1p16 = numpy.minimum(im1p16, 255) - pygame.surfarray.blit_array(im1, im1p16) - - del im1p - del im2p - t2 = time.time() - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - print("time to do:%s:" % (t2 - t1)) - - elif event.type == pg.KEYDOWN and event.key in [pg.K_r, pg.K_g, pg.K_b]: - # this adds one to each pixel. 
- colmap = {} - colmap[pg.K_r] = 0x10000 - colmap[pg.K_g] = 0x00100 - colmap[pg.K_b] = 0x00001 - im1p = pygame.surfarray.pixels2d(im1) - im1p += colmap[event.key] - del im1p - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - - elif event.type == pg.KEYDOWN and event.key == pg.K_p: - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - - elif event.type == pg.KEYDOWN and event.key == pg.K_f: - # this additive blend without clamp two surfaces. - - t1 = time.time() - im1.set_alpha(127) - im1.blit(im2, (0, 0)) - im1.set_alpha(255) - - t2 = time.time() - print("one pixel is:%s:" % [im1.get_at((0, 0))]) - print("time to do:%s:" % (t2 - t1)) - - screen.blit(im1, (0, 0)) - pg.display.flip() - - pg.quit() - - -def usage(): - print("press keys 1-5 to change image to blit.") - print("A - ADD, S- SUB, M- MULT, - MIN, + MAX") - print("T - timing test for special blend modes.") - - -if __name__ == "__main__": - usage() - main() diff --git a/venv/Lib/site-packages/pygame/examples/camera.py b/venv/Lib/site-packages/pygame/examples/camera.py deleted file mode 100644 index 34fc46e..0000000 --- a/venv/Lib/site-packages/pygame/examples/camera.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.camera - -Basic image capturing and display using pygame.camera - -Keyboard controls ------------------ - -- 0, start camera 0. -- 1, start camera 1. -- 9, start camera 9. -- 10, start camera... wait a minute! There's not 10 key! -""" -import pygame as pg -import pygame.camera - - -class VideoCapturePlayer(object): - - size = (640, 480) - - def __init__(self, **argd): - self.__dict__.update(**argd) - super(VideoCapturePlayer, self).__init__(**argd) - - # create a display surface. standard pygame stuff - self.display = pg.display.set_mode(self.size) - self.init_cams(0) - - def init_cams(self, which_cam_idx): - - # gets a list of available cameras. - self.clist = pygame.camera.list_cameras() - print(self.clist) - - if not self.clist: - raise ValueError("Sorry, no cameras detected.") - - try: - cam_id = self.clist[which_cam_idx] - except IndexError: - cam_id = self.clist[0] - - # creates the camera of the specified size and in RGB colorspace - self.camera = pygame.camera.Camera(cam_id, self.size, "RGB") - - # starts the camera - self.camera.start() - - self.clock = pg.time.Clock() - - # create a surface to capture to. for performance purposes, you want the - # bit depth to be the same as that of the display surface. - self.snapshot = pg.surface.Surface(self.size, 0, self.display) - - def get_and_flip(self): - # if you don't want to tie the framerate to the camera, you can check and - # see if the camera has an image ready. note that while this works - # on most cameras, some will never return true. - - self.snapshot = self.camera.get_image(self.display) - - # if 0 and self.camera.query_image(): - # # capture an image - - # self.snapshot = self.camera.get_image(self.snapshot) - - # if 0: - # self.snapshot = self.camera.get_image(self.snapshot) - # # self.snapshot = self.camera.get_image() - - # # blit it to the display surface. simple! 
- # self.display.blit(self.snapshot, (0, 0)) - # else: - - # self.snapshot = self.camera.get_image(self.display) - # # self.display.blit(self.snapshot, (0,0)) - - pg.display.flip() - - def main(self): - going = True - while going: - events = pg.event.get() - for e in events: - if e.type == pg.QUIT or (e.type == pg.KEYDOWN and e.key == pg.K_ESCAPE): - going = False - if e.type == pg.KEYDOWN: - if e.key in range(pg.K_0, pg.K_0 + 10): - self.init_cams(e.key - pg.K_0) - - self.get_and_flip() - self.clock.tick() - pygame.display.set_caption(f"CAMERA! ({self.clock.get_fps():.2f} FPS)") - - -def main(): - pg.init() - pygame.camera.init() - VideoCapturePlayer().main() - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/chimp.py b/venv/Lib/site-packages/pygame/examples/chimp.py deleted file mode 100644 index 4612be5..0000000 --- a/venv/Lib/site-packages/pygame/examples/chimp.py +++ /dev/null @@ -1,203 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.chimp - -This simple example is used for the line-by-line tutorial -that comes with pygame. It is based on a 'popular' web banner. -Note there are comments here, but for the full explanation, -follow along in the tutorial. -""" - - -# Import Modules -import os -import pygame as pg - -if not pg.font: - print("Warning, fonts disabled") -if not pg.mixer: - print("Warning, sound disabled") - -main_dir = os.path.split(os.path.abspath(__file__))[0] -data_dir = os.path.join(main_dir, "data") - - -# functions to create our resources -def load_image(name, colorkey=None, scale=1): - fullname = os.path.join(data_dir, name) - image = pg.image.load(fullname) - image = image.convert() - - size = image.get_size() - size = (size[0] * scale, size[1] * scale) - image = pg.transform.scale(image, size) - - if colorkey is not None: - if colorkey == -1: - colorkey = image.get_at((0, 0)) - image.set_colorkey(colorkey, pg.RLEACCEL) - return image, image.get_rect() - - -def load_sound(name): - class NoneSound: - def play(self): - pass - - if not pg.mixer or not pg.mixer.get_init(): - return NoneSound() - - fullname = os.path.join(data_dir, name) - sound = pg.mixer.Sound(fullname) - - return sound - - -# classes for our game objects -class Fist(pg.sprite.Sprite): - """moves a clenched fist on the screen, following the mouse""" - - def __init__(self): - pg.sprite.Sprite.__init__(self) # call Sprite initializer - self.image, self.rect = load_image("fist.png", -1) - self.fist_offset = (-235, -80) - self.punching = False - - def update(self): - """move the fist based on the mouse position""" - pos = pg.mouse.get_pos() - self.rect.topleft = pos - self.rect.move_ip(self.fist_offset) - if self.punching: - self.rect.move_ip(15, 25) - - def punch(self, target): - """returns true if the fist collides with the target""" - if not self.punching: - self.punching = True - hitbox = self.rect.inflate(-5, -5) - return hitbox.colliderect(target.rect) - - def unpunch(self): - """called to pull the fist back""" - self.punching = False - - -class Chimp(pg.sprite.Sprite): - """moves a monkey critter across the screen. 
it can spin the - monkey when it is punched.""" - - def __init__(self): - pg.sprite.Sprite.__init__(self) # call Sprite intializer - self.image, self.rect = load_image("chimp.png", -1, 4) - screen = pg.display.get_surface() - self.area = screen.get_rect() - self.rect.topleft = 10, 90 - self.move = 18 - self.dizzy = False - - def update(self): - """walk or spin, depending on the monkeys state""" - if self.dizzy: - self._spin() - else: - self._walk() - - def _walk(self): - """move the monkey across the screen, and turn at the ends""" - newpos = self.rect.move((self.move, 0)) - if not self.area.contains(newpos): - if self.rect.left < self.area.left or self.rect.right > self.area.right: - self.move = -self.move - newpos = self.rect.move((self.move, 0)) - self.image = pg.transform.flip(self.image, True, False) - self.rect = newpos - - def _spin(self): - """spin the monkey image""" - center = self.rect.center - self.dizzy = self.dizzy + 12 - if self.dizzy >= 360: - self.dizzy = False - self.image = self.original - else: - rotate = pg.transform.rotate - self.image = rotate(self.original, self.dizzy) - self.rect = self.image.get_rect(center=center) - - def punched(self): - """this will cause the monkey to start spinning""" - if not self.dizzy: - self.dizzy = True - self.original = self.image - - -def main(): - """this function is called when the program starts. - it initializes everything it needs, then runs in - a loop until the function returns.""" - # Initialize Everything - pg.init() - screen = pg.display.set_mode((1280, 480), pg.SCALED) - pg.display.set_caption("Monkey Fever") - pg.mouse.set_visible(False) - - # Create The Backgound - background = pg.Surface(screen.get_size()) - background = background.convert() - background.fill((170, 238, 187)) - - # Put Text On The Background, Centered - if pg.font: - font = pg.font.Font(None, 64) - text = font.render("Pummel The Chimp, And Win $$$", True, (10, 10, 10)) - textpos = text.get_rect(centerx=background.get_width() / 2, y=10) - background.blit(text, textpos) - - # Display The Background - screen.blit(background, (0, 0)) - pg.display.flip() - - # Prepare Game Objects - whiff_sound = load_sound("whiff.wav") - punch_sound = load_sound("punch.wav") - chimp = Chimp() - fist = Fist() - allsprites = pg.sprite.RenderPlain((chimp, fist)) - clock = pg.time.Clock() - - # Main Loop - going = True - while going: - clock.tick(60) - - # Handle Input Events - for event in pg.event.get(): - if event.type == pg.QUIT: - going = False - elif event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE: - going = False - elif event.type == pg.MOUSEBUTTONDOWN: - if fist.punch(chimp): - punch_sound.play() # punch - chimp.punched() - else: - whiff_sound.play() # miss - elif event.type == pg.MOUSEBUTTONUP: - fist.unpunch() - - allsprites.update() - - # Draw Everything - screen.blit(background, (0, 0)) - allsprites.draw(screen) - pg.display.flip() - - pg.quit() - - -# Game Over - - -# this calls the 'main' function when this script is executed -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/cursors.py b/venv/Lib/site-packages/pygame/examples/cursors.py deleted file mode 100644 index 09e5d1f..0000000 --- a/venv/Lib/site-packages/pygame/examples/cursors.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.cursors - -Click a mouse button (if you have one!) and the cursor changes. 
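Back in the chimp example just above, load_image() gets transparency from a colorkey rather than an alpha channel: the colour found at pixel (0, 0) is marked invisible for all later blits. A sketch of the same trick on a generated surface, so no image file is needed (all names and colours are illustrative):

    import pygame as pg

    pg.init()
    screen = pg.display.set_mode((200, 200))

    sprite = pg.Surface((50, 50))
    sprite.fill((255, 0, 255))                         # magenta background, meant to be invisible
    pg.draw.circle(sprite, (0, 200, 0), (25, 25), 20)  # the visible part
    sprite = sprite.convert()
    sprite.set_colorkey(sprite.get_at((0, 0)), pg.RLEACCEL)  # same idea as the colorkey == -1 case

    screen.fill((30, 30, 30))
    screen.blit(sprite, (75, 75))                      # magenta pixels are skipped
    pg.display.flip()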
- -""" -import pygame as pg - - -arrow = ( - "xX ", - "X.X ", - "X..X ", - "X...X ", - "X....X ", - "X.....X ", - "X......X ", - "X.......X ", - "X........X ", - "X.........X ", - "X......XXXXX ", - "X...X..X ", - "X..XX..X ", - "X.X XX..X ", - "XX X..X ", - "X X..X ", - " X..X ", - " X..X ", - " X..X ", - " XX ", - " ", - " ", - " ", - " ", -) - - -no = ( - " ", - " ", - " XXXXXX ", - " XX......XX ", - " X..........X ", - " X....XXXX....X ", - " X...XX XX...X ", - " X.....X X...X ", - " X..X...X X..X ", - " X...XX...X X...X ", - " X..X X...X X..X ", - " X..X X...X X..X ", - " X..X X.,.X X..X ", - " X..X X...X X..X ", - " X...X X...XX...X ", - " X..X X...X..X ", - " X...X X.....X ", - " X...XX X...X ", - " X....XXXXX...X ", - " X..........X ", - " XX......XX ", - " XXXXXX ", - " ", - " ", -) - - -def TestCursor(arrow): - hotspot = None - for y, line in enumerate(arrow): - for x, char in enumerate(line): - if char in ["x", ",", "O"]: - hotspot = x, y - break - if hotspot is not None: - break - if hotspot is None: - raise Exception("No hotspot specified for cursor '%s'!" % arrow) - s2 = [] - for line in arrow: - s2.append(line.replace("x", "X").replace(",", ".").replace("O", "o")) - cursor, mask = pg.cursors.compile(s2, "X", ".", "o") - size = len(arrow[0]), len(arrow) - pg.mouse.set_cursor(size, hotspot, cursor, mask) - - -def main(): - pg.init() - pg.font.init() - font = pg.font.Font(None, 24) - bg = pg.display.set_mode((800, 600), 0, 24) - bg.fill((255, 255, 255)) - bg.blit(font.render("Click to advance", 1, (0, 0, 0)), (0, 0)) - pg.display.update() - for cursor in [no, arrow]: - TestCursor(cursor) - going = True - while going: - pg.event.pump() - for e in pg.event.get(): - if e.type == pg.MOUSEBUTTONDOWN: - going = False - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/data/BGR.png b/venv/Lib/site-packages/pygame/examples/data/BGR.png deleted file mode 100644 index f5dba74..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/BGR.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien1.gif b/venv/Lib/site-packages/pygame/examples/data/alien1.gif deleted file mode 100644 index c4497e0..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/alien1.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien1.jpg b/venv/Lib/site-packages/pygame/examples/data/alien1.jpg deleted file mode 100644 index 6d110a4..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/alien1.jpg and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien1.png b/venv/Lib/site-packages/pygame/examples/data/alien1.png deleted file mode 100644 index 471d6a4..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/alien1.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien2.gif b/venv/Lib/site-packages/pygame/examples/data/alien2.gif deleted file mode 100644 index 8df05a3..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/alien2.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien2.png b/venv/Lib/site-packages/pygame/examples/data/alien2.png deleted file mode 100644 index aef5ace..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/alien2.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien3.gif b/venv/Lib/site-packages/pygame/examples/data/alien3.gif deleted file mode 100644 index 5305d41..0000000 
Binary files a/venv/Lib/site-packages/pygame/examples/data/alien3.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/alien3.png b/venv/Lib/site-packages/pygame/examples/data/alien3.png deleted file mode 100644 index 90d0f7c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/alien3.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/arraydemo.bmp b/venv/Lib/site-packages/pygame/examples/data/arraydemo.bmp deleted file mode 100644 index ad96338..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/arraydemo.bmp and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/asprite.bmp b/venv/Lib/site-packages/pygame/examples/data/asprite.bmp deleted file mode 100644 index cc96356..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/asprite.bmp and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/background.gif b/venv/Lib/site-packages/pygame/examples/data/background.gif deleted file mode 100644 index 5041ce6..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/background.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/black.ppm b/venv/Lib/site-packages/pygame/examples/data/black.ppm deleted file mode 100644 index 698a52c..0000000 --- a/venv/Lib/site-packages/pygame/examples/data/black.ppm +++ /dev/null @@ -1,3076 +0,0 @@ -P3 -# Created by GIMP version 2.10.20 PNM plug-in -32 32 -255 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 
[... remaining "-0" pixel-value lines of the deleted black.ppm (32x32 ASCII PPM, all black) omitted ...]
-0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 diff --git a/venv/Lib/site-packages/pygame/examples/data/blue.gif b/venv/Lib/site-packages/pygame/examples/data/blue.gif deleted file mode 100644 index 98c6fd6..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/blue.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/blue.mpg b/venv/Lib/site-packages/pygame/examples/data/blue.mpg deleted file mode 100644 index 60dceca..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/blue.mpg and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/bomb.gif b/venv/Lib/site-packages/pygame/examples/data/bomb.gif deleted file mode 100644 index f885cbb..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/bomb.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/boom.wav b/venv/Lib/site-packages/pygame/examples/data/boom.wav deleted file mode 100644 index f19126a..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/boom.wav and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/brick.png b/venv/Lib/site-packages/pygame/examples/data/brick.png deleted file mode 100644 index cfe37a3..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/brick.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/car_door.wav b/venv/Lib/site-packages/pygame/examples/data/car_door.wav deleted file mode 100644 index 60acf9e..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/car_door.wav and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/chimp.png b/venv/Lib/site-packages/pygame/examples/data/chimp.png deleted file mode 100644 index 9bf37b1..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/chimp.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/city.png b/venv/Lib/site-packages/pygame/examples/data/city.png deleted file mode 100644 index 202da5c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/city.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/crimson.pnm b/venv/Lib/site-packages/pygame/examples/data/crimson.pnm deleted file mode 100644 index 28501e9..0000000 --- a/venv/Lib/site-packages/pygame/examples/data/crimson.pnm +++ /dev/null @@ -1,5 +0,0 @@ -P6 -# CREATOR: GIMP PNM Filter Version 1.1 -32 32 -255 
-Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü<Ü< \ No newline at end of file diff --git a/venv/Lib/site-packages/pygame/examples/data/danger.gif b/venv/Lib/site-packages/pygame/examples/data/danger.gif deleted file mode 100644 index 106d69c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/danger.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/explosion1.gif b/venv/Lib/site-packages/pygame/examples/data/explosion1.gif deleted file mode 100644 index fabec16..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/explosion1.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/fist.png b/venv/Lib/site-packages/pygame/examples/data/fist.png deleted file mode 100644 index 9097629..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/fist.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/green.pcx b/venv/Lib/site-packages/pygame/examples/data/green.pcx deleted file mode 100644 index c0aea8d..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/green.pcx and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/grey.pgm b/venv/Lib/site-packages/pygame/examples/data/grey.pgm deleted file mode 100644 index b181a5d..0000000 --- a/venv/Lib/site-packages/pygame/examples/data/grey.pgm +++ /dev/null @@ -1,1028 +0,0 @@ -P2 -# Created by GIMP version 2.10.20 PNM plug-in -32 32 -255 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 
[... remaining "-120" pixel-value lines of the deleted grey.pgm (32x32 ASCII PGM, uniform grey) omitted ...]
-120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 diff --git a/venv/Lib/site-packages/pygame/examples/data/house_lo.mp3 b/venv/Lib/site-packages/pygame/examples/data/house_lo.mp3 deleted file mode 100644 index 4c26994..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/house_lo.mp3 and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/house_lo.ogg b/venv/Lib/site-packages/pygame/examples/data/house_lo.ogg deleted file mode 100644 index e050848..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/house_lo.ogg and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/house_lo.wav b/venv/Lib/site-packages/pygame/examples/data/house_lo.wav deleted file mode 100644 index 68a96b8..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/house_lo.wav and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/laplacian.png b/venv/Lib/site-packages/pygame/examples/data/laplacian.png deleted file mode 100644 index 8d064f5..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/laplacian.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/liquid.bmp b/venv/Lib/site-packages/pygame/examples/data/liquid.bmp deleted file mode 100644 index c4f12eb..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/liquid.bmp and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/midikeys.png b/venv/Lib/site-packages/pygame/examples/data/midikeys.png deleted file mode 100644 index 74ecb86..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/midikeys.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/player1.gif b/venv/Lib/site-packages/pygame/examples/data/player1.gif deleted file mode 100644 index 6c4eda7..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/player1.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/punch.wav b/venv/Lib/site-packages/pygame/examples/data/punch.wav deleted file mode 100644 index aa3f56c..0000000 Binary files 
a/venv/Lib/site-packages/pygame/examples/data/punch.wav and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/purple.xpm b/venv/Lib/site-packages/pygame/examples/data/purple.xpm deleted file mode 100644 index 7798cc1..0000000 --- a/venv/Lib/site-packages/pygame/examples/data/purple.xpm +++ /dev/null @@ -1,36 +0,0 @@ -/* XPM */ -static char * C:\Users\Kristof\Documents\purple_xpm[] = { -"32 32 1 1", -" c #FF00FF", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" ", -" "}; diff --git a/venv/Lib/site-packages/pygame/examples/data/red.jpg b/venv/Lib/site-packages/pygame/examples/data/red.jpg deleted file mode 100644 index 11a9aa0..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/red.jpg and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/sans.ttf b/venv/Lib/site-packages/pygame/examples/data/sans.ttf deleted file mode 100644 index 09fac2f..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/sans.ttf and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/scarlet.webp b/venv/Lib/site-packages/pygame/examples/data/scarlet.webp deleted file mode 100644 index cd0a15c..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/scarlet.webp and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/secosmic_lo.wav b/venv/Lib/site-packages/pygame/examples/data/secosmic_lo.wav deleted file mode 100644 index 867f802..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/secosmic_lo.wav and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/shot.gif b/venv/Lib/site-packages/pygame/examples/data/shot.gif deleted file mode 100644 index 18de528..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/shot.gif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/static.png b/venv/Lib/site-packages/pygame/examples/data/static.png deleted file mode 100644 index fb3b057..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/static.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/teal.svg b/venv/Lib/site-packages/pygame/examples/data/teal.svg deleted file mode 100644 index 85f4149..0000000 --- a/venv/Lib/site-packages/pygame/examples/data/teal.svg +++ /dev/null @@ -1,9 +0,0 @@ - - teal - - - Layer 1 - - - - diff --git a/venv/Lib/site-packages/pygame/examples/data/turquoise.tif b/venv/Lib/site-packages/pygame/examples/data/turquoise.tif deleted file mode 100644 index 39b3620..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/turquoise.tif and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/whiff.wav b/venv/Lib/site-packages/pygame/examples/data/whiff.wav deleted file mode 100644 index 3954efa..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/whiff.wav and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/data/yellow.tga b/venv/Lib/site-packages/pygame/examples/data/yellow.tga deleted file mode 100644 index d0124fe..0000000 Binary files a/venv/Lib/site-packages/pygame/examples/data/yellow.tga and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/examples/dropevent.py b/venv/Lib/site-packages/pygame/examples/dropevent.py deleted file mode 100644 index e812558..0000000 --- a/venv/Lib/site-packages/pygame/examples/dropevent.py +++ /dev/null @@ -1,76 
+0,0 @@ -#!/usr/bin/env python -""" pygame.examples.dropfile - -Drag and drop an image on here. - -Uses these events: - -* DROPBEGIN -* DROPCOMPLETE -* DROPTEXT -* DROPFILE -""" -import pygame as pg - -if pg.get_sdl_version() < (2, 0, 0): - raise Exception("This example requires SDL2.") - -pg.init() - - -def main(): - - Running = True - surf = pg.display.set_mode((640, 480)) - font = pg.font.SysFont("Arial", 24) - clock = pg.time.Clock() - - spr_file_text = font.render("Feed me some file or image!", 1, (255, 255, 255)) - spr_file_text_rect = spr_file_text.get_rect() - spr_file_text_rect.center = surf.get_rect().center - - spr_file_image = None - spr_file_image_rect = None - - while Running: - for ev in pg.event.get(): - if ev.type == pg.QUIT: - Running = False - elif ev.type == pg.DROPBEGIN: - print(ev) - print("File drop begin!") - elif ev.type == pg.DROPCOMPLETE: - print(ev) - print("File drop complete!") - elif ev.type == pg.DROPTEXT: - print(ev) - spr_file_text = font.render(ev.text, 1, (255, 255, 255)) - spr_file_text_rect = spr_file_text.get_rect() - spr_file_text_rect.center = surf.get_rect().center - elif ev.type == pg.DROPFILE: - print(ev) - spr_file_text = font.render(ev.file, 1, (255, 255, 255)) - spr_file_text_rect = spr_file_text.get_rect() - spr_file_text_rect.center = surf.get_rect().center - - # Try to open the file if it's an image - filetype = ev.file[-3:] - if filetype in ["png", "bmp", "jpg"]: - spr_file_image = pg.image.load(ev.file).convert() - spr_file_image.set_alpha(127) - spr_file_image_rect = spr_file_image.get_rect() - spr_file_image_rect.center = surf.get_rect().center - - surf.fill((0, 0, 0)) - surf.blit(spr_file_text, spr_file_text_rect) - if spr_file_image: - surf.blit(spr_file_image, spr_file_image_rect) - - pg.display.flip() - clock.tick(30) - - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/eventlist.py b/venv/Lib/site-packages/pygame/examples/eventlist.py deleted file mode 100644 index b329586..0000000 --- a/venv/Lib/site-packages/pygame/examples/eventlist.py +++ /dev/null @@ -1,196 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.eventlist - -Learn about pygame events and input. - -At the top of the screen are the state of several device values, -and a scrolling list of events are displayed on the bottom. - -""" - -usage = """ -Mouse Controls -============== - -- 1st button on mouse (left click) to toggle events 'grabed'. -- 3rd button on mouse (right click) to toggle mouse visible. -- The window can be resized. -- Mouse the mouse around to see mouse events. -- If events grabbed and mouse invisible show virtual mouse coords. - - -Keyboard Joystick Controls -========================== - -- press keys up an down to see events. -- you can see joystick events if any are plugged in. -- press "c" to toggle events generated by controllers. -""" - -import pygame as pg - -try: - import pygame._sdl2.controller - - pygame._sdl2.controller.init() - SDL2 = True -except ImportError: - SDL2 = False - -img_on_off = [] -font = None -last_key = None - -# these are a running counter of mouse.get_rel() calls. 
-virtual_x = 0 -virtual_y = 0 - - -def showtext(win, pos, text, color, bgcolor): - textimg = font.render(text, 1, color, bgcolor) - win.blit(textimg, pos) - return pos[0] + textimg.get_width() + 5, pos[1] - - -def drawstatus(win): - global virtual_x, virtual_y - bgcolor = 50, 50, 50 - win.fill(bgcolor, (0, 0, 640, 120)) - win.blit(font.render("Status Area", 1, (155, 155, 155), bgcolor), (2, 2)) - - pos = showtext(win, (10, 30), "Mouse Focus", (255, 255, 255), bgcolor) - win.blit(img_on_off[pg.mouse.get_focused()], pos) - - pos = showtext( - win, (pos[0] + 50, pos[1]), "Mouse visible", (255, 255, 255), bgcolor - ) - win.blit(img_on_off[pg.mouse.get_visible()], pos) - - pos = showtext(win, (330, 30), "Keyboard Focus", (255, 255, 255), bgcolor) - win.blit(img_on_off[pg.key.get_focused()], pos) - - pos = showtext(win, (10, 60), "Mouse Position(rel)", (255, 255, 255), bgcolor) - rel = pg.mouse.get_rel() - virtual_x += rel[0] - virtual_y += rel[1] - - mouse_data = tuple(list(pg.mouse.get_pos()) + list(rel)) - p = "%s, %s (%s, %s)" % mouse_data - showtext(win, pos, p, bgcolor, (255, 255, 55)) - - pos = showtext(win, (330, 60), "Last Keypress", (255, 255, 255), bgcolor) - if last_key: - p = "%d, %s" % (last_key, pg.key.name(last_key)) - else: - p = "None" - showtext(win, pos, p, bgcolor, (255, 255, 55)) - - pos = showtext(win, (10, 90), "Input Grabbed", (255, 255, 255), bgcolor) - win.blit(img_on_off[pg.event.get_grab()], pos) - - is_virtual_mouse = pg.event.get_grab() and not pg.mouse.get_visible() - pos = showtext(win, (330, 90), "Virtual Mouse", (255, 255, 255), bgcolor) - win.blit(img_on_off[is_virtual_mouse], pos) - if is_virtual_mouse: - p = "%s, %s" % (virtual_x, virtual_y) - showtext(win, (pos[0] + 50, pos[1]), p, bgcolor, (255, 255, 55)) - - -def drawhistory(win, history): - img = font.render("Event History Area", 1, (155, 155, 155), (0, 0, 0)) - win.blit(img, (2, 132)) - ypos = 450 - h = list(history) - h.reverse() - for line in h: - r = win.blit(line, (10, ypos)) - win.fill(0, (r.right, r.top, 620, r.height)) - ypos -= font.get_height() - - -def draw_usage_in_history(history, text): - lines = text.split("\n") - for line in lines: - if line == "" or "===" in line: - continue - img = font.render(line, 1, (50, 200, 50), (0, 0, 0)) - history.append(img) - - -def main(): - pg.init() - print(usage) - - win = pg.display.set_mode((640, 480), pg.RESIZABLE) - pg.display.set_caption("Mouse Focus Workout. 
h key for help") - - global font - font = pg.font.Font(None, 26) - - global img_on_off - img_on_off.append(font.render("Off", 1, (0, 0, 0), (255, 50, 50))) - img_on_off.append(font.render("On", 1, (0, 0, 0), (50, 255, 50))) - - # stores surfaces of text representing what has gone through the event queue - history = [] - - # let's turn on the joysticks just so we can play with em - for x in range(pg.joystick.get_count()): - if SDL2 and pg._sdl2.controller.is_controller(x): - c = pg._sdl2.controller.Controller(x) - txt = "Enabled controller: " + c.name - else: - j = pg.joystick.Joystick(x) - txt = "Enabled joystick: " + j.get_name() - - img = font.render(txt, 1, (50, 200, 50), (0, 0, 0)) - history.append(img) - if not pg.joystick.get_count(): - img = font.render("No Joysticks to Initialize", 1, (50, 200, 50), (0, 0, 0)) - history.append(img) - - going = True - while going: - for e in pg.event.get(): - if e.type == pg.KEYDOWN: - if e.key == pg.K_ESCAPE: - going = False - else: - global last_key - last_key = e.key - if e.key == pg.K_h: - draw_usage_in_history(history, usage) - if SDL2 and e.key == pg.K_c: - current_state = pg._sdl2.controller.get_eventstate() - pg._sdl2.controller.set_eventstate(not current_state) - - if e.type == pg.MOUSEBUTTONDOWN and e.button == 1: - pg.event.set_grab(not pg.event.get_grab()) - - if e.type == pg.MOUSEBUTTONDOWN and e.button == 3: - pg.mouse.set_visible(not pg.mouse.get_visible()) - - if e.type != pg.MOUSEMOTION: - txt = "%s: %s" % (pg.event.event_name(e.type), e.dict) - img = font.render(txt, 1, (50, 200, 50), (0, 0, 0)) - history.append(img) - history = history[-13:] - - if e.type == pg.VIDEORESIZE: - win = pg.display.set_mode(e.size, pg.RESIZABLE) - - if e.type == pg.QUIT: - going = False - - drawstatus(win) - drawhistory(win, history) - - pg.display.flip() - pg.time.wait(10) - - pg.quit() - raise SystemExit - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/font_viewer.py b/venv/Lib/site-packages/pygame/examples/font_viewer.py deleted file mode 100644 index 7170fd4..0000000 --- a/venv/Lib/site-packages/pygame/examples/font_viewer.py +++ /dev/null @@ -1,279 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.font_viewer -Scroll through your system fonts from a list of surfaces or one huge buffer. - -This example exhibits: -* iterate over available fonts using font.get_fonts and font.SysFont() -* click and drag using mouse input -* scrolling with the scroll wheel -* save a surface to disk -* work with a very large surface -* simple mouse and keyboard scroll speed acceleration - -By default this example uses the fonts returned by pygame.font.get_fonts() -and opens them using pygame.font.SysFont(). -Alternatively, you may pass a path to the command line. The TTF files found -in that directory will be used instead. - -Mouse Controls: -* Use the mouse wheel or click and drag to scroll - -Keyboard Controls: -* Press up or down to scroll -* Press escape to exit -""" -import sys -import os - -import pygame as pg - -use_big_surface = False # draw into large buffer and save png file - - -class FontViewer: - """ - This example is encapsulated by the fontviewer class - It initializes the pygame window, handles input, and draws itself - to the screen. 
- """ - - KEY_SCROLL_SPEED = 10 - MOUSE_SCROLL_SPEED = 50 - - def __init__(self, **dparams): - pg.init() - self.font_dir = dparams.get("folder", None) - - # create a window that uses 80 percent of the screen - info = pg.display.Info() - w = info.current_w - h = info.current_h - pg.display.set_mode((int(w * 0.8), int(h * 0.8))) - self.font_size = h // 20 - - self.clock = pg.time.Clock() - self.y_offset = 0 - self.grabbed = False - self.render_fonts("&N abcDEF789") - - if use_big_surface or "big" in sys.argv: - self.render_surface() - self.display_surface() - self.save_png() - else: - self.display_fonts() - - def get_font_list(self): - """ - Generate a font list using font.get_fonts() for system fonts or - from a path from the command line. - """ - path = "" - if len(sys.argv) > 1 and os.path.exists(sys.argv[1]): - path = os.path.join(sys.argv[1], "") - fonts = [] - if os.path.exists(path): - # this list comprehension could replace the following loop - # fonts = [f in os.listdir(path) if f.endswith('.ttf')] - for font in os.listdir(path): - if font.endswith(".ttf"): - fonts.append(font) - return fonts or pg.font.get_fonts(), path - - def render_fonts(self, text="A display of font &N", **dparams): - """ - Build a list that includes a surface and the running total of their - height for each font in the font list. Store the largest width and - other variables for later use. - """ - font_size = dparams.get("size", 0) or self.font_size - color = dparams.get("color", (255, 255, 255)) - self.back_color = dparams.get("back_color", (0, 0, 0)) - - fonts, path = self.get_font_list() - font_surfaces = [] - total_height = 0 - max_width = 0 - - load_font = pg.font.Font if path else pg.font.SysFont - - # display instructions at the top of the display - font = pg.font.SysFont(pg.font.get_default_font(), font_size) - lines = ( - "Use the scroll wheel or click and drag", - "to scroll up and down", - "Foreign fonts might render incorrectly", - "Here are your {} fonts".format(len(fonts)), - "", - ) - for line in lines: - surf = font.render(line, 1, color, self.back_color) - font_surfaces.append((surf, total_height)) - total_height += surf.get_height() - max_width = max(max_width, surf.get_width()) - - # render all the fonts and store them with the total height - for name in sorted(fonts): - try: - font = load_font(path + name, font_size) - except IOError: - continue - line = text.replace("&N", name) - try: - surf = font.render(line, 1, color, self.back_color) - except pg.error as e: - print(e) - break - - max_width = max(max_width, surf.get_width()) - font_surfaces.append((surf, total_height)) - total_height += surf.get_height() - - # store variables for later usage - self.total_height = total_height - self.max_width = max_width - self.font_surfaces = font_surfaces - self.max_y = total_height - pg.display.get_surface().get_height() - - def display_fonts(self): - """ - Display the visible fonts based on the y_offset value(updated in - handle_events) and the height of the pygame window. 
- """ - display = pg.display.get_surface() - clock = pg.time.Clock() - center = display.get_width() // 2 - - while True: - # draw visible surfaces - display.fill(self.back_color) - for surface, top in self.font_surfaces: - bottom = top + surface.get_height() - if ( - bottom >= self.y_offset - and top <= self.y_offset + display.get_height() - ): - x = center - surface.get_width() // 2 - display.blit(surface, (x, top - self.y_offset)) - # get input and update the screen - if not self.handle_events(): - break - pg.display.flip() - clock.tick(30) - - def render_surface(self): - """ - Note: this method uses twice the memory and is only called if - big_surface is set to true or big is added to the command line. - - Optionally generates one large buffer to draw all the font surfaces - into. This is necessary to save the display to a png file and may - be useful for testing large surfaces. - """ - - large_surface = pg.surface.Surface( - (self.max_width, self.total_height) - ).convert() - large_surface.fill(self.back_color) - print("scrolling surface created") - - # display the surface size and memory usage - byte_size = large_surface.get_bytesize() - total_size = byte_size * (self.max_width * self.total_height) - print( - "Surface Size = {}x{} @ {}bpp: {:,.3f}mb".format( - self.max_width, self.total_height, byte_size, total_size / 1000000.0 - ) - ) - - y = 0 - center = int(self.max_width / 2) - for surface, top in self.font_surfaces: - w = surface.get_width() - x = center - int(w / 2) - large_surface.blit(surface, (x, y)) - y += surface.get_height() - self.max_y = large_surface.get_height() - pg.display.get_surface().get_height() - self.surface = large_surface - - def display_surface(self, time=10): - """ - Display the large surface created by the render_surface method. Scrolls - based on the y_offset value(set in handle_events) and the height of the - pygame window. - """ - screen = pg.display.get_surface() - - # Create a Rect equal to size of screen. Then we can just change its - # top attribute to draw the desired part of the rendered font surface - # to the display surface - rect = pg.rect.Rect( - 0, - 0, - self.surface.get_width(), - min(self.surface.get_height(), screen.get_height()), - ) - - x = int((screen.get_width() - self.surface.get_width()) / 2) - going = True - while going: - if not self.handle_events(): - going = False - screen.fill(self.back_color) - rect.top = self.y_offset - screen.blit(self.surface, (x, 0), rect) - pg.display.flip() - self.clock.tick(20) - - def save_png(self, name="font_viewer.png"): - pg.image.save(self.surface, name) - file_size = os.path.getsize(name) // 1024 - print("font surface saved to {}\nsize: {:,}Kb".format(name, file_size)) - - def handle_events(self): - """ - This method handles user input. It returns False when it receives - a pygame.QUIT event or the user presses escape. The y_offset is - changed based on mouse and keyboard input. display_fonts() and - display_surface() use the y_offset to scroll display. 
- """ - events = pg.event.get() - for e in events: - if e.type == pg.QUIT: - return False - elif e.type == pg.KEYDOWN: - if e.key == pg.K_ESCAPE: - return False - elif e.type == pg.MOUSEWHEEL: - self.y_offset += e.y * self.MOUSE_SCROLL_SPEED * -1 - elif e.type == pg.MOUSEBUTTONDOWN: - # enter dragging mode on mouse down - self.grabbed = True - pg.event.set_grab(True) - elif e.type == pg.MOUSEBUTTONUP: - # exit drag mode on mouse up - self.grabbed = False - pg.event.set_grab(False) - - # allow simple accelerated scrolling with the keyboard - keys = pg.key.get_pressed() - if keys[pg.K_UP]: - self.key_held += 1 - self.y_offset -= int(self.KEY_SCROLL_SPEED * (self.key_held // 10)) - elif keys[pg.K_DOWN]: - self.key_held += 1 - self.y_offset += int(self.KEY_SCROLL_SPEED * (self.key_held // 10)) - else: - self.key_held = 20 - - # set the y_offset for scrolling and keep it between 0 and max_y - y = pg.mouse.get_rel()[1] - if y and self.grabbed: - self.y_offset -= y - - self.y_offset = min((max(self.y_offset, 0), self.max_y)) - return True - - -viewer = FontViewer() -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/fonty.py b/venv/Lib/site-packages/pygame/examples/fonty.py deleted file mode 100644 index 3eed676..0000000 --- a/venv/Lib/site-packages/pygame/examples/fonty.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.fonty - -Here we load a .TTF True Type font file, and display it in -a basic pygame window. - -Demonstrating several Font object attributes. - -- basic window, event, and font management. -""" -import pygame as pg - -def main(): - # initialize - pg.init() - resolution = 400, 200 - screen = pg.display.set_mode(resolution) - - ## pg.mouse.set_cursor(*pg.cursors.diamond) - - fg = 250, 240, 230 - bg = 5, 5, 5 - wincolor = 40, 40, 90 - - # fill background - screen.fill(wincolor) - - # load font, prepare values - font = pg.font.Font(None, 80) - text = "Fonty" - size = font.size(text) - - # no AA, no transparancy, normal - ren = font.render(text, 0, fg, bg) - screen.blit(ren, (10, 10)) - - # no AA, transparancy, underline - font.set_underline(1) - ren = font.render(text, 0, fg) - screen.blit(ren, (10, 40 + size[1])) - font.set_underline(0) - - a_sys_font = pg.font.SysFont("Arial", 60) - - # AA, no transparancy, bold - a_sys_font.set_bold(1) - ren = a_sys_font.render(text, 1, fg, bg) - screen.blit(ren, (30 + size[0], 10)) - a_sys_font.set_bold(0) - - # AA, transparancy, italic - a_sys_font.set_italic(1) - ren = a_sys_font.render(text, 1, fg) - screen.blit(ren, (30 + size[0], 40 + size[1])) - a_sys_font.set_italic(0) - - # Get some metrics. 
- print("Font metrics for 'Fonty': %s" % a_sys_font.metrics(text)) - ch = "\u3060" - msg = "Font metrics for '%s': %s" % (ch, a_sys_font.metrics(ch)) - print(msg) - - ## #some_japanese_unicode = u"\u304b\u3070\u306b" - ##some_japanese_unicode = unicode_('%c%c%c') % (0x304b, 0x3070, 0x306b) - - # AA, transparancy, italic - ##ren = a_sys_font.render(some_japanese_unicode, 1, fg) - ##screen.blit(ren, (30 + size[0], 40 + size[1])) - - # show the surface and await user quit - pg.display.flip() - while True: - # use event.wait to keep from polling 100% cpu - if pg.event.wait().type in (pg.QUIT, pg.KEYDOWN, pg.MOUSEBUTTONDOWN): - break - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/freetype_misc.py b/venv/Lib/site-packages/pygame/examples/freetype_misc.py deleted file mode 100644 index d07c422..0000000 --- a/venv/Lib/site-packages/pygame/examples/freetype_misc.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.freetype_misc - - -Miscellaneous (or misc) means: - "consisting of a mixture of various things that are not - usually connected with each other" - Adjective - - -All those words you read on computers, magazines, books, and such over the years? -Probably a lot of them were constructed with... - -The FreeType Project: a free, high-quality and portable Font engine. -https://freetype.org - -Next time you're reading something. Think of them. - - -Herein lies a *BOLD* demo consisting of a mixture of various things. - - Not only is it a *BOLD* demo, it's an - italics demo, - a rotated demo, - it's a blend, - and is sized to go nicely with a cup of tea*. - - * also goes well with coffee. - -Enjoy! -""" -import os -import pygame as pg -import pygame.freetype as freetype - - -def run(): - pg.init() - - fontdir = os.path.dirname(os.path.abspath(__file__)) - font = freetype.Font(os.path.join(fontdir, "data", "sans.ttf")) - - screen = pg.display.set_mode((800, 600)) - screen.fill("gray") - - font.underline_adjustment = 0.5 - font.pad = True - font.render_to( - screen, - (32, 32), - "Hello World", - "red3", - "dimgray", - size=64, - style=freetype.STYLE_UNDERLINE | freetype.STYLE_OBLIQUE, - ) - font.pad = False - - font.render_to( - screen, - (32, 128), - "abcdefghijklm", - "dimgray", - "green3", - size=64, - ) - - font.vertical = True - font.render_to(screen, (32, 200), "Vertical?", "blue3", None, size=32) - font.vertical = False - - font.render_to(screen, (64, 190), "Let's spin!", "red3", None, size=48, rotation=55) - - font.render_to( - screen, (160, 290), "All around!", "green3", None, size=48, rotation=-55 - ) - - font.render_to(screen, (250, 220), "and BLEND", (255, 0, 0, 128), None, size=64) - - font.render_to(screen, (265, 237), "or BLAND!", (0, 0xCC, 28, 128), None, size=64) - - # Some pinwheels - font.origin = True - for angle in range(0, 360, 45): - font.render_to(screen, (150, 420), ")", "black", size=48, rotation=angle) - font.vertical = True - for angle in range(15, 375, 30): - font.render_to(screen, (600, 400), "|^*", "orange", size=48, rotation=angle) - font.vertical = False - font.origin = False - - utext = "I \u2665 Unicode" - font.render_to(screen, (298, 320), utext, (0, 0xCC, 0xDD), None, size=64) - - utext = "\u2665" - font.render_to(screen, (480, 32), utext, "gray", "red3", size=148) - - font.render_to( - screen, - (380, 380), - "...yes, this is an SDL surface", - "black", - None, - size=24, - style=freetype.STYLE_STRONG, - ) - - font.origin = True - r = font.render_to( - screen, - (100, 530), - 
"stretch", - "red3", - None, - size=(24, 24), - style=freetype.STYLE_NORMAL, - ) - font.render_to( - screen, - (100 + r.width, 530), - " VERTICAL", - "red3", - None, - size=(24, 48), - style=freetype.STYLE_NORMAL, - ) - - r = font.render_to( - screen, - (100, 580), - "stretch", - "blue3", - None, - size=(24, 24), - style=freetype.STYLE_NORMAL, - ) - font.render_to( - screen, - (100 + r.width, 580), - " HORIZONTAL", - "blue3", - None, - size=(48, 24), - style=freetype.STYLE_NORMAL, - ) - - pg.display.flip() - - while 1: - if pg.event.wait().type in (pg.QUIT, pg.KEYDOWN, pg.MOUSEBUTTONDOWN): - break - - pg.quit() - - -if __name__ == "__main__": - run() diff --git a/venv/Lib/site-packages/pygame/examples/glcube.py b/venv/Lib/site-packages/pygame/examples/glcube.py deleted file mode 100644 index 9d85c24..0000000 --- a/venv/Lib/site-packages/pygame/examples/glcube.py +++ /dev/null @@ -1,591 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.glcube - -Draw a cube on the screen. - - - -Amazing. - -Every frame we orbit the camera around a small amount -creating the illusion of a spinning object. - -First we setup some points of a multicolored cube. Then we then go through -a semi-unoptimized loop to draw the cube points onto the screen. - -OpenGL does all the hard work for us. :] - - -Keyboard Controls ------------------ - -* ESCAPE key to quit -* f key to toggle fullscreen. - -""" -import math -import ctypes - -import pygame as pg - -try: - import OpenGL.GL as GL - import OpenGL.GLU as GLU -except ImportError: - print("pyopengl missing. The GLCUBE example requires: pyopengl numpy") - raise SystemExit - -try: - from numpy import array, dot, eye, zeros, float32, uint32 -except ImportError: - print("numpy missing. The GLCUBE example requires: pyopengl numpy") - raise SystemExit - - -# do we want to use the 'modern' OpenGL API or the old one? -# This example shows you how to do both. -USE_MODERN_GL = True - -# Some simple data for a colored cube here we have the 3D point position -# and color for each corner. A list of indices describes each face, and a -# list of indices describes each edge. - -CUBE_POINTS = ( - (0.5, -0.5, -0.5), - (0.5, 0.5, -0.5), - (-0.5, 0.5, -0.5), - (-0.5, -0.5, -0.5), - (0.5, -0.5, 0.5), - (0.5, 0.5, 0.5), - (-0.5, -0.5, 0.5), - (-0.5, 0.5, 0.5), -) - -# colors are 0-1 floating values -CUBE_COLORS = ( - (1, 0, 0), - (1, 1, 0), - (0, 1, 0), - (0, 0, 0), - (1, 0, 1), - (1, 1, 1), - (0, 0, 1), - (0, 1, 1), -) - -CUBE_QUAD_VERTS = ( - (0, 1, 2, 3), - (3, 2, 7, 6), - (6, 7, 5, 4), - (4, 5, 1, 0), - (1, 5, 7, 2), - (4, 0, 3, 6), -) - -CUBE_EDGES = ( - (0, 1), - (0, 3), - (0, 4), - (2, 1), - (2, 3), - (2, 7), - (6, 3), - (6, 4), - (6, 7), - (5, 1), - (5, 4), - (5, 7), -) - - -def translate(matrix, x=0.0, y=0.0, z=0.0): - """ - Translate (move) a matrix in the x, y and z axes. - - :param matrix: Matrix to translate. - :param x: direction and magnitude to translate in x axis. Defaults to 0. - :param y: direction and magnitude to translate in y axis. Defaults to 0. - :param z: direction and magnitude to translate in z axis. Defaults to 0. - :return: The translated matrix. - """ - translation_matrix = array( - [ - [1.0, 0.0, 0.0, x], - [0.0, 1.0, 0.0, y], - [0.0, 0.0, 1.0, z], - [0.0, 0.0, 0.0, 1.0], - ], - dtype=matrix.dtype, - ).T - matrix[...] = dot(matrix, translation_matrix) - return matrix - - -def frustum(left, right, bottom, top, znear, zfar): - """ - Build a perspective matrix from the clipping planes, or camera 'frustrum' - volume. 
- - :param left: left position of the near clipping plane. - :param right: right position of the near clipping plane. - :param bottom: bottom position of the near clipping plane. - :param top: top position of the near clipping plane. - :param znear: z depth of the near clipping plane. - :param zfar: z depth of the far clipping plane. - - :return: A perspective matrix. - """ - perspective_matrix = zeros((4, 4), dtype=float32) - perspective_matrix[0, 0] = +2.0 * znear / (right - left) - perspective_matrix[2, 0] = (right + left) / (right - left) - perspective_matrix[1, 1] = +2.0 * znear / (top - bottom) - perspective_matrix[3, 1] = (top + bottom) / (top - bottom) - perspective_matrix[2, 2] = -(zfar + znear) / (zfar - znear) - perspective_matrix[3, 2] = -2.0 * znear * zfar / (zfar - znear) - perspective_matrix[2, 3] = -1.0 - return perspective_matrix - - -def perspective(fovy, aspect, znear, zfar): - """ - Build a perspective matrix from field of view, aspect ratio and depth - planes. - - :param fovy: the field of view angle in the y axis. - :param aspect: aspect ratio of our view port. - :param znear: z depth of the near clipping plane. - :param zfar: z depth of the far clipping plane. - - :return: A perspective matrix. - """ - h = math.tan(fovy / 360.0 * math.pi) * znear - w = h * aspect - return frustum(-w, w, -h, h, znear, zfar) - - -def rotate(matrix, angle, x, y, z): - """ - Rotate a matrix around an axis. - - :param matrix: The matrix to rotate. - :param angle: The angle to rotate by. - :param x: x of axis to rotate around. - :param y: y of axis to rotate around. - :param z: z of axis to rotate around. - - :return: The rotated matrix - """ - angle = math.pi * angle / 180 - c, s = math.cos(angle), math.sin(angle) - n = math.sqrt(x * x + y * y + z * z) - x, y, z = x / n, y / n, z / n - cx, cy, cz = (1 - c) * x, (1 - c) * y, (1 - c) * z - rotation_matrix = array( - [ - [cx * x + c, cy * x - z * s, cz * x + y * s, 0], - [cx * y + z * s, cy * y + c, cz * y - x * s, 0], - [cx * z - y * s, cy * z + x * s, cz * z + c, 0], - [0, 0, 0, 1], - ], - dtype=matrix.dtype, - ).T - matrix[...] = dot(matrix, rotation_matrix) - return matrix - - -class Rotation: - """ - Data class that stores rotation angles in three axes. - """ - - def __init__(self): - self.theta = 20 - self.phi = 40 - self.psi = 25 - - -def drawcube_old(): - """ - Draw the cube using the old open GL methods pre 3.2 core context. - """ - allpoints = list(zip(CUBE_POINTS, CUBE_COLORS)) - - GL.glBegin(GL.GL_QUADS) - for face in CUBE_QUAD_VERTS: - for vert in face: - pos, color = allpoints[vert] - GL.glColor3fv(color) - GL.glVertex3fv(pos) - GL.glEnd() - - GL.glColor3f(1.0, 1.0, 1.0) - GL.glBegin(GL.GL_LINES) - for line in CUBE_EDGES: - for vert in line: - pos, color = allpoints[vert] - GL.glVertex3fv(pos) - - GL.glEnd() - - -def init_gl_stuff_old(): - """ - Initialise open GL, prior to core context 3.2 - """ - GL.glEnable(GL.GL_DEPTH_TEST) # use our zbuffer - - # setup the camera - GL.glMatrixMode(GL.GL_PROJECTION) - GL.glLoadIdentity() - GLU.gluPerspective(45.0, 640 / 480.0, 0.1, 100.0) # setup lens - GL.glTranslatef(0.0, 0.0, -3.0) # move back - GL.glRotatef(25, 1, 0, 0) # orbit higher - - -def init_gl_modern(display_size): - """ - Initialise open GL in the 'modern' open GL style for open GL versions - greater than 3.1. - - :param display_size: Size of the window/viewport. 
- """ - - # Create shaders - # -------------------------------------- - vertex_code = """ - - #version 150 - uniform mat4 model; - uniform mat4 view; - uniform mat4 projection; - - uniform vec4 colour_mul; - uniform vec4 colour_add; - - in vec4 vertex_colour; // vertex colour in - in vec3 vertex_position; - - out vec4 vertex_color_out; // vertex colour out - void main() - { - vertex_color_out = (colour_mul * vertex_colour) + colour_add; - gl_Position = projection * view * model * vec4(vertex_position, 1.0); - } - - """ - - fragment_code = """ - #version 150 - in vec4 vertex_color_out; // vertex colour from vertex shader - out vec4 fragColor; - void main() - { - fragColor = vertex_color_out; - } - """ - - program = GL.glCreateProgram() - vertex = GL.glCreateShader(GL.GL_VERTEX_SHADER) - fragment = GL.glCreateShader(GL.GL_FRAGMENT_SHADER) - GL.glShaderSource(vertex, vertex_code) - GL.glCompileShader(vertex) - - # this logs issues the shader compiler finds. - log = GL.glGetShaderInfoLog(vertex) - if isinstance(log, bytes): - log = log.decode() - for line in log.split("\n"): - print(line) - - GL.glAttachShader(program, vertex) - GL.glShaderSource(fragment, fragment_code) - GL.glCompileShader(fragment) - - # this logs issues the shader compiler finds. - log = GL.glGetShaderInfoLog(fragment) - if isinstance(log, bytes): - log = log.decode() - for line in log.split("\n"): - print(line) - - GL.glAttachShader(program, fragment) - GL.glValidateProgram(program) - GL.glLinkProgram(program) - - GL.glDetachShader(program, vertex) - GL.glDetachShader(program, fragment) - GL.glUseProgram(program) - - # Create vertex buffers and shader constants - # ------------------------------------------ - - # Cube Data - vertices = zeros( - 8, [("vertex_position", float32, 3), ("vertex_colour", float32, 4)] - ) - - vertices["vertex_position"] = [ - [1, 1, 1], - [-1, 1, 1], - [-1, -1, 1], - [1, -1, 1], - [1, -1, -1], - [1, 1, -1], - [-1, 1, -1], - [-1, -1, -1], - ] - - vertices["vertex_colour"] = [ - [0, 1, 1, 1], - [0, 0, 1, 1], - [0, 0, 0, 1], - [0, 1, 0, 1], - [1, 1, 0, 1], - [1, 1, 1, 1], - [1, 0, 1, 1], - [1, 0, 0, 1], - ] - - filled_cube_indices = array( - [ - 0, - 1, - 2, - 0, - 2, - 3, - 0, - 3, - 4, - 0, - 4, - 5, - 0, - 5, - 6, - 0, - 6, - 1, - 1, - 6, - 7, - 1, - 7, - 2, - 7, - 4, - 3, - 7, - 3, - 2, - 4, - 7, - 6, - 4, - 6, - 5, - ], - dtype=uint32, - ) - - outline_cube_indices = array( - [0, 1, 1, 2, 2, 3, 3, 0, 4, 7, 7, 6, 6, 5, 5, 4, 0, 5, 1, 6, 2, 7, 3, 4], - dtype=uint32, - ) - - shader_data = {"buffer": {}, "constants": {}} - - GL.glBindVertexArray(GL.glGenVertexArrays(1)) # Have to do this first - - shader_data["buffer"]["vertices"] = GL.glGenBuffers(1) - GL.glBindBuffer(GL.GL_ARRAY_BUFFER, shader_data["buffer"]["vertices"]) - GL.glBufferData(GL.GL_ARRAY_BUFFER, vertices.nbytes, vertices, GL.GL_DYNAMIC_DRAW) - - stride = vertices.strides[0] - offset = ctypes.c_void_p(0) - - loc = GL.glGetAttribLocation(program, "vertex_position") - GL.glEnableVertexAttribArray(loc) - GL.glVertexAttribPointer(loc, 3, GL.GL_FLOAT, False, stride, offset) - - offset = ctypes.c_void_p(vertices.dtype["vertex_position"].itemsize) - - loc = GL.glGetAttribLocation(program, "vertex_colour") - GL.glEnableVertexAttribArray(loc) - GL.glVertexAttribPointer(loc, 4, GL.GL_FLOAT, False, stride, offset) - - shader_data["buffer"]["filled"] = GL.glGenBuffers(1) - GL.glBindBuffer(GL.GL_ELEMENT_ARRAY_BUFFER, shader_data["buffer"]["filled"]) - GL.glBufferData( - GL.GL_ELEMENT_ARRAY_BUFFER, - filled_cube_indices.nbytes, - 
filled_cube_indices, - GL.GL_STATIC_DRAW, - ) - - shader_data["buffer"]["outline"] = GL.glGenBuffers(1) - GL.glBindBuffer(GL.GL_ELEMENT_ARRAY_BUFFER, shader_data["buffer"]["outline"]) - GL.glBufferData( - GL.GL_ELEMENT_ARRAY_BUFFER, - outline_cube_indices.nbytes, - outline_cube_indices, - GL.GL_STATIC_DRAW, - ) - - shader_data["constants"]["model"] = GL.glGetUniformLocation(program, "model") - GL.glUniformMatrix4fv(shader_data["constants"]["model"], 1, False, eye(4)) - - shader_data["constants"]["view"] = GL.glGetUniformLocation(program, "view") - view = translate(eye(4), z=-6) - GL.glUniformMatrix4fv(shader_data["constants"]["view"], 1, False, view) - - shader_data["constants"]["projection"] = GL.glGetUniformLocation( - program, "projection" - ) - GL.glUniformMatrix4fv(shader_data["constants"]["projection"], 1, False, eye(4)) - - # This colour is multiplied with the base vertex colour in producing - # the final output - shader_data["constants"]["colour_mul"] = GL.glGetUniformLocation( - program, "colour_mul" - ) - GL.glUniform4f(shader_data["constants"]["colour_mul"], 1, 1, 1, 1) - - # This colour is added on to the base vertex colour in producing - # the final output - shader_data["constants"]["colour_add"] = GL.glGetUniformLocation( - program, "colour_add" - ) - GL.glUniform4f(shader_data["constants"]["colour_add"], 0, 0, 0, 0) - - # Set GL drawing data - # ------------------- - GL.glClearColor(0, 0, 0, 0) - GL.glPolygonOffset(1, 1) - GL.glEnable(GL.GL_LINE_SMOOTH) - GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA) - GL.glDepthFunc(GL.GL_LESS) - GL.glHint(GL.GL_LINE_SMOOTH_HINT, GL.GL_NICEST) - GL.glLineWidth(1.0) - - projection = perspective(45.0, display_size[0] / float(display_size[1]), 2.0, 100.0) - GL.glUniformMatrix4fv(shader_data["constants"]["projection"], 1, False, projection) - - return shader_data, filled_cube_indices, outline_cube_indices - - -def draw_cube_modern(shader_data, filled_cube_indices, outline_cube_indices, rotation): - """ - Draw a cube in the 'modern' Open GL style, for post 3.1 versions of - open GL. - - :param shader_data: compile vertex & pixel shader data for drawing a cube. - :param filled_cube_indices: the indices to draw the 'filled' cube. - :param outline_cube_indices: the indices to draw the 'outline' cube. - :param rotation: the current rotations to apply. 
- """ - - GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT) - - # Filled cube - GL.glDisable(GL.GL_BLEND) - GL.glEnable(GL.GL_DEPTH_TEST) - GL.glEnable(GL.GL_POLYGON_OFFSET_FILL) - GL.glUniform4f(shader_data["constants"]["colour_mul"], 1, 1, 1, 1) - GL.glUniform4f(shader_data["constants"]["colour_add"], 0, 0, 0, 0.0) - GL.glBindBuffer(GL.GL_ELEMENT_ARRAY_BUFFER, shader_data["buffer"]["filled"]) - GL.glDrawElements( - GL.GL_TRIANGLES, len(filled_cube_indices), GL.GL_UNSIGNED_INT, None - ) - - # Outlined cube - GL.glDisable(GL.GL_POLYGON_OFFSET_FILL) - GL.glEnable(GL.GL_BLEND) - GL.glUniform4f(shader_data["constants"]["colour_mul"], 0, 0, 0, 0.0) - GL.glUniform4f(shader_data["constants"]["colour_add"], 1, 1, 1, 1.0) - GL.glBindBuffer(GL.GL_ELEMENT_ARRAY_BUFFER, shader_data["buffer"]["outline"]) - GL.glDrawElements(GL.GL_LINES, len(outline_cube_indices), GL.GL_UNSIGNED_INT, None) - - # Rotate cube - # rotation.theta += 1.0 # degrees - rotation.phi += 1.0 # degrees - # rotation.psi += 1.0 # degrees - model = eye(4, dtype=float32) - # rotate(model, rotation.theta, 0, 0, 1) - rotate(model, rotation.phi, 0, 1, 0) - rotate(model, rotation.psi, 1, 0, 0) - GL.glUniformMatrix4fv(shader_data["constants"]["model"], 1, False, model) - - -def main(): - """run the demo""" - - # initialize pygame and setup an opengl display - pg.init() - - gl_version = (3, 0) # GL Version number (Major, Minor) - if USE_MODERN_GL: - gl_version = (3, 2) # GL Version number (Major, Minor) - - # By setting these attributes we can choose which Open GL Profile - # to use, profiles greater than 3.2 use a different rendering path - pg.display.gl_set_attribute(pg.GL_CONTEXT_MAJOR_VERSION, gl_version[0]) - pg.display.gl_set_attribute(pg.GL_CONTEXT_MINOR_VERSION, gl_version[1]) - pg.display.gl_set_attribute( - pg.GL_CONTEXT_PROFILE_MASK, pg.GL_CONTEXT_PROFILE_CORE - ) - - fullscreen = False # start in windowed mode - - display_size = (640, 480) - pg.display.set_mode(display_size, pg.OPENGL | pg.DOUBLEBUF | pg.RESIZABLE) - - if USE_MODERN_GL: - gpu, f_indices, o_indices = init_gl_modern(display_size) - rotation = Rotation() - else: - init_gl_stuff_old() - - going = True - while going: - # check for quit'n events - events = pg.event.get() - for event in events: - if event.type == pg.QUIT or ( - event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE - ): - going = False - - elif event.type == pg.KEYDOWN and event.key == pg.K_f: - if not fullscreen: - print("Changing to FULLSCREEN") - pg.display.set_mode( - (640, 480), pg.OPENGL | pg.DOUBLEBUF | pg.FULLSCREEN - ) - else: - print("Changing to windowed mode") - pg.display.set_mode((640, 480), pg.OPENGL | pg.DOUBLEBUF) - fullscreen = not fullscreen - if gl_version[0] >= 4 or (gl_version[0] == 3 and gl_version[1] >= 2): - gpu, f_indices, o_indices = init_gl_modern(display_size) - rotation = Rotation() - else: - init_gl_stuff_old() - - if USE_MODERN_GL: - draw_cube_modern(gpu, f_indices, o_indices, rotation) - else: - # clear screen and move camera - GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT) - # orbit camera around by 1 degree - GL.glRotatef(1, 0, 1, 0) - drawcube_old() - - pg.display.flip() - pg.time.wait(10) - - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/headless_no_windows_needed.py b/venv/Lib/site-packages/pygame/examples/headless_no_windows_needed.py deleted file mode 100644 index a74057c..0000000 --- a/venv/Lib/site-packages/pygame/examples/headless_no_windows_needed.py +++ /dev/null @@ -1,50 +0,0 @@ 
-#!/usr/bin/env python -""" pygame.examples.headless_no_windows_needed - -How to use pygame with no windowing system, like on headless servers. - -Thumbnail generation with scaling is an example of what you can do with pygame. -NOTE: the pygame scale function uses mmx/sse if available, and can be run - in multiple threads. -""" -useage = """-scale inputimage outputimage new_width new_height -eg. -scale in.png out.png 50 50 - -""" - -import os -import sys - -# set SDL to use the dummy NULL video driver, -# so it doesn't need a windowing system. -os.environ["SDL_VIDEODRIVER"] = "dummy" - -import pygame as pg - -# Some platforms need to init the display for some parts of pg. -pg.display.init() -screen = pg.display.set_mode((1, 1)) - - -def scaleit(fin, fout, w, h): - i = pg.image.load(fin) - - if hasattr(pg.transform, "smoothscale"): - scaled_image = pg.transform.smoothscale(i, (w, h)) - else: - scaled_image = pg.transform.scale(i, (w, h)) - pg.image.save(scaled_image, fout) - - -def main(fin, fout, w, h): - """smoothscale image file named fin as fout with new size (w,h)""" - scaleit(fin, fout, w, h) - - -if __name__ == "__main__": - if "-scale" in sys.argv: - fin, fout, w, h = sys.argv[2:] - w, h = map(int, [w, h]) - main(fin, fout, w, h) - else: - print(useage) diff --git a/venv/Lib/site-packages/pygame/examples/joystick.py b/venv/Lib/site-packages/pygame/examples/joystick.py deleted file mode 100644 index 44cc7b8..0000000 --- a/venv/Lib/site-packages/pygame/examples/joystick.py +++ /dev/null @@ -1,149 +0,0 @@ -import pygame - -pygame.init() - -# This is a simple class that will help us print to the screen. -# It has nothing to do with the joysticks, just outputting the -# information. -class TextPrint(object): - def __init__(self): - self.reset() - self.font = pygame.font.Font(None, 20) - - def tprint(self, screen, text): - text_bitmap = self.font.render(text, True, "black") - screen.blit(text_bitmap, (self.x, self.y)) - self.y += self.line_height - - def reset(self): - self.x = 10 - self.y = 10 - self.line_height = 15 - - def indent(self): - self.x += 10 - - def unindent(self): - self.x -= 10 - - -def main(): - # Set the width and height of the screen (width, height), and name the window. - screen = pygame.display.set_mode((500, 700)) - pygame.display.set_caption("Joystick example") - - clock = pygame.time.Clock() - - # Get ready to print. - text_print = TextPrint() - - # This dict can be left as-is, since pygame will generate a - # pygame.JOYDEVICEADDED event for every joystick connected - # at the start of the program. - joysticks = {} - - done = False - while not done: - # Event processing step. - # Possible joystick events: JOYAXISMOTION, JOYBALLMOTION, JOYBUTTONDOWN, - # JOYBUTTONUP, JOYHATMOTION, JOYDEVICEADDED, JOYDEVICEREMOVED - for event in pygame.event.get(): - if event.type == pygame.QUIT: - done = True # Flag that we are done so we exit this loop. - - if event.type == pygame.JOYBUTTONDOWN: - print("Joystick button pressed.") - if event.button == 0: - joystick = joysticks[event.instance_id] - if joystick.rumble(0, 0.7, 500): - print( - "Rumble effect played on joystick {}".format( - event.instance_id - ) - ) - - if event.type == pygame.JOYBUTTONUP: - print("Joystick button released.") - - # Handle hotplugging - if event.type == pygame.JOYDEVICEADDED: - # This event will be generated when the program starts for every - # joystick, filling up the list without needing to create them manually. 
- joy = pygame.joystick.Joystick(event.device_index) - joysticks[joy.get_instance_id()] = joy - print("Joystick {} connencted".format(joy.get_instance_id())) - - if event.type == pygame.JOYDEVICEREMOVED: - del joysticks[event.instance_id] - print("Joystick {} disconnected".format(event.instance_id)) - - # Drawing step - # First, clear the screen to white. Don't put other drawing commands - # above this, or they will be erased with this command. - screen.fill("white") - text_print.reset() - - text_print.tprint(screen, "Number of joysticks: {}".format(len(joysticks))) - text_print.indent() - - # For each joystick: - for joystick in joysticks.values(): - jid = joystick.get_instance_id() - text_print.tprint(screen, "Joystick {}".format(jid)) - text_print.indent() - - # Get the name from the OS for the joystick. - name = joystick.get_name() - text_print.tprint(screen, "Joystick name: {}".format(name)) - - guid = joystick.get_guid() - text_print.tprint(screen, "GUID: {}".format(guid)) - - power_level = joystick.get_power_level() - text_print.tprint(screen, "Joystick's power level: {}".format(power_level)) - - # Usually axis run in pairs, up/down for one, and left/right for - # the other. Triggers count as axes. - axes = joystick.get_numaxes() - text_print.tprint(screen, "Number of axes: {}".format(axes)) - text_print.indent() - - for i in range(axes): - axis = joystick.get_axis(i) - text_print.tprint(screen, "Axis {} value: {:>6.3f}".format(i, axis)) - text_print.unindent() - - buttons = joystick.get_numbuttons() - text_print.tprint(screen, "Number of buttons: {}".format(buttons)) - text_print.indent() - - for i in range(buttons): - button = joystick.get_button(i) - text_print.tprint(screen, "Button {:>2} value: {}".format(i, button)) - text_print.unindent() - - hats = joystick.get_numhats() - text_print.tprint(screen, "Number of hats: {}".format(hats)) - text_print.indent() - - # Hat position. All or nothing for direction, not a float like - # get_axis(). Position is a tuple of int values (x, y). - for i in range(hats): - hat = joystick.get_hat(i) - text_print.tprint(screen, "Hat {} value: {}".format(i, str(hat))) - text_print.unindent() - - text_print.unindent() - - # Go ahead and update the screen with what we've drawn. - pygame.display.flip() - - # Limit to 30 frames per second. - clock.tick(30) - - -if __name__ == "__main__": - main() - # If you forget this line, the program will 'hang' - # on exit if running from IDLE. - pygame.quit() diff --git a/venv/Lib/site-packages/pygame/examples/liquid.py b/venv/Lib/site-packages/pygame/examples/liquid.py deleted file mode 100644 index d55ba9c..0000000 --- a/venv/Lib/site-packages/pygame/examples/liquid.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.liquid - -This example demonstrates a simplish water effect of an -image. It attempts to create a hardware display surface that -can use pageflipping for faster updates. Note that the colormap -from the loaded GIF image is copied to the colormap for the -display surface. - -This is based on the demo named F2KWarp by Brad Graham of Freedom2000 -done in BlitzBasic. I was just translating the BlitzBasic code to -pygame to compare the results. 
I didn't bother porting the text and -sound stuff, that's an easy enough challenge for the reader :] -""" - -import pygame as pg -import os -from math import sin -import time - -main_dir = os.path.split(os.path.abspath(__file__))[0] - - -def main(): - # initialize and setup screen - pg.init() - screen = pg.display.set_mode((640, 480), pg.HWSURFACE | pg.DOUBLEBUF) - - # load image and quadruple - imagename = os.path.join(main_dir, "data", "liquid.bmp") - bitmap = pg.image.load(imagename) - bitmap = pg.transform.scale2x(bitmap) - bitmap = pg.transform.scale2x(bitmap) - - # get the image and screen in the same format - if screen.get_bitsize() == 8: - screen.set_palette(bitmap.get_palette()) - else: - bitmap = bitmap.convert() - - # prep some variables - anim = 0.0 - - # mainloop - xblocks = range(0, 640, 20) - yblocks = range(0, 480, 20) - stopevents = pg.QUIT, pg.KEYDOWN, pg.MOUSEBUTTONDOWN - while 1: - for e in pg.event.get(): - if e.type in stopevents: - return - - anim = anim + 0.02 - for x in xblocks: - xpos = (x + (sin(anim + x * 0.01) * 15)) + 20 - for y in yblocks: - ypos = (y + (sin(anim + y * 0.01) * 15)) + 20 - screen.blit(bitmap, (x, y), (xpos, ypos, 20, 20)) - - pg.display.flip() - time.sleep(0.01) - - -if __name__ == "__main__": - main() - pg.quit() - - -"""BTW, here is the code from the BlitzBasic example this was derived -from. i've snipped the sound and text stuff out. ------------------------------------------------------------------ -; Brad@freedom2000.com - -; Load a bmp pic (800x600) and slice it into 1600 squares -Graphics 640,480 -SetBuffer BackBuffer() -bitmap$="f2kwarp.bmp" -pic=LoadAnimImage(bitmap$,20,15,0,1600) - -; use SIN to move all 1600 squares around to give liquid effect -Repeat -f=0:w=w+10:If w=360 Then w=0 -For y=0 To 599 Step 15 -For x = 0 To 799 Step 20 -f=f+1:If f=1600 Then f=0 -DrawBlock pic,(x+(Sin(w+x)*40))/1.7+80,(y+(Sin(w+y)*40))/1.7+60,f -Next:Next:Flip:Cls -Until KeyDown(1) -""" diff --git a/venv/Lib/site-packages/pygame/examples/mask.py b/venv/Lib/site-packages/pygame/examples/mask.py deleted file mode 100644 index dae3b2b..0000000 --- a/venv/Lib/site-packages/pygame/examples/mask.py +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.mask - -A pygame.mask collision detection production. - - - - -Brought - - to - you - by - - the - -pixels - 0000000000000 - and - 111111 - - -This is 32 bits: - 11111111111111111111111111111111 - -There are 32 or 64 bits in a computer 'word'. -Rather than using one word for a pixel, -the mask module represents 32 or 64 pixels in one word. -As you can imagine, this makes things fast, and saves memory. - -Compute intensive things like collision detection, -and computer vision benefit greatly from this. - - -This module can also be run as a stand-alone program, excepting -one or more image file names as command line arguments. 
-""" - -import sys -import os -import random - -import pygame as pg - - -def maskFromSurface(surface, threshold=127): - return pg.mask.from_surface(surface, threshold) - - -def vadd(x, y): - return [x[0] + y[0], x[1] + y[1]] - - -def vsub(x, y): - return [x[0] - y[0], x[1] - y[1]] - - -def vdot(x, y): - return x[0] * y[0] + x[1] * y[1] - - -class Sprite: - def __init__(self, surface, mask=None): - self.surface = surface - if mask: - self.mask = mask - else: - self.mask = maskFromSurface(self.surface) - self.setPos([0, 0]) - self.setVelocity([0, 0]) - - def setPos(self, pos): - self.pos = [pos[0], pos[1]] - - def setVelocity(self, vel): - self.vel = [vel[0], vel[1]] - - def move(self, dr): - self.pos = vadd(self.pos, dr) - - def kick(self, impulse): - self.vel[0] += impulse[0] - self.vel[1] += impulse[1] - - def collide(self, s): - """Test if the sprites are colliding and - resolve the collision in this case.""" - offset = [int(x) for x in vsub(s.pos, self.pos)] - overlap = self.mask.overlap_area(s.mask, offset) - if overlap == 0: - return - """Calculate collision normal""" - nx = self.mask.overlap_area( - s.mask, (offset[0] + 1, offset[1]) - ) - self.mask.overlap_area(s.mask, (offset[0] - 1, offset[1])) - ny = self.mask.overlap_area( - s.mask, (offset[0], offset[1] + 1) - ) - self.mask.overlap_area(s.mask, (offset[0], offset[1] - 1)) - if nx == 0 and ny == 0: - """One sprite is inside another""" - return - n = [nx, ny] - dv = vsub(s.vel, self.vel) - J = vdot(dv, n) / (2 * vdot(n, n)) - if J > 0: - """Can scale up to 2*J here to get bouncy collisions""" - J *= 1.9 - self.kick([nx * J, ny * J]) - s.kick([-J * nx, -J * ny]) - return - - # """Separate the sprites""" - # c1 = -overlap/vdot(n,n) - # c2 = -c1/2 - # self.move([c2*nx,c2*ny]) - # s.move([(c1+c2)*nx,(c1+c2)*ny]) - - def update(self, dt): - self.pos[0] += dt * self.vel[0] - self.pos[1] += dt * self.vel[1] - - -def main(*args): - """Display multiple images bounce off each other using collision detection - - Positional arguments: - one or more image file names. - - This pg.masks demo will display multiple moving sprites bouncing - off each other. More than one sprite image can be provided. 
- """ - - if len(args) == 0: - raise ValueError("Require at least one image file name: non given") - print("Press any key to quit") - pg.init() - screen = pg.display.set_mode((640, 480)) - images = [] - masks = [] - for impath in args: - images.append(pg.image.load(impath).convert_alpha()) - masks.append(maskFromSurface(images[-1])) - - numtimes = 10 - import time - - t1 = time.time() - for x in range(numtimes): - unused_mask = maskFromSurface(images[-1]) - t2 = time.time() - - print("python maskFromSurface :%s" % (t2 - t1)) - - t1 = time.time() - for x in range(numtimes): - unused_mask = pg.mask.from_surface(images[-1]) - t2 = time.time() - - print("C pg.mask.from_surface :%s" % (t2 - t1)) - - sprites = [] - for i in range(20): - j = i % len(images) - s = Sprite(images[j], masks[j]) - s.setPos( - ( - random.uniform(0, screen.get_width()), - random.uniform(0, screen.get_height()), - ) - ) - s.setVelocity((random.uniform(-5, 5), random.uniform(-5, 5))) - sprites.append(s) - pg.time.set_timer(pg.USEREVENT, 33) - while 1: - event = pg.event.wait() - if event.type == pg.QUIT: - return - elif event.type == pg.USEREVENT: - - # Do both mechanics and screen update - screen.fill((240, 220, 100)) - for i, sprite in enumerate(sprites): - for j in range(i + 1, len(sprites)): - sprite.collide(sprites[j]) - for s in sprites: - s.update(1) - if s.pos[0] < -s.surface.get_width() - 3: - s.pos[0] = screen.get_width() - elif s.pos[0] > screen.get_width() + 3: - s.pos[0] = -s.surface.get_width() - if s.pos[1] < -s.surface.get_height() - 3: - s.pos[1] = screen.get_height() - elif s.pos[1] > screen.get_height() + 3: - s.pos[1] = -s.surface.get_height() - screen.blit(s.surface, s.pos) - pg.display.update() - elif event.type == pg.KEYDOWN: - return - - -if __name__ == "__main__": - if len(sys.argv) < 2: - print("Usage: mask.py [ ...]") - print("Let many copies of IMAGE(s) bounce against each other") - print("Press any key to quit") - main_dir = os.path.split(os.path.abspath(__file__))[0] - imagename = os.path.join(main_dir, "data", "chimp.png") - main(imagename) - - else: - main(*sys.argv[1:]) - pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/midi.py b/venv/Lib/site-packages/pygame/examples/midi.py deleted file mode 100644 index 3e184ef..0000000 --- a/venv/Lib/site-packages/pygame/examples/midi.py +++ /dev/null @@ -1,877 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.midi - -midi input, and a separate example of midi output. - -By default it runs the output example. 
- -python -m pygame.examples.midi --output -python -m pygame.examples.midi --input -python -m pygame.examples.midi --input -""" - -import sys -import os - -import pygame as pg -import pygame.midi - -# black and white piano keys use b/w color values directly -BACKGROUNDCOLOR = "slategray" - - -def print_device_info(): - pygame.midi.init() - _print_device_info() - pygame.midi.quit() - - -def _print_device_info(): - for i in range(pygame.midi.get_count()): - r = pygame.midi.get_device_info(i) - (interf, name, input, output, opened) = r - - in_out = "" - if input: - in_out = "(input)" - if output: - in_out = "(output)" - - print( - "%2i: interface :%s:, name :%s:, opened :%s: %s" - % (i, interf, name, opened, in_out) - ) - - -def input_main(device_id=None): - pg.init() - - pygame.midi.init() - - _print_device_info() - - if device_id is None: - input_id = pygame.midi.get_default_input_id() - else: - input_id = device_id - - print("using input_id :%s:" % input_id) - i = pygame.midi.Input(input_id) - - pg.display.set_mode((1, 1)) - - going = True - while going: - events = pygame.event.get() - for e in events: - if e.type in [pg.QUIT]: - going = False - if e.type in [pg.KEYDOWN]: - going = False - if e.type in [pygame.midi.MIDIIN]: - print(e) - - if i.poll(): - midi_events = i.read(10) - # convert them into pygame events. - midi_evs = pygame.midi.midis2events(midi_events, i.device_id) - - for m_e in midi_evs: - pygame.event.post(m_e) - - del i - pygame.midi.quit() - - -def output_main(device_id=None): - """Execute a musical keyboard example for the Church Organ instrument - - This is a piano keyboard example, with a two octave keyboard, starting at - note F3. Left mouse down over a key starts a note, left up stops it. The - notes are also mapped to the computer keyboard keys, assuming an American - English PC keyboard (sorry everyone else, but I don't know if I can map to - absolute key position instead of value.) The white keys are on the second - row, TAB to BACKSLASH, starting with note F3. The black keys map to the top - row, '1' to BACKSPACE, starting with F#3. 'r' is middle C. Close the - window or press ESCAPE to quit the program. Key velocity (note - amplitude) varies vertically on the keyboard image, with minimum velocity - at the top of a key and maximum velocity at bottom. - - Default Midi output, no device_id given, is to the default output device - for the computer. - - """ - - # A note to new pygamers: - # - # All the midi module stuff is in this function. It is unnecessary to - # understand how the keyboard display works to appreciate how midi - # messages are sent. - - # The keyboard is drawn by a Keyboard instance. This instance maps Midi - # notes to musical keyboard keys. A regions surface maps window position - # to (Midi note, velocity) pairs. A key_mapping dictionary does the same - # for computer keyboard keys. Midi sound is controlled with direct method - # calls to a pygame.midi.Output instance. - # - # Things to consider when using pygame.midi: - # - # 1) Initialize the midi module with a to pygame.midi.init(). - # 2) Create a midi.Output instance for the desired output device port. - # 3) Select instruments with set_instrument() method calls. - # 4) Play notes with note_on() and note_off() method calls. - # 5) Call pygame.midi.Quit() when finished. Though the midi module tries - # to ensure that midi is properly shut down, it is best to do it - # explicitly. A try/finally statement is the safest way to do this. 
- # - - # GRAND_PIANO = 0 - CHURCH_ORGAN = 19 - - instrument = CHURCH_ORGAN - # instrument = GRAND_PIANO - start_note = 53 # F3 (white key note), start_note != 0 - n_notes = 24 # Two octaves (14 white keys) - - key_mapping = make_key_mapping( - [ - pg.K_TAB, - pg.K_1, - pg.K_q, - pg.K_2, - pg.K_w, - pg.K_3, - pg.K_e, - pg.K_r, - pg.K_5, - pg.K_t, - pg.K_6, - pg.K_y, - pg.K_u, - pg.K_8, - pg.K_i, - pg.K_9, - pg.K_o, - pg.K_0, - pg.K_p, - pg.K_LEFTBRACKET, - pg.K_EQUALS, - pg.K_RIGHTBRACKET, - pg.K_BACKSPACE, - pg.K_BACKSLASH, - ], - start_note, - ) - - pg.init() - pygame.midi.init() - - _print_device_info() - - if device_id is None: - port = pygame.midi.get_default_output_id() - else: - port = device_id - - print("using output_id :%s:" % port) - - midi_out = pygame.midi.Output(port, 0) - try: - midi_out.set_instrument(instrument) - keyboard = Keyboard(start_note, n_notes) - - screen = pg.display.set_mode(keyboard.rect.size) - screen.fill(BACKGROUNDCOLOR) - pg.display.flip() - - background = pg.Surface(screen.get_size()) - background.fill(BACKGROUNDCOLOR) - dirty_rects = [] - keyboard.draw(screen, background, dirty_rects) - pg.display.update(dirty_rects) - - regions = pg.Surface(screen.get_size()) # initial color (0,0,0) - keyboard.map_regions(regions) - - pg.event.set_blocked(pg.MOUSEMOTION) - mouse_note = 0 - on_notes = set() - while 1: - e = pg.event.wait() - if e.type == pg.MOUSEBUTTONDOWN: - mouse_note, velocity, __, __ = regions.get_at(e.pos) - if mouse_note and mouse_note not in on_notes: - keyboard.key_down(mouse_note) - midi_out.note_on(mouse_note, velocity) - on_notes.add(mouse_note) - else: - mouse_note = 0 - elif e.type == pg.MOUSEBUTTONUP: - if mouse_note: - midi_out.note_off(mouse_note) - keyboard.key_up(mouse_note) - on_notes.remove(mouse_note) - mouse_note = 0 - elif e.type == pg.QUIT: - break - elif e.type == pg.KEYDOWN: - if e.key == pg.K_ESCAPE: - break - try: - note, velocity = key_mapping[e.key] - except KeyError: - pass - else: - if note not in on_notes: - keyboard.key_down(note) - midi_out.note_on(note, velocity) - on_notes.add(note) - elif e.type == pg.KEYUP: - try: - note, __ = key_mapping[e.key] - except KeyError: - pass - else: - if note in on_notes and note != mouse_note: - keyboard.key_up(note) - midi_out.note_off(note, 0) - on_notes.remove(note) - - dirty_rects = [] - keyboard.draw(screen, background, dirty_rects) - pg.display.update(dirty_rects) - finally: - del midi_out - pygame.midi.quit() - - -def make_key_mapping(keys, start_note): - """Return a dictionary of (note, velocity) by computer keyboard key code""" - mapping = {} - for i, key in enumerate(keys): - mapping[key] = (start_note + i, 127) - return mapping - - -class NullKey(object): - """A dummy key that ignores events passed to it by other keys - - A NullKey instance is the left key instance used by default - for the left most keyboard key. - - """ - - def _right_white_down(self): - pass - - def _right_white_up(self): - pass - - def _right_black_down(self): - pass - - def _right_black_up(self): - pass - - -null_key = NullKey() - - -def key_class(updates, image_strip, image_rects, is_white_key=True): - """Return a keyboard key widget class - - Arguments: - updates - a set into which a key instance adds itself if it needs - redrawing. - image_strip - The surface containing the images of all key states. - image_rects - A list of Rects giving the regions within image_strip that - are relevant to this key class. - is_white_key (default True) - Set false if this is a black key. 
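A small worked example of make_key_mapping from earlier in this file; the dict comprehension below is just an equivalent restatement of its loop, with two keys starting at MIDI note 53 (F3) and the fixed velocity of 127:

import pygame as pg

def make_key_mapping(keys, start_note):
    # same behaviour as the example's loop, written as a dict comprehension
    return {key: (start_note + i, 127) for i, key in enumerate(keys)}

assert make_key_mapping([pg.K_TAB, pg.K_1], 53) == {pg.K_TAB: (53, 127), pg.K_1: (54, 127)}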
- - This function automates the creation of a key widget class for the - three basic key types. A key has two basic states, up or down ( - depressed). Corresponding up and down images are drawn for each - of these two states. But to give the illusion of depth, a key - may have shadows cast upon it by the adjacent keys to its right. - These shadows change depending on the up/down state of the key and - its neighbors. So a key may support multiple images and states - depending on the shadows. A key type is determined by the length - of image_rects and the value of is_white. - - """ - - # Naming convention: Variables used by the Key class as part of a - # closure start with 'c_'. - - # State logic and shadows: - # - # A key may cast a shadow upon the key to its left. A black key casts a - # shadow on an adjacent white key. The shadow changes depending of whether - # the black or white key is depressed. A white key casts a shadow on the - # white key to its left if it is up and the left key is down. Therefore - # a keys state, and image it will draw, is determined entirely by its - # itself and the key immediately adjacent to it on the right. A white key - # is always assumed to have an adjacent white key. - # - # There can be up to eight key states, representing all permutations - # of the three fundamental states of self up/down, adjacent white - # right up/down, adjacent black up/down. - # - down_state_none = 0 - down_state_self = 1 - down_state_white = down_state_self << 1 - down_state_self_white = down_state_self | down_state_white - down_state_black = down_state_white << 1 - down_state_self_black = down_state_self | down_state_black - down_state_white_black = down_state_white | down_state_black - down_state_all = down_state_self | down_state_white_black - - # Some values used in the class. - # - c_down_state_initial = down_state_none - c_down_state_rect_initial = image_rects[0] - c_updates = updates - c_image_strip = image_strip - c_width, c_height = image_rects[0].size - - # A key propagates its up/down state change to the adjacent white key on - # the left by calling the adjacent key's _right_black_down or - # _right_white_down method. - # - if is_white_key: - key_color = "white" - else: - key_color = "black" - c_notify_down_method = "_right_%s_down" % key_color - c_notify_up_method = "_right_%s_up" % key_color - - # Images: - # - # A black key only needs two images, for the up and down states. Its - # appearance is unaffected by the adjacent keys to its right, which cast no - # shadows upon it. - # - # A white key with a no adjacent black to its right only needs three - # images, for self up, self down, and both self and adjacent white down. - # - # A white key with both a black and white key to its right needs six - # images: self up, self up and adjacent black down, self down, self and - # adjacent white down, self and adjacent black down, and all three down. - # - # Each 'c_event' dictionary maps the current key state to a new key state, - # along with corresponding image, for the related event. If no redrawing - # is required for the state change then the image rect is simply None. 
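An illustration-only sketch of the transition tables the closure builds below: each table maps a key's current state integer to a (new state, image rect) pair, with the rect strings here standing in for the real image_rects entries:

down_state_none, down_state_self, down_state_white = 0, 1, 2
down_state_self_white = down_state_self | down_state_white

event_down = {
    down_state_none:  (down_state_self, "rect for 'self down'"),
    down_state_white: (down_state_self_white, "rect for 'self and right white down'"),
}
# A key whose right-hand white neighbour is already down receives a down() event:
new_state, rect = event_down[down_state_white]   # -> (3, "rect for 'self and right white down'")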
- # - c_event_down = {down_state_none: (down_state_self, image_rects[1])} - c_event_up = {down_state_self: (down_state_none, image_rects[0])} - c_event_right_white_down = { - down_state_none: (down_state_none, None), - down_state_self: (down_state_self, None), - } - c_event_right_white_up = c_event_right_white_down.copy() - c_event_right_black_down = c_event_right_white_down.copy() - c_event_right_black_up = c_event_right_white_down.copy() - if len(image_rects) > 2: - c_event_down[down_state_white] = (down_state_self_white, image_rects[2]) - c_event_up[down_state_self_white] = (down_state_white, image_rects[0]) - c_event_right_white_down[down_state_none] = (down_state_white, None) - c_event_right_white_down[down_state_self] = ( - down_state_self_white, - image_rects[2], - ) - c_event_right_white_up[down_state_white] = (down_state_none, None) - c_event_right_white_up[down_state_self_white] = ( - down_state_self, - image_rects[1], - ) - c_event_right_black_down[down_state_white] = (down_state_white, None) - c_event_right_black_down[down_state_self_white] = (down_state_self_white, None) - c_event_right_black_up[down_state_white] = (down_state_white, None) - c_event_right_black_up[down_state_self_white] = (down_state_self_white, None) - if len(image_rects) > 3: - c_event_down[down_state_black] = (down_state_self_black, image_rects[4]) - c_event_down[down_state_white_black] = (down_state_all, image_rects[5]) - c_event_up[down_state_self_black] = (down_state_black, image_rects[3]) - c_event_up[down_state_all] = (down_state_white_black, image_rects[3]) - c_event_right_white_down[down_state_black] = (down_state_white_black, None) - c_event_right_white_down[down_state_self_black] = ( - down_state_all, - image_rects[5], - ) - c_event_right_white_up[down_state_white_black] = (down_state_black, None) - c_event_right_white_up[down_state_all] = (down_state_self_black, image_rects[4]) - c_event_right_black_down[down_state_none] = (down_state_black, image_rects[3]) - c_event_right_black_down[down_state_self] = ( - down_state_self_black, - image_rects[4], - ) - c_event_right_black_down[down_state_white] = ( - down_state_white_black, - image_rects[3], - ) - c_event_right_black_down[down_state_self_white] = ( - down_state_all, - image_rects[5], - ) - c_event_right_black_up[down_state_black] = (down_state_none, image_rects[0]) - c_event_right_black_up[down_state_self_black] = ( - down_state_self, - image_rects[1], - ) - c_event_right_black_up[down_state_white_black] = ( - down_state_white, - image_rects[0], - ) - c_event_right_black_up[down_state_all] = (down_state_self_white, image_rects[2]) - - class Key(object): - """A key widget, maintains key state and draws the key's image - - Constructor arguments: - ident - A unique key identifier. Any immutable type suitable as a key. - posn - The location of the key on the display surface. - key_left - Optional, the adjacent white key to the left. Changes in - up and down state are propagated to that key. - - A key has an associated position and state. Related to state is the - image drawn. State changes are managed with method calls, one method - per event type. The up and down event methods are public. Other - internal methods are for passing on state changes to the key_left - key instance. - - """ - - is_white = is_white_key - - def __init__(self, ident, posn, key_left=None): - """Return a new Key instance - - The initial state is up, with all adjacent keys to the right also - up. 
- - """ - if key_left is None: - key_left = null_key - rect = pg.Rect(posn[0], posn[1], c_width, c_height) - self.rect = rect - self._state = c_down_state_initial - self._source_rect = c_down_state_rect_initial - self._ident = ident - self._hash = hash(ident) - self._notify_down = getattr(key_left, c_notify_down_method) - self._notify_up = getattr(key_left, c_notify_up_method) - self._key_left = key_left - self._background_rect = pg.Rect(rect.left, rect.bottom - 10, c_width, 10) - c_updates.add(self) - - def down(self): - """Signal that this key has been depressed (is down)""" - - self._state, source_rect = c_event_down[self._state] - if source_rect is not None: - self._source_rect = source_rect - c_updates.add(self) - self._notify_down() - - def up(self): - """Signal that this key has been released (is up)""" - - self._state, source_rect = c_event_up[self._state] - if source_rect is not None: - self._source_rect = source_rect - c_updates.add(self) - self._notify_up() - - def _right_white_down(self): - """Signal that the adjacent white key has been depressed - - This method is for internal propagation of events between - key instances. - - """ - - self._state, source_rect = c_event_right_white_down[self._state] - if source_rect is not None: - self._source_rect = source_rect - c_updates.add(self) - - def _right_white_up(self): - """Signal that the adjacent white key has been released - - This method is for internal propagation of events between - key instances. - - """ - - self._state, source_rect = c_event_right_white_up[self._state] - if source_rect is not None: - self._source_rect = source_rect - c_updates.add(self) - - def _right_black_down(self): - """Signal that the adjacent black key has been depressed - - This method is for internal propagation of events between - key instances. - - """ - - self._state, source_rect = c_event_right_black_down[self._state] - if source_rect is not None: - self._source_rect = source_rect - c_updates.add(self) - - def _right_black_up(self): - """Signal that the adjacent black key has been released - - This method is for internal propagation of events between - key instances. - - """ - - self._state, source_rect = c_event_right_black_up[self._state] - if source_rect is not None: - self._source_rect = source_rect - c_updates.add(self) - - def __eq__(self, other): - """True if same identifiers""" - - return self._ident == other._ident - - def __hash__(self): - """Return the immutable hash value""" - - return self._hash - - def __str__(self): - """Return the key's identifier and position as a string""" - - return "" % (self._ident, self.rect.top, self.rect.left) - - def draw(self, surf, background, dirty_rects): - """Redraw the key on the surface surf - - The background is redrawn. The altered region is added to the - dirty_rects list. - - """ - - surf.blit(background, self._background_rect, self._background_rect) - surf.blit(c_image_strip, self.rect, self._source_rect) - dirty_rects.append(self.rect) - - return Key - - -def key_images(): - """Return a keyboard keys image strip and a mapping of image locations - - The return tuple is a surface and a dictionary of rects mapped to key - type. - - This function encapsulates the constants relevant to the keyboard image - file. There are five key types. One is the black key. The other four - white keys are determined by the proximity of the black keys. The plain - white key has no black key adjacent to it. A white-left and white-right - key has a black key to the left or right of it respectively. 
A white-center - key has a black key on both sides. A key may have up to six related - images depending on the state of adjacent keys to its right. - - """ - - my_dir = os.path.split(os.path.abspath(__file__))[0] - strip_file = os.path.join(my_dir, "data", "midikeys.png") - white_key_width = 42 - white_key_height = 160 - black_key_width = 22 - black_key_height = 94 - strip = pg.image.load(strip_file) - names = [ - "black none", - "black self", - "white none", - "white self", - "white self-white", - "white-left none", - "white-left self", - "white-left black", - "white-left self-black", - "white-left self-white", - "white-left all", - "white-center none", - "white-center self", - "white-center black", - "white-center self-black", - "white-center self-white", - "white-center all", - "white-right none", - "white-right self", - "white-right self-white", - ] - rects = {} - for i in range(2): - rects[names[i]] = pg.Rect( - i * white_key_width, 0, black_key_width, black_key_height - ) - for i in range(2, len(names)): - rects[names[i]] = pg.Rect( - i * white_key_width, 0, white_key_width, white_key_height - ) - return strip, rects - - -class Keyboard(object): - """Musical keyboard widget - - Constructor arguments: - start_note: midi note value of the starting note on the keyboard. - n_notes: number of notes (keys) on the keyboard. - - A Keyboard instance draws the musical keyboard and maintains the state of - all the keyboard keys. Individual keys can be in a down (depressed) or - up (released) state. - - """ - - _image_strip, _rects = key_images() - - white_key_width, white_key_height = _rects["white none"].size - black_key_width, black_key_height = _rects["black none"].size - - _updates = set() - - # There are five key classes, representing key shape: - # black key (BlackKey), plain white key (WhiteKey), white key to the left - # of a black key (WhiteKeyLeft), white key between two black keys - # (WhiteKeyCenter), and white key to the right of a black key - # (WhiteKeyRight). - BlackKey = key_class( - _updates, _image_strip, [_rects["black none"], _rects["black self"]], False - ) - WhiteKey = key_class( - _updates, - _image_strip, - [_rects["white none"], _rects["white self"], _rects["white self-white"]], - ) - WhiteKeyLeft = key_class( - _updates, - _image_strip, - [ - _rects["white-left none"], - _rects["white-left self"], - _rects["white-left self-white"], - _rects["white-left black"], - _rects["white-left self-black"], - _rects["white-left all"], - ], - ) - WhiteKeyCenter = key_class( - _updates, - _image_strip, - [ - _rects["white-center none"], - _rects["white-center self"], - _rects["white-center self-white"], - _rects["white-center black"], - _rects["white-center self-black"], - _rects["white-center all"], - ], - ) - WhiteKeyRight = key_class( - _updates, - _image_strip, - [ - _rects["white-right none"], - _rects["white-right self"], - _rects["white-right self-white"], - ], - ) - - def __init__(self, start_note, n_notes): - """Return a new Keyboard instance with n_note keys""" - - self._start_note = start_note - self._end_note = start_note + n_notes - 1 - self._add_keys() - - def _add_keys(self): - """Populate the keyboard with key instances - - Set the _keys and rect attributes. - - """ - - # Keys are entered in a list, where index is Midi note. Since there are - # only 128 possible Midi notes the list length is managable. Unassigned - # note positions should never be accessed, so are set None to ensure - # the bug is quickly detected. 
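A tiny illustration of the lookup structure described above, before the real code that follows: a 128-slot list indexed by MIDI note, where unassigned notes deliberately stay None (the Key instance below is a placeholder string):

key_map = [None] * 128
key_map[53] = "Key widget for F3"   # placeholder; _add_keys stores real Key instances
assert key_map[52] is None          # notes outside the keyboard are never assigned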
- # - key_map = [None] * 128 - - start_note = self._start_note - end_note = self._end_note - black_offset = self.black_key_width // 2 - prev_white_key = None - x = y = 0 - if is_white_key(start_note): - is_prev_white = True - else: - x += black_offset - is_prev_white = False - for note in range(start_note, end_note + 1): - ident = note # For now notes uniquely identify keyboard keys. - if is_white_key(note): - if is_prev_white: - if note == end_note or is_white_key(note + 1): - key = self.WhiteKey(ident, (x, y), prev_white_key) - else: - key = self.WhiteKeyLeft(ident, (x, y), prev_white_key) - else: - if note == end_note or is_white_key(note + 1): - key = self.WhiteKeyRight(ident, (x, y), prev_white_key) - else: - key = self.WhiteKeyCenter(ident, (x, y), prev_white_key) - is_prev_white = True - x += self.white_key_width - prev_white_key = key - else: - key = self.BlackKey(ident, (x - black_offset, y), prev_white_key) - is_prev_white = False - key_map[note] = key - self._keys = key_map - - kb_width = key_map[self._end_note].rect.right - kb_height = self.white_key_height - self.rect = pg.Rect(0, 0, kb_width, kb_height) - - def map_regions(self, regions): - """Draw the key regions onto surface regions. - - Regions must have at least 3 byte pixels. Each pixel of the keyboard - rectangle is set to the color (note, velocity, 0). The regions surface - must be at least as large as (0, 0, self.rect.left, self.rect.bottom) - - """ - - # First draw the white key regions. Then add the overlapping - # black key regions. - # - cutoff = self.black_key_height - black_keys = [] - for note in range(self._start_note, self._end_note + 1): - key = self._keys[note] - if key.is_white: - fill_region(regions, note, key.rect, cutoff) - else: - black_keys.append((note, key)) - for note, key in black_keys: - fill_region(regions, note, key.rect, cutoff) - - def draw(self, surf, background, dirty_rects): - """Redraw all altered keyboard keys""" - - changed_keys = self._updates - while changed_keys: - changed_keys.pop().draw(surf, background, dirty_rects) - - def key_down(self, note): - """Signal a key down event for note""" - - self._keys[note].down() - - def key_up(self, note): - """Signal a key up event for note""" - - self._keys[note].up() - - -def fill_region(regions, note, rect, cutoff): - """Fill the region defined by rect with a (note, velocity, 0) color - - The velocity varies from a small value at the top of the region to - 127 at the bottom. The vertical region 0 to cutoff is split into - three parts, with velocities 42, 84 and 127. Everything below cutoff - has velocity 127. 
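A small worked sketch of that velocity arithmetic, using the key sizes from key_images in this example (white key 160 px tall, cutoff = black key height = 94 px, so delta_height is 94 // 3 == 31); py is the click's vertical offset from the top of the key:

def velocity_at(py, cutoff=94):
    delta = cutoff // 3        # 31 px per band
    if py < delta:
        return 42              # top band
    if py < 2 * delta:
        return 84              # middle band
    return 127                 # everything from 62 px down, including below the cutoff

assert velocity_at(10) == 42 and velocity_at(40) == 84 and velocity_at(150) == 127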
- - """ - - x, y, width, height = rect - if cutoff is None: - cutoff = height - delta_height = cutoff // 3 - regions.fill((note, 42, 0), (x, y, width, delta_height)) - regions.fill((note, 84, 0), (x, y + delta_height, width, delta_height)) - regions.fill( - (note, 127, 0), (x, y + 2 * delta_height, width, height - 2 * delta_height) - ) - - -def is_white_key(note): - """True if note is represented by a white key""" - - key_pattern = [ - True, - False, - True, - True, - False, - True, - False, - True, - True, - False, - True, - False, - ] - return key_pattern[(note - 21) % len(key_pattern)] - - -def usage(): - print("--input [device_id] : Midi message logger") - print("--output [device_id] : Midi piano keyboard") - print("--list : list available midi devices") - - -def main(mode="output", device_id=None): - """Run a Midi example - - Arguments: - mode - if 'output' run a midi keyboard output example - 'input' run a midi event logger input example - 'list' list available midi devices - (default 'output') - device_id - midi device number; if None then use the default midi input or - output device for the system - - """ - - if mode == "input": - input_main(device_id) - elif mode == "output": - output_main(device_id) - elif mode == "list": - print_device_info() - else: - raise ValueError("Unknown mode option '%s'" % mode) - - -if __name__ == "__main__": - - try: - device_id = int(sys.argv[-1]) - except ValueError: - device_id = None - - if "--input" in sys.argv or "-i" in sys.argv: - - input_main(device_id) - - elif "--output" in sys.argv or "-o" in sys.argv: - output_main(device_id) - elif "--list" in sys.argv or "-l" in sys.argv: - print_device_info() - else: - usage() - - pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/moveit.py b/venv/Lib/site-packages/pygame/examples/moveit.py deleted file mode 100644 index b902ce5..0000000 --- a/venv/Lib/site-packages/pygame/examples/moveit.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.moveit - -This is the full and final example from the Pygame Tutorial, -"How Do I Make It Move". It creates 10 objects and animates -them on the screen. - -Note it's a bit scant on error checking, but it's easy to read. :] -Fortunately, this is python, and we needn't wrestle with a pile of -error codes. -""" -import os -import pygame as pg - -main_dir = os.path.split(os.path.abspath(__file__))[0] - -# our game object class -class GameObject: - def __init__(self, image, height, speed): - self.speed = speed - self.image = image - self.pos = image.get_rect().move(0, height) - - def move(self): - self.pos = self.pos.move(self.speed, 0) - if self.pos.right > 600: - self.pos.left = 0 - - -# quick function to load an image -def load_image(name): - path = os.path.join(main_dir, "data", name) - return pg.image.load(path).convert() - - -# here's the full code -def main(): - pg.init() - screen = pg.display.set_mode((640, 480)) - - player = load_image("player1.gif") - background = load_image("liquid.bmp") - - # scale the background image so that it fills the window and - # successfully overwrites the old sprite position. 
- background = pg.transform.scale2x(background) - background = pg.transform.scale2x(background) - - screen.blit(background, (0, 0)) - - objects = [] - for x in range(10): - o = GameObject(player, x * 40, x) - objects.append(o) - - while 1: - for event in pg.event.get(): - if event.type in (pg.QUIT, pg.KEYDOWN): - return - - for o in objects: - screen.blit(background, o.pos, o.pos) - for o in objects: - o.move() - screen.blit(o.image, o.pos) - - pg.display.update() - - -if __name__ == "__main__": - main() - pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/music_drop_fade.py b/venv/Lib/site-packages/pygame/examples/music_drop_fade.py deleted file mode 100644 index 9d83546..0000000 --- a/venv/Lib/site-packages/pygame/examples/music_drop_fade.py +++ /dev/null @@ -1,249 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.music_drop_fade -Fade in and play music from a list while observing several events - -Adds music files to a playlist whenever played by one of the following methods -Music files passed from the commandline are played -Music files and filenames are played when drag and dropped onto the pygame window -Polls the clipboard and plays music files if it finds one there - -Keyboard Controls: -* Press space or enter to pause music playback -* Press up or down to change the music volume -* Press left or right to seek 5 seconds into the track -* Press escape to quit -* Press any other button to skip to the next music file in the list -""" - -import pygame as pg -import os, sys - -VOLUME_CHANGE_AMOUNT = 0.02 # how fast should up and down arrows change the volume? - - -def add_file(filename): - """ - This function will check if filename exists and is a music file - If it is the file will be added to a list of music files(even if already there) - Type checking is by the extension of the file, not by its contents - We can only discover if the file is valid when we mixer.music.load() it later - - It looks in the file directory and its data subdirectory - """ - if filename.rpartition(".")[2].lower() not in music_file_types: - print("{} not added to file list".format(filename)) - print("only these files types are allowed: ", music_file_types) - return False - elif os.path.exists(filename): - music_file_list.append(filename) - elif os.path.exists(os.path.join(main_dir, filename)): - music_file_list.append(os.path.join(main_dir, filename)) - elif os.path.exists(os.path.join(data_dir, filename)): - music_file_list.append(os.path.join(data_dir, filename)) - else: - print("file not found") - return False - print("{} added to file list".format(filename)) - return True - - -def play_file(filename): - """ - This function will call add_file and play it if successful - The music will fade in during the first 4 seconds - set_endevent is used to post a MUSIC_DONE event when the song finishes - The main loop will call play_next() when the MUSIC_DONE event is received - """ - global starting_pos - - if add_file(filename): - try: # we must do this in case the file is not a valid audio file - pg.mixer.music.load(music_file_list[-1]) - except pg.error as e: - print(e) # print description such as 'Not an Ogg Vorbis audio stream' - if filename in music_file_list: - music_file_list.remove(filename) - print("{} removed from file list".format(filename)) - return - pg.mixer.music.play(fade_ms=4000) - pg.mixer.music.set_volume(volume) - - if filename.rpartition(".")[2].lower() in music_can_seek: - print("file supports seeking") - starting_pos = 0 - else: - print("file does not support seeking") - 
starting_pos = -1 - pg.mixer.music.set_endevent(MUSIC_DONE) - - -def play_next(): - """ - This function will play the next song in music_file_list - It uses pop(0) to get the next song and then appends it to the end of the list - The song will fade in during the first 4 seconds - """ - - global starting_pos - if len(music_file_list) > 1: - nxt = music_file_list.pop(0) - - try: - pg.mixer.music.load(nxt) - except pg.error as e: - print(e) - print("{} removed from file list".format(nxt)) - - music_file_list.append(nxt) - print("starting next song: ", nxt) - else: - nxt = music_file_list[0] - pg.mixer.music.play(fade_ms=4000) - pg.mixer.music.set_volume(volume) - pg.mixer.music.set_endevent(MUSIC_DONE) - - if nxt.rpartition(".")[2].lower() in music_can_seek: - starting_pos = 0 - else: - starting_pos = -1 - - -def draw_text_line(text, y=0): - """ - Draws a line of text onto the display surface - The text will be centered horizontally at the given y postition - The text's height is added to y and returned to the caller - """ - screen = pg.display.get_surface() - surf = font.render(text, 1, (255, 255, 255)) - y += surf.get_height() - x = (screen.get_width() - surf.get_width()) / 2 - screen.blit(surf, (x, y)) - return y - - -def change_music_postion(amount): - """ - Changes current playback postition by amount seconds. - This only works with OGG and MP3 files. - music.get_pos() returns how many milliseconds the song has played, not - the current postion in the file. We must track the starting postion - ourselves. music.set_pos() will set the position in seconds. - """ - global starting_pos - - if starting_pos >= 0: # will be -1 unless play_file() was OGG or MP3 - played_for = pg.mixer.music.get_pos() / 1000.0 - old_pos = starting_pos + played_for - starting_pos = old_pos + amount - pg.mixer.music.play(start=starting_pos) - print("jumped from {} to {}".format(old_pos, starting_pos)) - - -MUSIC_DONE = pg.event.custom_type() # event to be set as mixer.music.set_endevent() -main_dir = os.path.split(os.path.abspath(__file__))[0] -data_dir = os.path.join(main_dir, "data") - -starting_pos = 0 # needed to fast forward and rewind -volume = 0.75 -music_file_list = [] -music_file_types = ("mp3", "ogg", "mid", "mod", "it", "xm", "wav") -music_can_seek = ("mp3", "ogg", "mod", "it", "xm") - - -def main(): - global font # this will be used by the draw_text_line function - global volume, starting_pos - running = True - paused = False - - # we will be polling for key up and key down events - # users should be able to change the volume by holding the up and down arrows - # the change_volume variable will be set by key down events and cleared by key up events - change_volume = 0 - - pg.init() - pg.display.set_mode((640, 480)) - font = pg.font.SysFont("Arial", 24) - clock = pg.time.Clock() - - pg.scrap.init() - pg.SCRAP_TEXT = pg.scrap.get_types()[0] # TODO remove when scrap module is fixed - clipped = pg.scrap.get(pg.SCRAP_TEXT).decode("UTF-8") - # store the current text from the clipboard TODO remove decode - - # add the command line arguments to the music_file_list - for arg in sys.argv[1:]: - add_file(arg) - play_file("house_lo.ogg") # play default music included with pygame - - # draw instructions on screen - y = draw_text_line("Drop music files or path names onto this window", 20) - y = draw_text_line("Copy file names into the clipboard", y) - y = draw_text_line("Or feed them from the command line", y) - y = draw_text_line("If it's music it will play!", y) - y = draw_text_line("SPACE to pause or UP/DOWN to 
change volume", y) - y = draw_text_line("LEFT and RIGHT will skip around the track", y) - draw_text_line("Other keys will start the next track", y) - - """ - This is the main loop - It will respond to drag and drop, clipboard changes, and key presses - """ - while running: - for ev in pg.event.get(): - if ev.type == pg.QUIT: - running = False - elif ev.type == pg.DROPTEXT: - play_file(ev.text) - elif ev.type == pg.DROPFILE: - play_file(ev.file) - elif ev.type == MUSIC_DONE: - play_next() - elif ev.type == pg.KEYDOWN: - if ev.key == pg.K_ESCAPE: - running = False # exit loop - elif ev.key in (pg.K_SPACE, pg.K_RETURN): - if paused: - pg.mixer.music.unpause() - paused = False - else: - pg.mixer.music.pause() - paused = True - elif ev.key == pg.K_UP: - change_volume = VOLUME_CHANGE_AMOUNT - elif ev.key == pg.K_DOWN: - change_volume = -VOLUME_CHANGE_AMOUNT - elif ev.key == pg.K_RIGHT: - change_music_postion(+5) - elif ev.key == pg.K_LEFT: - change_music_postion(-5) - - else: - play_next() - - elif ev.type == pg.KEYUP: - if ev.key in (pg.K_UP, pg.K_DOWN): - change_volume = 0 - - # is the user holding up or down? - if change_volume: - volume += change_volume - volume = min(max(0, volume), 1) # volume should be between 0 and 1 - pg.mixer.music.set_volume(volume) - print("volume:", volume) - - # TODO remove decode when SDL2 scrap is fixed - new_text = pg.scrap.get(pg.SCRAP_TEXT).decode("UTF-8") - if new_text != clipped: # has the clipboard changed? - clipped = new_text - play_file(clipped) # try to play the file if it has - - pg.display.flip() - clock.tick(9) # keep CPU use down by updating screen less often - - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/pixelarray.py b/venv/Lib/site-packages/pygame/examples/pixelarray.py deleted file mode 100644 index 9961091..0000000 --- a/venv/Lib/site-packages/pygame/examples/pixelarray.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.pixelarray - -PixelArray does array processing of pixels. -Sort of like another array processor called 'numpy' - But for pixels. - - Flip it, - stripe it, - rotate it. - -Controls --------- - -To see different effects - press a key or click a mouse. -""" -import os -import pygame as pg - - -main_dir = os.path.split(os.path.abspath(__file__))[0] -data_dir = os.path.join(main_dir, "data") - - -def show(image): - screen = pg.display.get_surface() - screen.fill((255, 255, 255)) - screen.blit(image, (0, 0)) - pg.display.flip() - while 1: - event = pg.event.wait() - if event.type == pg.QUIT: - pg.quit() - raise SystemExit - if event.type in [pg.MOUSEBUTTONDOWN, pg.KEYDOWN]: - break - - -def main(): - pg.init() - - pg.display.set_mode((255, 255)) - surface = pg.Surface((255, 255)) - - pg.display.flip() - - # Create the PixelArray. - ar = pg.PixelArray(surface) - - # Do some easy gradient effect. - for y in range(255): - r, g, b = y, y, y - ar[:, y] = (r, g, b) - del ar - show(surface) - - # We have made some gradient effect, now flip it. - ar = pg.PixelArray(surface) - ar[:] = ar[:, ::-1] - del ar - show(surface) - - # Every second column will be made blue - ar = pg.PixelArray(surface) - ar[::2] = (0, 0, 255) - del ar - show(surface) - - # Every second row will be made green - ar = pg.PixelArray(surface) - ar[:, ::2] = (0, 255, 0) - del ar - show(surface) - - # Manipulate the image. Flip it around the y axis. 
- surface = pg.image.load(os.path.join(data_dir, "arraydemo.bmp")) - ar = pg.PixelArray(surface) - ar[:] = ar[:, ::-1] - del ar - show(surface) - - # Flip the image around the x axis. - ar = pg.PixelArray(surface) - ar[:] = ar[::-1, :] - del ar - show(surface) - - # Every second column will be made white. - ar = pg.PixelArray(surface) - ar[::2] = (255, 255, 255) - del ar - show(surface) - - # Flip the image around both axes, restoring its original layout. - ar = pg.PixelArray(surface) - ar[:] = ar[::-1, ::-1] - del ar - show(surface) - - # Rotate 90 degrees clockwise. - w, h = surface.get_size() - surface2 = pg.Surface((h, w), surface.get_flags(), surface) - ar = pg.PixelArray(surface) - ar2 = pg.PixelArray(surface2) - ar2[...] = ar.transpose()[::-1, :] - del ar, ar2 - show(surface2) - - # Scale it by throwing each second pixel away. - surface = pg.image.load(os.path.join(data_dir, "arraydemo.bmp")) - ar = pg.PixelArray(surface) - sf2 = ar[::2, ::2].make_surface() - del ar - show(sf2) - - # Replace anything looking like the blue color from the text. - ar = pg.PixelArray(surface) - ar.replace((60, 60, 255), (0, 255, 0), 0.06) - del ar - show(surface) - - # Extract anything which might be somewhat black. - surface = pg.image.load(os.path.join(data_dir, "arraydemo.bmp")) - ar = pg.PixelArray(surface) - ar2 = ar.extract((0, 0, 0), 0.07) - sf2 = ar2.surface - del ar, ar2 - show(sf2) - - # Compare two images. - surface = pg.image.load(os.path.join(data_dir, "alien1.gif")) - surface2 = pg.image.load(os.path.join(data_dir, "alien2.gif")) - ar1 = pg.PixelArray(surface) - ar2 = pg.PixelArray(surface2) - ar3 = ar1.compare(ar2, 0.07) - sf3 = ar3.surface - del ar1, ar2, ar3 - show(sf3) - - -if __name__ == "__main__": - main() - pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/playmus.py b/venv/Lib/site-packages/pygame/examples/playmus.py deleted file mode 100644 index dac2f64..0000000 --- a/venv/Lib/site-packages/pygame/examples/playmus.py +++ /dev/null @@ -1,165 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.playmus - -A simple music player. - - Use pygame.mixer.music to play an audio file. - -A window is created to handle keyboard events for playback commands. - - -Keyboard Controls ------------------ - -space - play/pause toggle -r - rewind -f - fade out -q - stop - -""" -import sys - -import pygame as pg -import pygame.freetype - - -class Window(object): - """The application's Pygame window - - A Window instance manages the creation of and drawing to a - window. It is a singleton class. Only one instance can exist. 
- - """ - - instance = None - - def __new__(cls, *args, **kwds): - """Return an open Pygame window""" - - if Window.instance is not None: - return Window.instance - self = object.__new__(cls) - pg.display.init() - self.screen = pg.display.set_mode((600, 400)) - Window.instance = self - return self - - def __init__(self, title): - pg.display.set_caption(title) - self.text_color = (254, 231, 21, 255) - self.background_color = (16, 24, 32, 255) - self.screen.fill(self.background_color) - pg.display.flip() - - pygame.freetype.init() - self.font = pygame.freetype.Font(None, 20) - self.font.origin = True - self.ascender = int(self.font.get_sized_ascender() * 1.5) - self.descender = int(self.font.get_sized_descender() * 1.5) - self.line_height = self.ascender - self.descender - - self.write_lines( - "\nPress 'q' or 'ESCAPE' or close this window to quit\n" - "Press 'SPACE' to play / pause\n" - "Press 'r' to rewind to the beginning (restart)\n" - "Press 'f' to fade music out over 5 seconds\n\n" - "Window will quit automatically when music ends\n", - 0, - ) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - return False - - def close(self): - pg.display.quit() - Window.instance = None - - def write_lines(self, text, line=0): - w, h = self.screen.get_size() - line_height = self.line_height - nlines = h // line_height - if line < 0: - line = nlines + line - for i, text_line in enumerate(text.split("\n"), line): - y = i * line_height + self.ascender - # Clear the line first. - self.screen.fill( - self.background_color, (0, i * line_height, w, line_height) - ) - # Write new text. - self.font.render_to(self.screen, (15, y), text_line, self.text_color) - pg.display.flip() - - -def show_usage_message(): - print("Usage: python playmus.py ") - print(" python -m pygame.examples.playmus ") - - -def main(file_path): - """Play an audio file with pg.mixer.music""" - - with Window(file_path) as win: - win.write_lines("Loading ...", -1) - pg.mixer.init(frequency=44100) - try: - paused = False - pg.mixer.music.load(file_path) - - # Make sure the event loop ticks over at least every 0.5 seconds. - pg.time.set_timer(pg.USEREVENT, 500) - - pg.mixer.music.play() - win.write_lines("Playing ...\n", -1) - - while pg.mixer.music.get_busy() or paused: - e = pg.event.wait() - if e.type == pg.KEYDOWN: - key = e.key - if key == pg.K_SPACE: - if paused: - pg.mixer.music.unpause() - paused = False - win.write_lines("Playing ...\n", -1) - else: - pg.mixer.music.pause() - paused = True - win.write_lines("Paused ...\n", -1) - elif key == pg.K_r: - if file_path[-3:].lower() in ("ogg", "mp3", "mod"): - status = "Rewound." - pg.mixer.music.rewind() - else: - status = "Restarted." - pg.mixer.music.play() - if paused: - pg.mixer.music.pause() - win.write_lines(status, -1) - elif key == pg.K_f: - win.write_lines("Fading out ...\n", -1) - pg.mixer.music.fadeout(5000) - # when finished get_busy() will return False. - elif key in [pg.K_q, pg.K_ESCAPE]: - paused = False - pg.mixer.music.stop() - # get_busy() will now return False. - elif e.type == pg.QUIT: - paused = False - pg.mixer.music.stop() - # get_busy() will now return False. 
- pg.time.set_timer(pg.USEREVENT, 0) - finally: - pg.mixer.quit() - pg.quit() - - -if __name__ == "__main__": - # Check the only command line argument, a file path - if len(sys.argv) != 2: - show_usage_message() - else: - main(sys.argv[1]) diff --git a/venv/Lib/site-packages/pygame/examples/prevent_display_stretching.py b/venv/Lib/site-packages/pygame/examples/prevent_display_stretching.py deleted file mode 100644 index 363df81..0000000 --- a/venv/Lib/site-packages/pygame/examples/prevent_display_stretching.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.prevent_display_stretching - -Prevent display stretching on Windows. - -On some computers, the display environment can be configured to stretch -all windows so that they will not appear too small on the screen for -the user. This configuration is especially common on high-DPI displays. -pygame graphics appear distorted when automatically stretched by the -display environment. This script demonstrates a technique for preventing -this stretching and distortion. - -Limitations: -This script makes an API call that is only available on Windows (versions -Vista and newer). - -""" - -# Ensure that the computer is running Windows Vista or newer -import os -import sys - -# game constants -TEXTCOLOR = "green" -BACKGROUNDCOLOR = "black" -AXISCOLOR = "white" - -if os.name != "nt" or sys.getwindowsversion()[0] < 6: - raise NotImplementedError("this script requires Windows Vista or newer") - -import pygame as pg - -import ctypes - -# Determine whether or not the user would like to prevent stretching -if os.path.basename(sys.executable) == "pythonw.exe": - selection = "y" -else: - selection = None - while selection not in ("y", "n"): - selection = input("Prevent stretching? (y/n): ").strip().lower() - -if selection == "y": - msg = "Stretching is prevented." -else: - msg = "Stretching is not prevented." - -# Prevent stretching -if selection == "y": - user32 = ctypes.windll.user32 - user32.SetProcessDPIAware() - -# Show screen -pg.display.init() -RESOLUTION = (350, 350) -screen = pg.display.set_mode(RESOLUTION) - -# Render message onto a surface -pg.font.init() -font = pg.font.Font(None, 36) -msg_surf = font.render(msg, 1, TEXTCOLOR) -res_surf = font.render("Intended resolution: %ix%i" % RESOLUTION, 1, TEXTCOLOR) - -# Control loop -running = True -clock = pg.time.Clock() -counter = 0 -while running: - - for event in pg.event.get(): - if event.type == pg.QUIT: - running = False - - screen.fill(BACKGROUNDCOLOR) - - # Draw lines which will be blurry if the window is stretched - # or clear if the window is not stretched. 
- pg.draw.line(screen, AXISCOLOR, (0, counter), (RESOLUTION[0] - 1, counter)) - pg.draw.line(screen, AXISCOLOR, (counter, 0), (counter, RESOLUTION[1] - 1)) - - # Blit message onto screen surface - msg_blit_rect = screen.blit(msg_surf, (0, 0)) - screen.blit(res_surf, (0, msg_blit_rect.bottom)) - - clock.tick(10) - - pg.display.flip() - - counter += 1 - if counter == RESOLUTION[0]: - counter = 0 - -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/resizing_new.py b/venv/Lib/site-packages/pygame/examples/resizing_new.py deleted file mode 100644 index cda01f2..0000000 --- a/venv/Lib/site-packages/pygame/examples/resizing_new.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python -import pygame as pg - -pg.init() - -RES = (160, 120) -FPS = 30 -clock = pg.time.Clock() - -screen = pg.display.set_mode(RES, pg.RESIZABLE) -pg.display._set_autoresize(False) - -# MAIN LOOP - -done = False - -i = 0 -j = 0 - -while not done: - for event in pg.event.get(): - if event.type == pg.KEYDOWN and event.key == pg.K_q: - done = True - if event.type == pg.QUIT: - done = True - # if event.type==pg.WINDOWRESIZED: - # screen=pg.display.get_surface() - if event.type == pg.VIDEORESIZE: - screen = pg.display.get_surface() - i += 1 - i = i % screen.get_width() - j += i % 2 - j = j % screen.get_height() - - screen.fill((255, 0, 255)) - pg.draw.circle(screen, (0, 0, 0), (100, 100), 20) - pg.draw.circle(screen, (0, 0, 200), (0, 0), 10) - pg.draw.circle(screen, (200, 0, 0), (160, 120), 30) - pg.draw.line(screen, (250, 250, 0), (0, 120), (160, 0)) - pg.draw.circle(screen, (255, 255, 255), (i, j), 5) - - pg.display.flip() - clock.tick(FPS) -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/scaletest.py b/venv/Lib/site-packages/pygame/examples/scaletest.py deleted file mode 100644 index 6d7b964..0000000 --- a/venv/Lib/site-packages/pygame/examples/scaletest.py +++ /dev/null @@ -1,158 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.scaletest - -Shows an interactive image scaler. 
- -""" -import sys -import time -import pygame as pg - - -def main(imagefile, convert_alpha=False, run_speed_test=False): - """show an interactive image scaler - - Args: - imagefile - name of source image (required) - convert_alpha - use convert_alpha() on the surf (default False) - run_speed_test - (default False) - """ - - # initialize display - pg.display.init() - # load background image - background = pg.image.load(imagefile) - - if run_speed_test: - if convert_alpha: - # convert_alpha() requires the display mode to be set - pg.display.set_mode((1, 1)) - background = background.convert_alpha() - - SpeedTest(background) - return - - # start fullscreen mode - screen = pg.display.set_mode((1024, 768), pg.FULLSCREEN) - if convert_alpha: - background = background.convert_alpha() - - # turn off the mouse pointer - pg.mouse.set_visible(0) - # main loop - bRunning = True - bUp = False - bDown = False - bLeft = False - bRight = False - cursize = [background.get_width(), background.get_height()] - while bRunning: - image = pg.transform.smoothscale(background, cursize) - imgpos = image.get_rect(centerx=512, centery=384) - screen.fill((255, 255, 255)) - screen.blit(image, imgpos) - pg.display.flip() - for event in pg.event.get(): - if event.type == pg.QUIT or ( - event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE - ): - bRunning = False - if event.type == pg.KEYDOWN: - if event.key == pg.K_UP: - bUp = True - if event.key == pg.K_DOWN: - bDown = True - if event.key == pg.K_LEFT: - bLeft = True - if event.key == pg.K_RIGHT: - bRight = True - if event.type == pg.KEYUP: - if event.key == pg.K_UP: - bUp = False - if event.key == pg.K_DOWN: - bDown = False - if event.key == pg.K_LEFT: - bLeft = False - if event.key == pg.K_RIGHT: - bRight = False - if bUp: - cursize[1] -= 2 - if cursize[1] < 1: - cursize[1] = 1 - if bDown: - cursize[1] += 2 - if bLeft: - cursize[0] -= 2 - if cursize[0] < 1: - cursize[0] = 1 - if bRight: - cursize[0] += 2 - pg.quit() - - -def SpeedTest(image): - print("\nImage Scaling Speed Test - Image Size %s\n" % str(image.get_size())) - - imgsize = [image.get_width(), image.get_height()] - duration = 0.0 - for i in range(128): - shrinkx = (imgsize[0] * i) // 128 - shrinky = (imgsize[1] * i) // 128 - start = time.time() - tempimg = pg.transform.smoothscale(image, (shrinkx, shrinky)) - duration += time.time() - start - del tempimg - - print( - "Average transform.smoothscale shrink time: %.4f ms." % (duration / 128 * 1000) - ) - - duration = 0.0 - for i in range(128): - expandx = (imgsize[0] * (i + 129)) // 128 - expandy = (imgsize[1] * (i + 129)) // 128 - start = time.time() - tempimg = pg.transform.smoothscale(image, (expandx, expandy)) - duration += time.time() - start - del tempimg - - print( - "Average transform.smoothscale expand time: %.4f ms." % (duration / 128 * 1000) - ) - - duration = 0.0 - for i in range(128): - shrinkx = (imgsize[0] * i) // 128 - shrinky = (imgsize[1] * i) // 128 - start = time.time() - tempimg = pg.transform.scale(image, (shrinkx, shrinky)) - duration += time.time() - start - del tempimg - - print("Average transform.scale shrink time: %.4f ms." % (duration / 128 * 1000)) - - duration = 0.0 - for i in range(128): - expandx = (imgsize[0] * (i + 129)) // 128 - expandy = (imgsize[1] * (i + 129)) // 128 - start = time.time() - tempimg = pg.transform.scale(image, (expandx, expandy)) - duration += time.time() - start - del tempimg - - print("Average transform.scale expand time: %.4f ms." 
% (duration / 128 * 1000)) - - -if __name__ == "__main__": - # check input parameters - if len(sys.argv) < 2: - print("\nUsage: %s imagefile [-t] [-convert_alpha]" % sys.argv[0]) - print(" imagefile image filename (required)") - print(" -t run speed test") - print(" -convert_alpha use convert_alpha() on the image's " "surface\n") - else: - main( - sys.argv[1], - convert_alpha="-convert_alpha" in sys.argv, - run_speed_test="-t" in sys.argv, - ) diff --git a/venv/Lib/site-packages/pygame/examples/scrap_clipboard.py b/venv/Lib/site-packages/pygame/examples/scrap_clipboard.py deleted file mode 100644 index 5978a42..0000000 --- a/venv/Lib/site-packages/pygame/examples/scrap_clipboard.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.scrap_clipboard - -Demonstrates the clipboard capabilities of pygame. - -Copy/paste! - - -Keyboard Controls ------------------ - -g - get and print types in clipboard. If, image blit to screen. -p - place some text into clipboard -a - print types available in the clipboard -i - put image into the clipboard -""" -import os - -import pygame as pg -import pygame.scrap as scrap - -from io import BytesIO - - -def usage(): - print("Press the 'g' key to get all of the current clipboard data") - print("Press the 'p' key to put a string into the clipboard") - print("Press the 'a' key to get a list of the currently available types") - print("Press the 'i' key to put an image into the clipboard") - - -main_dir = os.path.split(os.path.abspath(__file__))[0] - -pg.init() -screen = pg.display.set_mode((200, 200)) -c = pg.time.Clock() -going = True - -# Initialize the scrap module and use the clipboard mode. -scrap.init() -scrap.set_mode(pg.SCRAP_CLIPBOARD) - -usage() - -while going: - for e in pg.event.get(): - if e.type == pg.QUIT or (e.type == pg.KEYDOWN and e.key == pg.K_ESCAPE): - going = False - - elif e.type == pg.KEYDOWN and e.key == pg.K_g: - # This means to look for data. - print("Getting the different clipboard data..") - for t in scrap.get_types(): - r = scrap.get(t) - if r and len(r) > 500: - print("Type %s : (large %i byte buffer)" % (t, len(r))) - elif r is None: - print("Type %s : None" % (t,)) - else: - print("Type %s : '%s'" % (t, r.decode("ascii", "ignore"))) - if "image" in t: - namehint = t.split("/")[1] - if namehint in ["bmp", "png", "jpg"]: - f = BytesIO(r) - loaded_surf = pg.image.load(f, "." + namehint) - screen.blit(loaded_surf, (0, 0)) - - elif e.type == pg.KEYDOWN and e.key == pg.K_p: - # Place some text into the selection. - print("Placing clipboard text.") - scrap.put(pg.SCRAP_TEXT, b"Hello. This is a message from scrap.") - - elif e.type == pg.KEYDOWN and e.key == pg.K_a: - # Get all available types. 
- print("Getting the available types from the clipboard.") - types = scrap.get_types() - print(types) - if len(types) > 0: - print("Contains %s: %s" % (types[0], scrap.contains(types[0]))) - print("Contains _INVALID_: ", scrap.contains("_INVALID_")) - - elif e.type == pg.KEYDOWN and e.key == pg.K_i: - print("Putting image into the clipboard.") - scrap.set_mode(pg.SCRAP_CLIPBOARD) - fp = open(os.path.join(main_dir, "data", "liquid.bmp"), "rb") - buf = fp.read() - scrap.put("image/bmp", buf) - fp.close() - - elif e.type in (pg.KEYDOWN, pg.MOUSEBUTTONDOWN): - usage() - pg.display.flip() - c.tick(40) -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/scroll.py b/venv/Lib/site-packages/pygame/examples/scroll.py deleted file mode 100644 index 48b7417..0000000 --- a/venv/Lib/site-packages/pygame/examples/scroll.py +++ /dev/null @@ -1,192 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.scroll - -An zoomed image viewer that demonstrates Surface.scroll - -This example shows a scrollable image that has a zoom factor of eight. -It uses the Surface.scroll function to shift the image on the display -surface. A clip rectangle protects a margin area. If called as a function, -the example accepts an optional image file path. If run as a program -it takes an optional file path command line argument. If no file -is provided a default image file is used. - -When running click on a black triangle to move one pixel in the direction -the triangle points. Or use the arrow keys. Close the window or press ESC -to quit. -""" -import sys -import os - -import pygame as pg -from pygame.transform import scale - -main_dir = os.path.dirname(os.path.abspath(__file__)) - -# game constants -DIR_UP = 1 -DIR_DOWN = 2 -DIR_LEFT = 3 -DIR_RIGHT = 4 - -zoom_factor = 8 - - -def draw_arrow(surf, color, posn, direction): - x, y = posn - if direction == DIR_UP: - pointlist = ((x - 29, y + 30), (x + 30, y + 30), (x + 1, y - 29), (x, y - 29)) - elif direction == DIR_DOWN: - pointlist = ((x - 29, y - 29), (x + 30, y - 29), (x + 1, y + 30), (x, y + 30)) - elif direction == DIR_LEFT: - pointlist = ((x + 30, y - 29), (x + 30, y + 30), (x - 29, y + 1), (x - 29, y)) - else: - pointlist = ((x - 29, y - 29), (x - 29, y + 30), (x + 30, y + 1), (x + 30, y)) - pg.draw.polygon(surf, color, pointlist) - - -def add_arrow_button(screen, regions, posn, direction): - draw_arrow(screen, "black", posn, direction) - draw_arrow(regions, (direction, 0, 0), posn, direction) - - -def scroll_view(screen, image, direction, view_rect): - src_rect = None - zoom_view_rect = screen.get_clip() - image_w, image_h = image.get_size() - if direction == DIR_UP: - if view_rect.top > 0: - screen.scroll(dy=zoom_factor) - view_rect.move_ip(0, -1) - src_rect = view_rect.copy() - src_rect.h = 1 - dst_rect = zoom_view_rect.copy() - dst_rect.h = zoom_factor - elif direction == DIR_DOWN: - if view_rect.bottom < image_h: - screen.scroll(dy=-zoom_factor) - view_rect.move_ip(0, 1) - src_rect = view_rect.copy() - src_rect.h = 1 - src_rect.bottom = view_rect.bottom - dst_rect = zoom_view_rect.copy() - dst_rect.h = zoom_factor - dst_rect.bottom = zoom_view_rect.bottom - elif direction == DIR_LEFT: - if view_rect.left > 0: - screen.scroll(dx=zoom_factor) - view_rect.move_ip(-1, 0) - src_rect = view_rect.copy() - src_rect.w = 1 - dst_rect = zoom_view_rect.copy() - dst_rect.w = zoom_factor - elif direction == DIR_RIGHT: - if view_rect.right < image_w: - screen.scroll(dx=-zoom_factor) - view_rect.move_ip(1, 0) - src_rect = view_rect.copy() - src_rect.w = 1 - src_rect.right = 
view_rect.right - dst_rect = zoom_view_rect.copy() - dst_rect.w = zoom_factor - dst_rect.right = zoom_view_rect.right - if src_rect is not None: - scale(image.subsurface(src_rect), dst_rect.size, screen.subsurface(dst_rect)) - pg.display.update(zoom_view_rect) - - -def main(image_file=None): - if image_file is None: - image_file = os.path.join(main_dir, "data", "arraydemo.bmp") - margin = 80 - view_size = (30, 20) - zoom_view_size = (view_size[0] * zoom_factor, view_size[1] * zoom_factor) - win_size = (zoom_view_size[0] + 2 * margin, zoom_view_size[1] + 2 * margin) - background_color = pg.Color("beige") - - pg.init() - - # set up key repeating so we can hold down the key to scroll. - old_k_delay, old_k_interval = pg.key.get_repeat() - pg.key.set_repeat(500, 30) - - try: - screen = pg.display.set_mode(win_size) - screen.fill(background_color) - pg.display.flip() - - image = pg.image.load(image_file).convert() - image_w, image_h = image.get_size() - - if image_w < view_size[0] or image_h < view_size[1]: - print("The source image is too small for this example.") - print("A %i by %i or larger image is required." % zoom_view_size) - return - - regions = pg.Surface(win_size, 0, 24) - add_arrow_button(screen, regions, (40, win_size[1] // 2), DIR_LEFT) - add_arrow_button( - screen, regions, (win_size[0] - 40, win_size[1] // 2), DIR_RIGHT - ) - add_arrow_button(screen, regions, (win_size[0] // 2, 40), DIR_UP) - add_arrow_button( - screen, regions, (win_size[0] // 2, win_size[1] - 40), DIR_DOWN - ) - pg.display.flip() - - screen.set_clip((margin, margin, zoom_view_size[0], zoom_view_size[1])) - - view_rect = pg.Rect(0, 0, view_size[0], view_size[1]) - - scale( - image.subsurface(view_rect), - zoom_view_size, - screen.subsurface(screen.get_clip()), - ) - pg.display.flip() - - # the direction we will scroll in. - direction = None - - clock = pg.time.Clock() - clock.tick() - - going = True - while going: - # wait for events before doing anything. - # events = [pg.event.wait()] + pg.event.get() - events = pg.event.get() - - for e in events: - if e.type == pg.KEYDOWN: - if e.key == pg.K_ESCAPE: - going = False - elif e.key == pg.K_DOWN: - scroll_view(screen, image, DIR_DOWN, view_rect) - elif e.key == pg.K_UP: - scroll_view(screen, image, DIR_UP, view_rect) - elif e.key == pg.K_LEFT: - scroll_view(screen, image, DIR_LEFT, view_rect) - elif e.key == pg.K_RIGHT: - scroll_view(screen, image, DIR_RIGHT, view_rect) - elif e.type == pg.QUIT: - going = False - elif e.type == pg.MOUSEBUTTONDOWN: - direction = regions.get_at(e.pos)[0] - elif e.type == pg.MOUSEBUTTONUP: - direction = None - - if direction: - scroll_view(screen, image, direction, view_rect) - clock.tick(30) - - finally: - pg.key.set_repeat(old_k_delay, old_k_interval) - pg.quit() - - -if __name__ == "__main__": - if len(sys.argv) > 1: - image_file = sys.argv[1] - else: - image_file = None - main(image_file) diff --git a/venv/Lib/site-packages/pygame/examples/setmodescale.py b/venv/Lib/site-packages/pygame/examples/setmodescale.py deleted file mode 100644 index 3f427d2..0000000 --- a/venv/Lib/site-packages/pygame/examples/setmodescale.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.setmodescale - -On high resolution displays(4k, 1080p) and tiny graphics games (640x480) -show up very small so that they are unplayable. SCALED scales up the window -for you. The game thinks it's a 640x480 window, but really it can be bigger. -Mouse events are scaled for you, so your game doesn't need to do it. 
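A minimal sketch of the SCALED idea described above (window size, colours and the one-second wait are arbitrary): the game keeps drawing in its small logical resolution while pygame scales the window and mouse coordinates.

import pygame as pg

pg.init()
screen = pg.display.set_mode((320, 240), pg.SCALED | pg.RESIZABLE)  # game still sees 320x240
screen.fill((40, 40, 40))
pg.draw.circle(screen, (255, 255, 255), (160, 120), 30)             # drawn in logical coordinates
pg.display.flip()
pg.time.wait(1000)
pg.quit()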
- -Passing SCALED to pygame.display.set_mode means the resolution depends -on desktop size and the graphics are scaled. -""" - -import pygame as pg - -pg.init() - -RES = (160, 120) -FPS = 30 -clock = pg.time.Clock() - -print("desktops", pg.display.get_desktop_sizes()) -screen = pg.display.set_mode(RES, pg.SCALED | pg.RESIZABLE) - -# MAIN LOOP - -done = False - -i = 0 -j = 0 - -r_name, r_flags = pg.display._get_renderer_info() -print("renderer:", r_name, "flags:", bin(r_flags)) -for flag, name in [ - (1, "software"), - (2, "accelerated"), - (4, "VSync"), - (8, "render to texture"), -]: - if flag & r_flags: - print(name) - -while not done: - for event in pg.event.get(): - if event.type == pg.KEYDOWN and event.key == pg.K_q: - done = True - if event.type == pg.QUIT: - done = True - if event.type == pg.KEYDOWN and event.key == pg.K_f: - pg.display.toggle_fullscreen() - if event.type == pg.VIDEORESIZE: - pg.display._resize_event(event) - - i += 1 - i = i % screen.get_width() - j += i % 2 - j = j % screen.get_height() - - screen.fill((255, 0, 255)) - pg.draw.circle(screen, (0, 0, 0), (100, 100), 20) - pg.draw.circle(screen, (0, 0, 200), (0, 0), 10) - pg.draw.circle(screen, (200, 0, 0), (160, 120), 30) - pg.draw.line(screen, (250, 250, 0), (0, 120), (160, 0)) - pg.draw.circle(screen, (255, 255, 255), (i, j), 5) - - pg.display.flip() - clock.tick(FPS) -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/sound.py b/venv/Lib/site-packages/pygame/examples/sound.py deleted file mode 100644 index c5a23b9..0000000 --- a/venv/Lib/site-packages/pygame/examples/sound.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.sound - -Playing a soundfile and waiting for it to finish. You'll need the -pygame.mixer module for this to work. Note how in this simple example -we don't even bother loading all of the pygame package. -Just pick the mixer for sound and time for the delay function. - -Optional command line argument: audio file name -""" -import os -import sys -import pygame as pg - -main_dir = os.path.split(os.path.abspath(__file__))[0] - - -def main(file_path=None): - """Play an audio file as a buffered sound sample - - :param str file_path: audio file (default data/secosmic_low.wav) - """ - # choose a desired audio format - pg.mixer.init(11025) # raises exception on fail - - # load the sound - sound = pg.mixer.Sound(file_path) - - # start playing - print("Playing Sound...") - channel = sound.play() - - # poll until finished - while channel.get_busy(): # still playing - print(" ...still going...") - pg.time.wait(1000) - print("...Finished") - pg.quit() - - -if __name__ == "__main__": - if len(sys.argv) > 1: - main(sys.argv[1]) - else: - main(os.path.join(main_dir, "data", "secosmic_lo.wav")) diff --git a/venv/Lib/site-packages/pygame/examples/sound_array_demos.py b/venv/Lib/site-packages/pygame/examples/sound_array_demos.py deleted file mode 100644 index 1a2b49d..0000000 --- a/venv/Lib/site-packages/pygame/examples/sound_array_demos.py +++ /dev/null @@ -1,217 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.sound_array_demos - -Creates an echo effect on any Sound object. - -Uses sndarray and numpy to create offset faded copies of the -original sound. Currently it just uses hardcoded values for the -number of echos and the delay. Easy for you to recreate as -needed. - -version 2. changes: -- Should work with different sample rates now. -- put into a function. -- Uses numpy by default, but falls back on Numeric. 
-""" -import os -import pygame as pg -from numpy import zeros, int32, int16 -import time - - -# pg.mixer.init(44100, -16, 0) -pg.mixer.init() -# pg.mixer.init(11025, -16, 0) -# pg.mixer.init(11025) - - -def make_echo(sound, samples_per_second, mydebug=True): - """returns a sound which is echoed of the last one.""" - - echo_length = 3.5 - - a1 = pg.sndarray.array(sound) - if mydebug: - print("SHAPE1: %s" % (a1.shape,)) - - length = a1.shape[0] - - # myarr = zeros(length+12000) - myarr = zeros(a1.shape, int32) - - if len(a1.shape) > 1: - # mult = a1.shape[1] - size = (a1.shape[0] + int(echo_length * a1.shape[0]), a1.shape[1]) - # size = (a1.shape[0] + int(a1.shape[0] + (echo_length * 3000)), a1.shape[1]) - else: - # mult = 1 - size = (a1.shape[0] + int(echo_length * a1.shape[0]),) - # size = (a1.shape[0] + int(a1.shape[0] + (echo_length * 3000)),) - - if mydebug: - print(int(echo_length * a1.shape[0])) - myarr = zeros(size, int32) - - if mydebug: - print("size %s" % (size,)) - print(myarr.shape) - myarr[:length] = a1 - # print (myarr[3000:length+3000]) - # print (a1 >> 1) - # print ("a1.shape %s" % (a1.shape,)) - # c = myarr[3000:length+(3000*mult)] - # print ("c.shape %s" % (c.shape,)) - - incr = int(samples_per_second / echo_length) - gap = length - - myarr[incr : gap + incr] += a1 >> 1 - myarr[incr * 2 : gap + (incr * 2)] += a1 >> 2 - myarr[incr * 3 : gap + (incr * 3)] += a1 >> 3 - myarr[incr * 4 : gap + (incr * 4)] += a1 >> 4 - - if mydebug: - print("SHAPE2: %s" % (myarr.shape,)) - - sound2 = pg.sndarray.make_sound(myarr.astype(int16)) - - return sound2 - - -def slow_down_sound(sound, rate): - """returns a sound which is a slowed down version of the original. - rate - at which the sound should be slowed down. eg. 0.5 would be half speed. - """ - - raise NotImplementedError() - # grow_rate = 1 / rate - # make it 1/rate times longer. - # a1 = pg.sndarray.array(sound) - # surf = pg.surfarray.make_surface(a1) - # print (a1.shape[0] * grow_rate) - # scaled_surf = pg.transform.scale(surf, (int(a1.shape[0] * grow_rate), a1.shape[1])) - # print (scaled_surf) - # print (surf) - - # a2 = a1 * rate - # print (a1.shape) - # print (a2.shape) - # print (a2) - # sound2 = pg.sndarray.make_sound(a2.astype(int16)) - # return sound2 - - -def sound_from_pos(sound, start_pos, samples_per_second=None, inplace=1): - """returns a sound which begins at the start_pos. - start_pos - in seconds from the begining. - samples_per_second - - """ - - # see if we want to reuse the sound data or not. - if inplace: - a1 = pg.sndarray.samples(sound) - else: - a1 = pg.sndarray.array(sound) - - # see if samples per second has been given. If not, query the pg.mixer. - # eg. it might be set to 22050 - if samples_per_second is None: - samples_per_second = pg.mixer.get_init()[0] - - # figure out the start position in terms of samples. - start_pos_in_samples = int(start_pos * samples_per_second) - - # cut the beginning off the sound at the start position. - a2 = a1[start_pos_in_samples:] - - # make the Sound instance from the array. 
- sound2 = pg.sndarray.make_sound(a2) - - return sound2 - - -def main(): - """play various sndarray effects""" - - main_dir = os.path.split(os.path.abspath(__file__))[0] - print("mixer.get_init %s" % (pg.mixer.get_init(),)) - - samples_per_second = pg.mixer.get_init()[0] - - print(("-" * 30) + "\n") - print("loading sound") - sound = pg.mixer.Sound(os.path.join(main_dir, "data", "car_door.wav")) - - print("-" * 30) - print("start positions") - print("-" * 30) - - start_pos = 0.1 - sound2 = sound_from_pos(sound, start_pos, samples_per_second) - - print("sound.get_length %s" % (sound.get_length(),)) - print("sound2.get_length %s" % (sound2.get_length(),)) - sound2.play() - while pg.mixer.get_busy(): - pg.time.wait(200) - - print("waiting 2 seconds") - pg.time.wait(2000) - print("playing original sound") - - sound.play() - while pg.mixer.get_busy(): - pg.time.wait(200) - - print("waiting 2 seconds") - pg.time.wait(2000) - - # if 0: - # #TODO: this is broken. - # print (("-" * 30) + "\n") - # print ("Slow down the original sound.") - # rate = 0.2 - # slowed_sound = slow_down_sound(sound, rate) - # slowed_sound.play() - # while pg.mixer.get_busy(): - # pg.time.wait(200) - - print("-" * 30) - print("echoing") - print("-" * 30) - - t1 = time.time() - sound2 = make_echo(sound, samples_per_second) - print("time to make echo %i" % (time.time() - t1,)) - - print("original sound") - sound.play() - while pg.mixer.get_busy(): - pg.time.wait(200) - - print("echoed sound") - sound2.play() - while pg.mixer.get_busy(): - pg.time.wait(200) - - sound = pg.mixer.Sound(os.path.join(main_dir, "data", "secosmic_lo.wav")) - - t1 = time.time() - sound3 = make_echo(sound, samples_per_second) - print("time to make echo %i" % (time.time() - t1,)) - - print("original sound") - sound.play() - while pg.mixer.get_busy(): - pg.time.wait(200) - - print("echoed sound") - sound3.play() - while pg.mixer.get_busy(): - pg.time.wait(200) - - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/sprite_texture.py b/venv/Lib/site-packages/pygame/examples/sprite_texture.py deleted file mode 100644 index 79d8cf5..0000000 --- a/venv/Lib/site-packages/pygame/examples/sprite_texture.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python -""" pygame.examples.sprite_texture - -Experimental! Uses APIs which may disapear in the next release (_sdl2 is private). - - -Hardware accelerated Image objects with pygame.sprite. - -_sdl2.video.Image is a backwards compatible way with to use Texture with -pygame.sprite groups. 
-""" -import os -import pygame as pg - -if pg.get_sdl_version()[0] < 2: - raise SystemExit("This example requires pygame 2 and SDL2.") -from pygame._sdl2 import Window, Texture, Image, Renderer - - -data_dir = os.path.join(os.path.split(os.path.abspath(__file__))[0], "data") - - -def load_img(file): - return pg.image.load(os.path.join(data_dir, file)) - - -pg.display.init() -pg.key.set_repeat(10, 10) - -win = Window("asdf", resizable=True) -renderer = Renderer(win) -tex = Texture.from_surface(renderer, load_img("alien1.gif")) - - -class Something(pg.sprite.Sprite): - def __init__(self, img): - pg.sprite.Sprite.__init__(self) - - self.rect = img.get_rect() - self.image = img - - self.rect.w *= 5 - self.rect.h *= 5 - - img.origin = self.rect.w / 2, self.rect.h / 2 - - -sprite = Something(Image(tex, (0, 0, tex.width / 2, tex.height / 2))) -sprite.rect.x = 250 -sprite.rect.y = 50 - -# sprite2 = Something(Image(sprite.image)) -sprite2 = Something(Image(tex)) -sprite2.rect.x = 250 -sprite2.rect.y = 250 -sprite2.rect.w /= 2 -sprite2.rect.h /= 2 - -group = pg.sprite.Group() -group.add(sprite2) -group.add(sprite) - -import math - -t = 0 -running = True -clock = pg.time.Clock() -renderer.draw_color = (255, 0, 0, 255) - -while running: - for event in pg.event.get(): - if event.type == pg.QUIT: - running = False - elif event.type == pg.KEYDOWN: - if event.key == pg.K_ESCAPE: - running = False - elif event.key == pg.K_LEFT: - sprite.rect.x -= 5 - elif event.key == pg.K_RIGHT: - sprite.rect.x += 5 - elif event.key == pg.K_DOWN: - sprite.rect.y += 5 - elif event.key == pg.K_UP: - sprite.rect.y -= 5 - - renderer.clear() - t += 1 - - img = sprite.image - img.angle += 1 - img.flipX = t % 50 < 25 - img.flipY = t % 100 < 50 - img.color[0] = int(255.0 * (0.5 + math.sin(0.5 * t + 10.0) / 2.0)) - img.alpha = int(255.0 * (0.5 + math.sin(0.1 * t) / 2.0)) - # img.draw(dstrect=(x, y, 5 * img.srcrect['w'], 5 * img.srcrect['h'])) - - group.draw(renderer) - - renderer.present() - - clock.tick(60) - win.title = str("FPS: {}".format(clock.get_fps())) - -pg.quit() diff --git a/venv/Lib/site-packages/pygame/examples/stars.py b/venv/Lib/site-packages/pygame/examples/stars.py deleted file mode 100644 index 1bd2ac6..0000000 --- a/venv/Lib/site-packages/pygame/examples/stars.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python -""" pg.examples.stars - - We are all in the gutter, - but some of us are looking at the stars. - -- Oscar Wilde - -A simple starfield example. Note you can move the 'center' of -the starfield by leftclicking in the window. This example show -the basics of creating a window, simple pixel plotting, and input -event management. 
-""" -import random -import math -import pygame as pg - -# constants -WINSIZE = [640, 480] -WINCENTER = [320, 240] -NUMSTARS = 150 - - -def init_star(): - "creates new star values" - dir = random.randrange(100000) - velmult = random.random() * 0.6 + 0.4 - vel = [math.sin(dir) * velmult, math.cos(dir) * velmult] - return vel, WINCENTER[:] - - -def initialize_stars(): - "creates a new starfield" - stars = [] - for x in range(NUMSTARS): - star = init_star() - vel, pos = star - steps = random.randint(0, WINCENTER[0]) - pos[0] = pos[0] + (vel[0] * steps) - pos[1] = pos[1] + (vel[1] * steps) - vel[0] = vel[0] * (steps * 0.09) - vel[1] = vel[1] * (steps * 0.09) - stars.append(star) - move_stars(stars) - return stars - - -def draw_stars(surface, stars, color): - "used to draw (and clear) the stars" - for vel, pos in stars: - pos = (int(pos[0]), int(pos[1])) - surface.set_at(pos, color) - - -def move_stars(stars): - "animate the star values" - for vel, pos in stars: - pos[0] = pos[0] + vel[0] - pos[1] = pos[1] + vel[1] - if not 0 <= pos[0] <= WINSIZE[0] or not 0 <= pos[1] <= WINSIZE[1]: - vel[:], pos[:] = init_star() - else: - vel[0] = vel[0] * 1.05 - vel[1] = vel[1] * 1.05 - - -def main(): - "This is the starfield code" - # create our starfield - random.seed() - stars = initialize_stars() - clock = pg.time.Clock() - # initialize and prepare screen - pg.init() - screen = pg.display.set_mode(WINSIZE) - pg.display.set_caption("pygame Stars Example") - white = 255, 240, 200 - black = 20, 20, 40 - screen.fill(black) - - # main game loop - done = 0 - while not done: - draw_stars(screen, stars, black) - move_stars(stars) - draw_stars(screen, stars, white) - pg.display.update() - for e in pg.event.get(): - if e.type == pg.QUIT or (e.type == pg.KEYUP and e.key == pg.K_ESCAPE): - done = 1 - break - elif e.type == pg.MOUSEBUTTONDOWN and e.button == 1: - WINCENTER[:] = list(e.pos) - clock.tick(50) - pg.quit() - - -# if python says run, then we should run -if __name__ == "__main__": - main() - - # I prefer the time of insects to the time of stars. - # - # -- WisÅ‚awa Szymborska diff --git a/venv/Lib/site-packages/pygame/examples/testsprite.py b/venv/Lib/site-packages/pygame/examples/testsprite.py deleted file mode 100644 index 825fb9e..0000000 --- a/venv/Lib/site-packages/pygame/examples/testsprite.py +++ /dev/null @@ -1,261 +0,0 @@ -#!/usr/bin/env python -""" pg.examples.testsprite - -Like the testsprite.c that comes with libsdl, this pygame version shows -lots of sprites moving around. - -It is an abomination of ugly code, and mostly used for testing. - - -See pg.examples.aliens for some prettyier code. -""" -import sys -import os - -from random import randint -from time import time - -import pygame as pg - - -if "-psyco" in sys.argv: - try: - import psyco - - psyco.full() - except Exception: - print("No psyco for you! psyco failed to import and run.") - -main_dir = os.path.split(os.path.abspath(__file__))[0] -data_dir = os.path.join(main_dir, "data") - - -# use this to use update rects or not. -# If the screen is mostly full, then update rects are not useful. 
-update_rects = True -if "-update_rects" in sys.argv: - update_rects = True -if "-noupdate_rects" in sys.argv: - update_rects = False - -use_static = False -if "-static" in sys.argv: - use_static = True - - -use_layered_dirty = False -if "-layered_dirty" in sys.argv: - update_rects = True - use_layered_dirty = True - - -flags = 0 -if "-flip" in sys.argv: - flags ^= pg.DOUBLEBUF - -if "-fullscreen" in sys.argv: - flags ^= pg.FULLSCREEN - -if "-sw" in sys.argv: - flags ^= pg.SWSURFACE - -use_rle = True - -if "-hw" in sys.argv: - flags ^= pg.HWSURFACE - use_rle = False - -if "-scaled" in sys.argv: - flags ^= pg.SCALED - -screen_dims = [640, 480] - -if "-height" in sys.argv: - i = sys.argv.index("-height") - screen_dims[1] = int(sys.argv[i + 1]) - -if "-width" in sys.argv: - i = sys.argv.index("-width") - screen_dims[0] = int(sys.argv[i + 1]) - -if "-alpha" in sys.argv: - use_alpha = True -else: - use_alpha = False - -print(screen_dims) - - -##class Thingy(pg.sprite.Sprite): -## images = None -## def __init__(self): -## pg.sprite.Sprite.__init__(self) -## self.image = Thingy.images[0] -## self.rect = self.image.get_rect() -## self.rect.x = randint(0, screen_dims[0]) -## self.rect.y = randint(0, screen_dims[1]) -## #self.vel = [randint(-10, 10), randint(-10, 10)] -## self.vel = [randint(-1, 1), randint(-1, 1)] -## -## def move(self): -## for i in [0, 1]: -## nv = self.rect[i] + self.vel[i] -## if nv >= screen_dims[i] or nv < 0: -## self.vel[i] = -self.vel[i] -## nv = self.rect[i] + self.vel[i] -## self.rect[i] = nv - - -class Thingy(pg.sprite.DirtySprite): - images = None - - def __init__(self): - ## pg.sprite.Sprite.__init__(self) - pg.sprite.DirtySprite.__init__(self) - self.image = Thingy.images[0] - self.rect = self.image.get_rect() - self.rect.x = randint(0, screen_dims[0]) - self.rect.y = randint(0, screen_dims[1]) - # self.vel = [randint(-10, 10), randint(-10, 10)] - self.vel = [randint(-1, 1), randint(-1, 1)] - self.dirty = 2 - - def update(self): - for i in [0, 1]: - nv = self.rect[i] + self.vel[i] - if nv >= screen_dims[i] or nv < 0: - self.vel[i] = -self.vel[i] - nv = self.rect[i] + self.vel[i] - self.rect[i] = nv - - -class Static(pg.sprite.DirtySprite): - images = None - - def __init__(self): - pg.sprite.DirtySprite.__init__(self) - self.image = Static.images[0] - self.rect = self.image.get_rect() - self.rect.x = randint(0, 3 * screen_dims[0] / 4) - self.rect.y = randint(0, 3 * screen_dims[1] / 4) - - -def main( - update_rects=True, - use_static=False, - use_layered_dirty=False, - screen_dims=[640, 480], - use_alpha=False, - flags=0, -): - """Show lots of sprites moving around - - Optional keyword arguments: - update_rects - use the RenderUpdate sprite group class (default True) - use_static - include non-moving images (default False) - use_layered_dirty - Use the FastRenderGroup sprite group (default False) - screen_dims - Pygame window dimensions (default [640, 480]) - use_alpha - use alpha blending (default False) - flags - additional display mode flags (default no additional flags) - - """ - - if use_layered_dirty: - update_rects = True - - pg.init() # needed to initialise time module for get_ticks() - pg.display.init() - - # if "-fast" in sys.argv: - - screen = pg.display.set_mode(screen_dims, flags, vsync="-vsync" in sys.argv) - - # this is mainly for GP2X, so it can quit. 
- pg.joystick.init() - num_joysticks = pg.joystick.get_count() - if num_joysticks > 0: - stick = pg.joystick.Joystick(0) - stick.init() # now we will receive events for the joystick - - screen.fill([0, 0, 0]) - pg.display.flip() - sprite_surface = pg.image.load(os.path.join(data_dir, "asprite.bmp")) - sprite_surface2 = pg.image.load(os.path.join(data_dir, "static.png")) - - if use_rle: - sprite_surface.set_colorkey([0xFF, 0xFF, 0xFF], pg.SRCCOLORKEY | pg.RLEACCEL) - sprite_surface2.set_colorkey([0xFF, 0xFF, 0xFF], pg.SRCCOLORKEY | pg.RLEACCEL) - else: - sprite_surface.set_colorkey([0xFF, 0xFF, 0xFF], pg.SRCCOLORKEY) - sprite_surface2.set_colorkey([0xFF, 0xFF, 0xFF], pg.SRCCOLORKEY) - - if use_alpha: - sprite_surface = sprite_surface.convert_alpha() - sprite_surface2 = sprite_surface2.convert_alpha() - else: - sprite_surface = sprite_surface.convert() - sprite_surface2 = sprite_surface2.convert() - - Thingy.images = [sprite_surface] - if use_static: - Static.images = [sprite_surface2] - - if len(sys.argv) > 1: - try: - numsprites = int(sys.argv[-1]) - except Exception: - numsprites = 100 - else: - numsprites = 100 - sprites = None - if use_layered_dirty: - ## sprites = pg.sprite.FastRenderGroup() - sprites = pg.sprite.LayeredDirty() - else: - if update_rects: - sprites = pg.sprite.RenderUpdates() - else: - sprites = pg.sprite.Group() - - for i in range(0, numsprites): - if use_static and i % 2 == 0: - sprites.add(Static()) - sprites.add(Thingy()) - - frames = 0 - start = time() - - background = pg.Surface(screen.get_size()) - background = background.convert() - background.fill([0, 0, 0]) - - going = True - while going: - if not update_rects: - screen.fill([0, 0, 0]) - - ## for sprite in sprites: - ## sprite.move() - - if update_rects: - sprites.clear(screen, background) - sprites.update() - - rects = sprites.draw(screen) - if update_rects: - pg.display.update(rects) - else: - pg.display.flip() - - for event in pg.event.get(): - if event.type in [pg.QUIT, pg.KEYDOWN, pg.QUIT, pg.JOYBUTTONDOWN]: - going = False - - frames += 1 - end = time() - print("FPS: %f" % (frames / ((end - start)))) - pg.quit() - - -if __name__ == "__main__": - main(update_rects, use_static, use_layered_dirty, screen_dims, use_alpha, flags) diff --git a/venv/Lib/site-packages/pygame/examples/textinput.py b/venv/Lib/site-packages/pygame/examples/textinput.py deleted file mode 100644 index 4e241d1..0000000 --- a/venv/Lib/site-packages/pygame/examples/textinput.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/env python -""" pg.examples.textinput - -A little "console" where you can write in text. - -Shows how to use the TEXTEDITING and TEXTINPUT events. -""" -import sys -import pygame as pg -import pygame.freetype as freetype - -# Version check -if pg.get_sdl_version() < (2, 0, 0): - raise Exception("This example requires pygame 2.") - -###CONSTS -# Set to true or add 'showevent' in argv to see IME and KEYDOWN events -PRINT_EVENT = False -# frames per second, the general speed of the program -FPS = 50 -# size of window -WINDOWWIDTH, WINDOWHEIGHT = 640, 480 -BGCOLOR = (0, 0, 0) - -# position of chatlist and chatbox -CHATLIST_POS = pg.Rect(0, 20, WINDOWWIDTH, 400) -CHATBOX_POS = pg.Rect(0, 440, WINDOWWIDTH, 40) -CHATLIST_MAXSIZE = 20 - -TEXTCOLOR = (0, 255, 0) - -# Add fontname for each language, otherwise some text can't be correctly displayed. 
-FONTNAMES = [ - "notosanscjktcregular", - "notosansmonocjktcregular", - "notosansregular,", - "microsoftjhengheimicrosoftjhengheiuilight", - "microsoftyaheimicrosoftyaheiuilight", - "msgothicmsuigothicmspgothic", - "msmincho", - "Arial", -] - -# Initalize -pg.init() -Screen = pg.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT)) -pg.display.set_caption("TextInput example") -FPSClock = pg.time.Clock() - -# Freetype -# "The font name can be a comma separated list of font names to search for." -FONTNAMES = ",".join(str(x) for x in FONTNAMES) -Font = freetype.SysFont(FONTNAMES, 24) -FontSmall = freetype.SysFont(FONTNAMES, 16) -print("Using font: " + Font.name) - -# Main loop process -def main(): - global BGCOLOR, PRINT_EVENT, CHATBOX_POS, CHATLIST_POS, CHATLIST_MAXSIZE - global FPSClock, Font, Screen - - """ - https://wiki.libsdl.org/SDL_HINT_IME_INTERNAL_EDITING - https://wiki.libsdl.org/Tutorials/TextInput - Candidate list not showing due to SDL2 problem ;w; - """ - pg.key.start_text_input() - input_rect = pg.Rect(80, 80, 320, 40) - pg.key.set_text_input_rect(input_rect) - - _IMEEditing = False - _IMEText = "" - _IMETextPos = 0 - _IMEEditingText = "" - _IMEEditingPos = 0 - ChatList = [] - - while True: - for event in pg.event.get(): - if event.type == pg.QUIT: - pg.quit() - return - - elif event.type == pg.KEYDOWN: - if PRINT_EVENT: - print(event) - - if _IMEEditing: - if len(_IMEEditingText) == 0: - _IMEEditing = False - continue - - if event.key == pg.K_BACKSPACE: - if len(_IMEText) > 0 and _IMETextPos > 0: - _IMEText = ( - _IMEText[0 : _IMETextPos - 1] + _IMEText[_IMETextPos:] - ) - _IMETextPos = max(0, _IMETextPos - 1) - - elif event.key == pg.K_DELETE: - _IMEText = _IMEText[0:_IMETextPos] + _IMEText[_IMETextPos + 1 :] - elif event.key == pg.K_LEFT: - _IMETextPos = max(0, _IMETextPos - 1) - elif event.key == pg.K_RIGHT: - _IMETextPos = min(len(_IMEText), _IMETextPos + 1) - # Handle ENTER key - elif event.key in [pg.K_RETURN, pg.K_KP_ENTER]: - # Block if we have no text to append - if len(_IMEText) == 0: - continue - - # Append chat list - ChatList.append(_IMEText) - if len(ChatList) > CHATLIST_MAXSIZE: - ChatList.pop(0) - _IMEText = "" - _IMETextPos = 0 - - elif event.type == pg.TEXTEDITING: - if PRINT_EVENT: - print(event) - _IMEEditing = True - _IMEEditingText = event.text - _IMEEditingPos = event.start - - elif event.type == pg.TEXTINPUT: - if PRINT_EVENT: - print(event) - _IMEEditing = False - _IMEEditingText = "" - _IMEText = _IMEText[0:_IMETextPos] + event.text + _IMEText[_IMETextPos:] - _IMETextPos += len(event.text) - - # Screen updates - Screen.fill(BGCOLOR) - - # Chat List updates - chat_height = CHATLIST_POS.height / CHATLIST_MAXSIZE - for i, chat in enumerate(ChatList): - FontSmall.render_to( - Screen, - (CHATLIST_POS.x, CHATLIST_POS.y + i * chat_height), - chat, - TEXTCOLOR, - ) - - # Chat box updates - start_pos = CHATBOX_POS.copy() - ime_textL = ">" + _IMEText[0:_IMETextPos] - ime_textM = ( - _IMEEditingText[0:_IMEEditingPos] + "|" + _IMEEditingText[_IMEEditingPos:] - ) - ime_textR = _IMEText[_IMETextPos:] - - rect_textL = Font.render_to(Screen, start_pos, ime_textL, TEXTCOLOR) - start_pos.x += rect_textL.width - - # Editing texts should be underlined - rect_textM = Font.render_to( - Screen, start_pos, ime_textM, TEXTCOLOR, None, freetype.STYLE_UNDERLINE - ) - start_pos.x += rect_textM.width - Font.render_to(Screen, start_pos, ime_textR, TEXTCOLOR) - - pg.display.update() - - FPSClock.tick(FPS) - - -if __name__ == "__main__": - if "showevent" in sys.argv: - PRINT_EVENT = True 
- - main() diff --git a/venv/Lib/site-packages/pygame/examples/vgrade.py b/venv/Lib/site-packages/pygame/examples/vgrade.py deleted file mode 100644 index 9618c45..0000000 --- a/venv/Lib/site-packages/pygame/examples/vgrade.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python -""" pg.examples.vgrade - -This example demonstrates creating an image with numpy -python, and displaying that through SDL. You can look at the -method of importing numpy and pg.surfarray. This method -will fail 'gracefully' if it is not available. -I've tried mixing in a lot of comments where the code might -not be self explanatory, nonetheless it may still seem a bit -strange. Learning to use numpy for images like this takes a -bit of learning, but the payoff is extremely fast image -manipulation in python. - -For Pygame 1.9.2 and up, this example also showcases a new feature -of surfarray.blit_surface: array broadcasting. If a source array -has either a width or height of 1, the array is repeatedly blitted -to the surface along that dimension to fill the surface. In fact, -a (1, 1) or (1, 1, 3) array results in a simple surface color fill. - -Just so you know how this breaks down. For each sampling of -time, 30% goes to each creating the gradient and blitting the -array. The final 40% goes to flipping/updating the display surface - -The window will have no border decorations. - -The code also demonstrates use of the timer events. -""" - - -import os -import pygame as pg - -try: - import numpy as np - import numpy.random as np_random -except ImportError: - raise SystemExit("This example requires numpy and the pygame surfarray module") - -timer = 0 - - -def stopwatch(message=None): - "simple routine to time python code" - global timer - if not message: - timer = pg.time.get_ticks() - return - now = pg.time.get_ticks() - runtime = (now - timer) / 1000.0 + 0.001 - print("%s %s %s" % (message, runtime, ("seconds\t(%.2ffps)" % (1.0 / runtime)))) - timer = now - - -def VertGradientColumn(surf, topcolor, bottomcolor): - "creates a new 3d vertical gradient array" - topcolor = np.array(topcolor, copy=False) - bottomcolor = np.array(bottomcolor, copy=False) - diff = bottomcolor - topcolor - width, height = surf.get_size() - # create array from 0.0 to 1.0 triplets - column = np.arange(height, dtype="float") / height - column = np.repeat(column[:, np.newaxis], [3], 1) - # create a single column of gradient - column = topcolor + (diff * column).astype("int") - # make the column a 3d image column by adding X - column = column.astype("uint8")[np.newaxis, :, :] - # 3d array into 2d array - return pg.surfarray.map_array(surf, column) - - -def DisplayGradient(surf): - "choose random colors and show them" - stopwatch() - colors = np_random.randint(0, 255, (2, 3)) - column = VertGradientColumn(surf, colors[0], colors[1]) - pg.surfarray.blit_array(surf, column) - pg.display.flip() - stopwatch("Gradient:") - - -def main(): - pg.init() - pg.mixer.quit() # remove ALSA underflow messages for Debian squeeze - size = 600, 400 - os.environ["SDL_VIDEO_CENTERED"] = "1" - screen = pg.display.set_mode(size, pg.NOFRAME, 0) - - pg.event.set_blocked(pg.MOUSEMOTION) # keep our queue cleaner - pg.time.set_timer(pg.USEREVENT, 500) - - while 1: - event = pg.event.wait() - if event.type in (pg.QUIT, pg.KEYDOWN, pg.MOUSEBUTTONDOWN): - break - elif event.type == pg.USEREVENT: - DisplayGradient(screen) - - pg.quit() - - -if __name__ == "__main__": - main() diff --git a/venv/Lib/site-packages/pygame/examples/video.py 
b/venv/Lib/site-packages/pygame/examples/video.py deleted file mode 100644 index 3d4b9f1..0000000 --- a/venv/Lib/site-packages/pygame/examples/video.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python -""" pg.examples.video - -Experimental! - -* dialog message boxes with messagebox. -* multiple windows with Window -* driver selection -* Renderer, Texture, and Image classes -* Drawing lines, rects, and such onto Renderers. -""" -import os -import pygame as pg - -if pg.get_sdl_version()[0] < 2: - raise SystemExit( - "This example requires pygame 2 and SDL2. _sdl2 is experimental and will change." - ) -from pygame._sdl2 import Window, Texture, Image, Renderer, get_drivers, messagebox - -data_dir = os.path.join(os.path.split(os.path.abspath(__file__))[0], "data") - - -def load_img(file): - return pg.image.load(os.path.join(data_dir, file)) - - -pg.display.init() -pg.key.set_repeat(1000, 10) - -for driver in get_drivers(): - print(driver) - -import random - -answer = messagebox( - "I will open two windows! Continue?", - "Hello!", - info=True, - buttons=("Yes", "No", "Chance"), - return_button=0, - escape_button=1, -) -if answer == 1 or (answer == 2 and random.random() < 0.5): - import sys - - sys.exit(0) - -win = Window("asdf", resizable=True) -renderer = Renderer(win) -tex = Texture.from_surface(renderer, load_img("alien1.gif")) - -running = True - -x, y = 250, 50 -clock = pg.time.Clock() - -backgrounds = [(255, 0, 0, 255), (0, 255, 0, 255), (0, 0, 255, 255)] -bg_index = 0 - -renderer.draw_color = backgrounds[bg_index] - -win2 = Window("2nd window", size=(256, 256), always_on_top=True) -win2.opacity = 0.5 -win2.set_icon(load_img("bomb.gif")) -renderer2 = Renderer(win2) -tex2 = Texture.from_surface(renderer2, load_img("asprite.bmp")) -renderer2.clear() -tex2.draw() -renderer2.present() -del tex2 - -full = 0 - -tex = Image(tex) - - -surf = pg.Surface((64, 64)) -streamtex = Texture(renderer, (64, 64), streaming=True) -tex_update_interval = 1000 -next_tex_update = pg.time.get_ticks() - - -while running: - for event in pg.event.get(): - if event.type == pg.QUIT: - running = False - elif getattr(event, "window", None) == win2: - if ( - event.type == pg.KEYDOWN - and event.key == pg.K_ESCAPE - or event.type == pg.WINDOWCLOSE - ): - win2.destroy() - elif event.type == pg.KEYDOWN: - if event.key == pg.K_ESCAPE: - running = False - elif event.key == pg.K_LEFT: - x -= 5 - elif event.key == pg.K_RIGHT: - x += 5 - elif event.key == pg.K_DOWN: - y += 5 - elif event.key == pg.K_UP: - y -= 5 - elif event.key == pg.K_f: - if full == 0: - win.set_fullscreen(True) - full = 1 - else: - win.set_windowed() - full = 0 - elif event.key == pg.K_s: - readsurf = renderer.to_surface() - pg.image.save(readsurf, "test.png") - - elif event.key == pg.K_SPACE: - bg_index = (bg_index + 1) % len(backgrounds) - renderer.draw_color = backgrounds[bg_index] - - renderer.clear() - - # update texture - curtime = pg.time.get_ticks() - if curtime >= next_tex_update: - for x_ in range(streamtex.width // 4): - for y_ in range(streamtex.height // 4): - newcol = ( - random.randint(0, 255), - random.randint(0, 255), - random.randint(0, 255), - 255, - ) - area = (4 * x_, 4 * y_, 4, 4) - surf.fill(newcol, area) - streamtex.update(surf) - next_tex_update = curtime + tex_update_interval - streamtex.draw(dstrect=pg.Rect(64, 128, 64, 64)) - - tex.draw(dstrect=(x, y)) - - # TODO: should these be? 
- # - line instead of draw_line - # - point instead of draw_point - # - rect(rect, width=1)->draw 1 pixel, instead of draw_rect - # - rect(rect, width=0)->filled ? , instead of fill_rect - # - # TODO: should these work with pg.draw.line(renderer, ...) functions? - renderer.draw_color = (255, 255, 255, 255) - renderer.draw_line((0, 0), (64, 64)) - renderer.draw_line((64, 64), (128, 0)) - renderer.draw_point((72, 32)) - renderer.draw_rect(pg.Rect(0, 64, 64, 64)) - renderer.fill_rect(pg.Rect(0, 128, 64, 64)) - renderer.draw_color = backgrounds[bg_index] - - renderer.present() - - clock.tick(60) - win.title = str("FPS: {}".format(clock.get_fps())) - -pg.quit() diff --git a/venv/Lib/site-packages/pygame/fastevent.py b/venv/Lib/site-packages/pygame/fastevent.py deleted file mode 100644 index e102fc4..0000000 --- a/venv/Lib/site-packages/pygame/fastevent.py +++ /dev/null @@ -1,88 +0,0 @@ -""" -A compatibility shim for pygame.fastevent based on pygame.event. -This module was deprecated in pygame 2.2, and is scheduled for removal in a -future pygame version. If you are using pygame.fastevent, please migrate to -using regular pygame.event module -""" - -import pygame.event -import pygame.display -from pygame import error, register_quit -from pygame.event import Event - -_ft_init = False - - -def _ft_init_check(): - """ - Raises error if module is not init - """ - if not _ft_init: - raise error("fastevent system not initialized") - - -def _quit_hook(): - """ - Hook that gets run to quit module - """ - global _ft_init - _ft_init = False - - -def init(): - """init() -> None - initialize pygame.fastevent - """ - global _ft_init - if not pygame.display.get_init(): - raise error("video system not initialized") - - register_quit(_quit_hook) - _ft_init = True - - -def get_init(): - """get_init() -> bool - returns True if the fastevent module is currently initialized - """ - return _ft_init - - -def pump(): - """pump() -> None - internally process pygame event handlers - """ - _ft_init_check() - pygame.event.pump() - - -def wait(): - """wait() -> Event - wait for an event - """ - _ft_init_check() - return pygame.event.wait() - - -def poll(): - """poll() -> Event - get an available event - """ - _ft_init_check() - return pygame.event.poll() - - -def get(): - """get() -> list of Events - get all events from the queue - """ - _ft_init_check() - return pygame.event.get() - - -def post(event: Event): - """post(Event) -> None - place an event on the queue - """ - _ft_init_check() - pygame.event.post(event) diff --git a/venv/Lib/site-packages/pygame/fastevent.pyi b/venv/Lib/site-packages/pygame/fastevent.pyi deleted file mode 100644 index 9ef2e26..0000000 --- a/venv/Lib/site-packages/pygame/fastevent.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from typing import List - -from pygame.event import Event - -def init() -> None: ... -def get_init() -> bool: ... -def pump() -> None: ... -def wait() -> Event: ... -def pool() -> Event: ... -def get() -> List[Event]: ... -def post(event: Event) -> None: ... 
diff --git a/venv/Lib/site-packages/pygame/font.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/font.cp39-win_amd64.pyd deleted file mode 100644 index 0a900ac..0000000 Binary files a/venv/Lib/site-packages/pygame/font.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/font.pyi b/venv/Lib/site-packages/pygame/font.pyi deleted file mode 100644 index 9002ed7..0000000 --- a/venv/Lib/site-packages/pygame/font.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Hashable, Iterable, List, Optional, Tuple, Union - -from pygame.surface import Surface - -from ._common import _ColorValue, _FileArg - -def init() -> None: ... -def quit() -> None: ... -def get_init() -> bool: ... -def get_default_font() -> str: ... -def get_fonts() -> List[str]: ... -def match_font( - name: Union[str, bytes, Iterable[Union[str, bytes]]], - bold: Hashable = False, - italic: Hashable = False, -) -> str: ... -def SysFont( - name: Union[str, bytes, Iterable[Union[str, bytes]]], - size: int, - bold: Hashable = False, - italic: Hashable = False, -) -> Font: ... - -class Font(object): - - bold: bool - italic: bool - underline: bool - def __init__(self, name: Optional[_FileArg], size: int) -> None: ... - def render( - self, - text: Union[str, bytes], - antialias: bool, - color: _ColorValue, - background: Optional[_ColorValue] = None, - ) -> Surface: ... - def size(self, text: Union[str, bytes]) -> Tuple[int, int]: ... - def set_underline(self, value: bool) -> None: ... - def get_underline(self) -> bool: ... - def set_bold(self, value: bool) -> None: ... - def get_bold(self) -> bool: ... - def set_italic(self, value: bool) -> None: ... - def metrics( - self, text: Union[str, bytes] - ) -> List[Tuple[int, int, int, int, int]]: ... - def get_italic(self) -> bool: ... - def get_linesize(self) -> int: ... - def get_height(self) -> int: ... - def get_ascent(self) -> int: ... - def get_descent(self) -> int: ... diff --git a/venv/Lib/site-packages/pygame/freesansbold.ttf b/venv/Lib/site-packages/pygame/freesansbold.ttf deleted file mode 100644 index a98562f..0000000 Binary files a/venv/Lib/site-packages/pygame/freesansbold.ttf and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/freetype.py b/venv/Lib/site-packages/pygame/freetype.py deleted file mode 100644 index c1b5f59..0000000 --- a/venv/Lib/site-packages/pygame/freetype.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Enhanced Pygame module for loading and rendering computer fonts""" - -from pygame._freetype import ( - Font, - STYLE_NORMAL, - STYLE_OBLIQUE, - STYLE_STRONG, - STYLE_UNDERLINE, - STYLE_WIDE, - STYLE_DEFAULT, - init, - quit, - get_init, - was_init, - get_cache_size, - get_default_font, - get_default_resolution, - get_error, - get_version, - set_default_resolution, -) -from pygame.sysfont import match_font, get_fonts, SysFont as _SysFont - -__all__ = [ - "Font", - "STYLE_NORMAL", - "STYLE_OBLIQUE", - "STYLE_STRONG", - "STYLE_UNDERLINE", - "STYLE_WIDE", - "STYLE_DEFAULT", - "init", - "quit", - "get_init", - "was_init", - "get_cache_size", - "get_default_font", - "get_default_resolution", - "get_error", - "get_version", - "set_default_resolution", - "match_font", - "get_fonts", -] - - -def SysFont(name, size, bold=0, italic=0, constructor=None): - """pygame.ftfont.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font - Create a pygame Font from system font resources. - - This will search the system fonts for the given font - name. 
You can also enable bold or italic styles, and - the appropriate system font will be selected if available. - - This will always return a valid Font object, and will - fallback on the builtin pygame font if the given font - is not found. - - Name can also be an iterable of font names, a string of - comma-separated font names, or a bytes of comma-separated - font names, in which case the set of names will be searched - in order. Pygame uses a small set of common font aliases. If the - specific font you ask for is not available, a reasonable - alternative may be used. - - If optional constructor is provided, it must be a function with - signature constructor(fontpath, size, bold, italic) which returns - a Font instance. If None, a pygame.freetype.Font object is created. - """ - if constructor is None: - - def constructor(fontpath, size, bold, italic): - font = Font(fontpath, size) - font.strong = bold - font.oblique = italic - return font - - return _SysFont(name, size, bold, italic, constructor) diff --git a/venv/Lib/site-packages/pygame/freetype.pyi b/venv/Lib/site-packages/pygame/freetype.pyi deleted file mode 100644 index dd77f55..0000000 --- a/venv/Lib/site-packages/pygame/freetype.pyi +++ /dev/null @@ -1,122 +0,0 @@ -from typing import Any, Iterable, List, Optional, Text, Tuple, Union - -from pygame.color import Color -from pygame.font import Font -from pygame.rect import Rect -from pygame.surface import Surface - -from ._common import _ColorValue, _FileArg, _CanBeRect - -def get_error() -> str: ... -def get_version() -> Tuple[int, int, int]: ... -def init(cache_size: int = 64, resolution: int = 72) -> None: ... -def quit() -> None: ... -def get_init() -> bool: ... -def was_init() -> bool: ... -def get_cache_size() -> int: ... -def get_default_resolution() -> int: ... -def set_default_resolution(resolution: int) -> None: ... -def SysFont( - name: Union[str, bytes, Iterable[Union[str, bytes]]], - size: int, - bold: int = False, - italic: int = False, -) -> Font: ... -def get_default_font() -> str: ... - -STYLE_NORMAL: int -STYLE_UNDERLINE: int -STYLE_OBLIQUE: int -STYLE_STRONG: int -STYLE_WIDE: int -STYLE_DEFAULT: int - -class Font: - name: str - path: Text - size: Union[float, Tuple[float, float]] - height: int - ascender: int - descender: int - style: int - underline: bool - strong: bool - oblique: bool - wide: bool - strength: float - underline_adjustment: float - fixed_width: bool - fixed_sizes: int - scalable: bool - use_bitmap_strikes: bool - antialiased: bool - kerning: bool - vertical: bool - rotation: int - fgcolor: Color - bgcolor: Color - origin: bool - pad: bool - ucs4: bool - resolution: int - def __init__( - self, - file: Optional[_FileArg], - size: float = 0, - font_index: int = 0, - resolution: int = 0, - ucs4: int = False, - ) -> None: ... - def get_rect( - self, - text: str, - style: int = STYLE_DEFAULT, - rotation: int = 0, - size: float = 0, - ) -> Rect: ... - def get_metrics( - self, text: str, size: float = 0 - ) -> List[Tuple[int, int, int, int, float, float]]: ... - def get_sized_ascender(self, size: float) -> int: ... - def get_sized_descender(self, size: float) -> int: ... - def get_sized_height(self, size: float) -> int: ... - def get_sized_glyph_height(self, size: float) -> int: ... - def get_sizes(self) -> List[Tuple[int, int, int, float, float]]: ... 
- def render( - self, - text: str, - fgcolor: Optional[_ColorValue] = None, - bgcolor: Optional[_ColorValue] = None, - style: int = STYLE_DEFAULT, - rotation: int = 0, - size: float = 0, - ) -> Tuple[Surface, Rect]: ... - def render_to( - self, - surf: Surface, - dest: _CanBeRect, - text: str, - fgcolor: Optional[_ColorValue] = None, - bgcolor: Optional[_ColorValue] = None, - style: int = STYLE_DEFAULT, - rotation: int = 0, - size: float = 0, - ) -> Rect: ... - def render_raw( - self, - text: str, - style: int = STYLE_DEFAULT, - rotation: int = 0, - size: float = 0, - invert: bool = False, - ) -> Tuple[bytes, Tuple[int, int]]: ... - def render_raw_to( - self, - array: Any, - text: str, - dest: Optional[_CanBeRect] = None, - style: int = STYLE_DEFAULT, - rotation: int = 0, - size: float = 0, - invert: bool = False, - ) -> Rect: ... diff --git a/venv/Lib/site-packages/pygame/ftfont.py b/venv/Lib/site-packages/pygame/ftfont.py deleted file mode 100644 index e648ede..0000000 --- a/venv/Lib/site-packages/pygame/ftfont.py +++ /dev/null @@ -1,203 +0,0 @@ -"""pygame module for loading and rendering fonts (freetype alternative)""" - -__all__ = [ - "Font", - "init", - "quit", - "get_default_font", - "get_init", - "SysFont", - "match_font", - "get_fonts", -] - -from pygame._freetype import init, Font as _Font, get_default_resolution -from pygame._freetype import quit, get_default_font, get_init as _get_init -from pygame._freetype import __PYGAMEinit__ -from pygame.sysfont import match_font, get_fonts, SysFont as _SysFont -from pygame import encode_file_path - - -class Font(_Font): - """Font(filename, size) -> Font - Font(object, size) -> Font - create a new Font object from a file (freetype alternative) - - This Font type differs from font.Font in that it can render glyphs - for Unicode code points in the supplementary planes (> 0xFFFF). 
- """ - - __encode_file_path = staticmethod(encode_file_path) - __get_default_resolution = staticmethod(get_default_resolution) - __default_font = encode_file_path(get_default_font()) - - __unull = "\x00" - __bnull = b"\x00" - - def __init__(self, file, size=-1): - size = max(size, 1) - if isinstance(file, str): - try: - bfile = self.__encode_file_path(file, ValueError) - except ValueError: - bfile = "" - else: - bfile = file - if isinstance(bfile, bytes) and bfile == self.__default_font: - file = None - if file is None: - resolution = int(self.__get_default_resolution() * 0.6875) - if resolution == 0: - resolution = 1 - else: - resolution = 0 - super(Font, self).__init__(file, size=size, resolution=resolution) - self.strength = 1.0 / 12.0 - self.kerning = False - self.origin = True - self.pad = True - self.ucs4 = True - self.underline_adjustment = 1.0 - - def render(self, text, antialias, color, background=None): - """render(text, antialias, color, background=None) -> Surface - draw text on a new Surface""" - - if text is None: - text = "" - if isinstance(text, str) and self.__unull in text: - raise ValueError("A null character was found in the text") - if isinstance(text, bytes) and self.__bnull in text: - raise ValueError("A null character was found in the text") - save_antialiased = ( - self.antialiased # pylint: disable = access-member-before-definition - ) - self.antialiased = bool(antialias) - try: - s, _ = super(Font, self).render(text, color, background) - return s - finally: - self.antialiased = save_antialiased - - def set_bold(self, value): - """set_bold(bool) -> None - enable fake rendering of bold text""" - - self.wide = bool(value) - - def get_bold(self): - """get_bold() -> bool - check if text will be rendered bold""" - - return self.wide - - bold = property(get_bold, set_bold) - - def set_italic(self, value): - """set_italic(bool) -> None - enable fake rendering of italic text""" - - self.oblique = bool(value) - - def get_italic(self): - """get_italic() -> bool - check if the text will be rendered italic""" - - return self.oblique - - italic = property(get_italic, set_italic) - - def set_underline(self, value): - """set_underline(bool) -> None - control if text is rendered with an underline""" - - self.underline = bool(value) - - def get_underline(self): - """set_bold(bool) -> None - enable fake rendering of bold text""" - - return self.underline - - def metrics(self, text): - """metrics(text) -> list - Gets the metrics for each character in the passed string.""" - - return self.get_metrics(text) - - def get_ascent(self): - """get_ascent() -> int - get the ascent of the font""" - - return self.get_sized_ascender() - - def get_descent(self): - """get_descent() -> int - get the descent of the font""" - - return self.get_sized_descender() - - def get_height(self): - """get_height() -> int - get the height of the font""" - - return self.get_sized_ascender() - self.get_sized_descender() + 1 - - def get_linesize(self): - """get_linesize() -> int - get the line space of the font text""" - - return self.get_sized_height() - - def size(self, text): - """size(text) -> (width, height) - determine the amount of space needed to render text""" - - return self.get_rect(text).size - - -FontType = Font - - -def get_init(): - """get_init() -> bool - true if the font module is initialized""" - - return _get_init() - - -def SysFont(name, size, bold=0, italic=0, constructor=None): - """pygame.ftfont.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font - Create a pygame Font 
from system font resources. - - This will search the system fonts for the given font - name. You can also enable bold or italic styles, and - the appropriate system font will be selected if available. - - This will always return a valid Font object, and will - fallback on the builtin pygame font if the given font - is not found. - - Name can also be an iterable of font names, a string of - comma-separated font names, or a bytes of comma-separated - font names, in which case the set of names will be searched - in order. Pygame uses a small set of common font aliases. If the - specific font you ask for is not available, a reasonable - alternative may be used. - - If optional constructor is provided, it must be a function with - signature constructor(fontpath, size, bold, italic) which returns - a Font instance. If None, a pygame.ftfont.Font object is created. - """ - if constructor is None: - - def constructor(fontpath, size, bold, italic): - font = Font(fontpath, size) - font.set_bold(bold) - font.set_italic(italic) - return font - - return _SysFont(name, size, bold, italic, constructor) - - -del _Font, get_default_resolution, encode_file_path diff --git a/venv/Lib/site-packages/pygame/gfxdraw.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/gfxdraw.cp39-win_amd64.pyd deleted file mode 100644 index cd47f43..0000000 Binary files a/venv/Lib/site-packages/pygame/gfxdraw.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/gfxdraw.pyi b/venv/Lib/site-packages/pygame/gfxdraw.pyi deleted file mode 100644 index 7da2d0f..0000000 --- a/venv/Lib/site-packages/pygame/gfxdraw.pyi +++ /dev/null @@ -1,91 +0,0 @@ -from typing import Sequence - -from pygame.surface import Surface - -from ._common import _ColorValue, _Coordinate, _RectValue - -def pixel(surface: Surface, x: int, y: int, color: _ColorValue) -> None: ... -def hline(surface: Surface, x1: int, x2: int, y: int, color: _ColorValue) -> None: ... -def vline(surface: Surface, x: int, y1: int, y2: int, color: _ColorValue) -> None: ... -def line( - surface: Surface, x1: int, y1: int, x2: int, y2: int, color: _ColorValue -) -> None: ... -def rectangle(surface: Surface, rect: _RectValue, color: _ColorValue) -> None: ... -def box(surface: Surface, rect: _RectValue, color: _ColorValue) -> None: ... -def circle(surface: Surface, x: int, y: int, r: int, color: _ColorValue) -> None: ... -def aacircle(surface: Surface, x: int, y: int, r: int, color: _ColorValue) -> None: ... -def filled_circle( - surface: Surface, x: int, y: int, r: int, color: _ColorValue -) -> None: ... -def ellipse( - surface: Surface, x: int, y: int, rx: int, ry: int, color: _ColorValue -) -> None: ... -def aaellipse( - surface: Surface, x: int, y: int, rx: int, ry: int, color: _ColorValue -) -> None: ... -def filled_ellipse( - surface: Surface, x: int, y: int, rx: int, ry: int, color: _ColorValue -) -> None: ... -def arc( - surface: Surface, - x: int, - y: int, - r: int, - start_angle: int, - atp_angle: int, - color: _ColorValue, -) -> None: ... -def pie( - surface: Surface, - x: int, - y: int, - r: int, - start_angle: int, - atp_angle: int, - color: _ColorValue, -) -> None: ... -def trigon( - surface: Surface, - x1: int, - y1: int, - x2: int, - y2: int, - x3: int, - y3: int, - color: _ColorValue, -) -> None: ... -def aatrigon( - surface: Surface, - x1: int, - y1: int, - x2: int, - y2: int, - x3: int, - y3: int, - color: _ColorValue, -) -> None: ... 
-def filled_trigon( - surface: Surface, - x1: int, - y1: int, - x2: int, - y2: int, - x3: int, - y3: int, - color: _ColorValue, -) -> None: ... -def polygon( - surface: Surface, points: Sequence[_Coordinate], color: _ColorValue -) -> None: ... -def aapolygon( - surface: Surface, points: Sequence[_Coordinate], color: _ColorValue -) -> None: ... -def filled_polygon( - surface: Surface, points: Sequence[_Coordinate], color: _ColorValue -) -> None: ... -def textured_polygon( - surface: Surface, points: Sequence[_Coordinate], texture: Surface, tx: int, ty: int -) -> None: ... -def bezier( - surface: Surface, points: Sequence[_Coordinate], steps: int, color: _ColorValue -) -> None: ... diff --git a/venv/Lib/site-packages/pygame/image.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/image.cp39-win_amd64.pyd deleted file mode 100644 index c08a92a..0000000 Binary files a/venv/Lib/site-packages/pygame/image.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/image.pyi b/venv/Lib/site-packages/pygame/image.pyi deleted file mode 100644 index b99a711..0000000 --- a/venv/Lib/site-packages/pygame/image.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import List, Literal, Tuple, Union - -from pygame.bufferproxy import BufferProxy -from pygame.surface import Surface - -from ._common import _FileArg - -_BufferStyle = Union[BufferProxy, bytes, bytearray, memoryview] -_to_string_format = Literal[ - "P", "RGB", "RGBX", "RGBA", "ARGB", "RGBA_PREMULT", "ARGB_PREMULT" -] -_from_buffer_format = Literal["P", "RGB", "BGR", "RGBX", "RGBA", "ARGB"] -_from_string_format = Literal["P", "RGB", "RGBX", "RGBA", "ARGB"] - -def load(filename: _FileArg, namehint: str = "") -> Surface: ... -def save(surface: Surface, filename: _FileArg, namehint: str = "") -> None: ... -def get_sdl_image_version() -> Union[None, Tuple[int, int, int]]: ... -def get_extended() -> bool: ... -def tostring( - surface: Surface, format: _to_string_format, flipped: bool = False -) -> str: ... -def fromstring( - string: str, - size: Union[List[int], Tuple[int, int]], - format: _from_string_format, - flipped: bool = False, -) -> Surface: ... -def frombuffer( - bytes: _BufferStyle, - size: Union[List[int], Tuple[int, int]], - format: _from_buffer_format, -) -> Surface: ... -def load_basic(filename: _FileArg) -> Surface: ... -def load_extended(filename: _FileArg, namehint: str = "") -> Surface: ... -def save_extended(surface: Surface, filename: _FileArg, namehint: str = "") -> None: ... diff --git a/venv/Lib/site-packages/pygame/imageext.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/imageext.cp39-win_amd64.pyd deleted file mode 100644 index 0c32289..0000000 Binary files a/venv/Lib/site-packages/pygame/imageext.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/joystick.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/joystick.cp39-win_amd64.pyd deleted file mode 100644 index 1b10aa6..0000000 Binary files a/venv/Lib/site-packages/pygame/joystick.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/joystick.pyi b/venv/Lib/site-packages/pygame/joystick.pyi deleted file mode 100644 index 9ca8532..0000000 --- a/venv/Lib/site-packages/pygame/joystick.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Tuple - -def init() -> None: ... -def quit() -> None: ... -def get_init() -> bool: ... -def get_count() -> int: ... - -class Joystick(object): - def __init__(self, id: int) -> None: ... - def init(self) -> None: ... - def quit(self) -> None: ... 
- def get_init(self) -> bool: ... - def get_id(self) -> int: ... - def get_instance_id(self) -> int: ... - def get_guid(self) -> str: ... - def get_power_level(self) -> str: ... - def get_name(self) -> str: ... - def get_numaxes(self) -> int: ... - def get_axis(self, axis_number: int) -> float: ... - def get_numballs(self) -> int: ... - def get_ball(self, ball_number: int) -> Tuple[float, float]: ... - def get_numbuttons(self) -> int: ... - def get_button(self, button: int) -> bool: ... - def get_numhats(self) -> int: ... - def get_hat(self, hat_number: int) -> Tuple[float, float]: ... - def rumble( - self, low_frequency: float, high_frequency: float, duration: int - ) -> bool: ... - def stop_rumble(self) -> None: ... diff --git a/venv/Lib/site-packages/pygame/key.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/key.cp39-win_amd64.pyd deleted file mode 100644 index f234e9e..0000000 Binary files a/venv/Lib/site-packages/pygame/key.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/key.pyi b/venv/Lib/site-packages/pygame/key.pyi deleted file mode 100644 index d93beec..0000000 --- a/venv/Lib/site-packages/pygame/key.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Sequence, Tuple - -from ._common import _RectValue - -def get_focused() -> bool: ... -def get_pressed() -> Sequence[bool]: ... -def get_mods() -> int: ... -def set_mods(mods: int) -> None: ... -def set_repeat(delay: int = 0, interval: int = 0) -> None: ... -def get_repeat() -> Tuple[int, int]: ... -def name(key: int) -> str: ... -def key_code(name: str) -> int: ... -def start_text_input() -> None: ... -def stop_text_input() -> None: ... -def set_text_input_rect(rect: _RectValue) -> None: ... diff --git a/venv/Lib/site-packages/pygame/libFLAC-8.dll b/venv/Lib/site-packages/pygame/libFLAC-8.dll deleted file mode 100644 index 71f2e19..0000000 Binary files a/venv/Lib/site-packages/pygame/libFLAC-8.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libfreetype-6.dll b/venv/Lib/site-packages/pygame/libfreetype-6.dll deleted file mode 100644 index 16ef777..0000000 Binary files a/venv/Lib/site-packages/pygame/libfreetype-6.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libjpeg-9.dll b/venv/Lib/site-packages/pygame/libjpeg-9.dll deleted file mode 100644 index 9a05528..0000000 Binary files a/venv/Lib/site-packages/pygame/libjpeg-9.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libmodplug-1.dll b/venv/Lib/site-packages/pygame/libmodplug-1.dll deleted file mode 100644 index 7c05126..0000000 Binary files a/venv/Lib/site-packages/pygame/libmodplug-1.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libmpg123-0.dll b/venv/Lib/site-packages/pygame/libmpg123-0.dll deleted file mode 100644 index c7809b1..0000000 Binary files a/venv/Lib/site-packages/pygame/libmpg123-0.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libogg-0.dll b/venv/Lib/site-packages/pygame/libogg-0.dll deleted file mode 100644 index 5133481..0000000 Binary files a/venv/Lib/site-packages/pygame/libogg-0.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libopus-0.dll b/venv/Lib/site-packages/pygame/libopus-0.dll deleted file mode 100644 index 9ba6c38..0000000 Binary files a/venv/Lib/site-packages/pygame/libopus-0.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libopusfile-0.dll b/venv/Lib/site-packages/pygame/libopusfile-0.dll deleted file mode 100644 index 97a88b6..0000000 Binary files 
a/venv/Lib/site-packages/pygame/libopusfile-0.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libpng16-16.dll b/venv/Lib/site-packages/pygame/libpng16-16.dll deleted file mode 100644 index 709f724..0000000 Binary files a/venv/Lib/site-packages/pygame/libpng16-16.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libtiff-5.dll b/venv/Lib/site-packages/pygame/libtiff-5.dll deleted file mode 100644 index fc8a7c0..0000000 Binary files a/venv/Lib/site-packages/pygame/libtiff-5.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libvorbis-0.dll b/venv/Lib/site-packages/pygame/libvorbis-0.dll deleted file mode 100644 index f5ae1bf..0000000 Binary files a/venv/Lib/site-packages/pygame/libvorbis-0.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libvorbisfile-3.dll b/venv/Lib/site-packages/pygame/libvorbisfile-3.dll deleted file mode 100644 index d078736..0000000 Binary files a/venv/Lib/site-packages/pygame/libvorbisfile-3.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/libwebp-7.dll b/venv/Lib/site-packages/pygame/libwebp-7.dll deleted file mode 100644 index fad57b2..0000000 Binary files a/venv/Lib/site-packages/pygame/libwebp-7.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/locals.py b/venv/Lib/site-packages/pygame/locals.py deleted file mode 100644 index 66e6c24..0000000 --- a/venv/Lib/site-packages/pygame/locals.py +++ /dev/null @@ -1,576 +0,0 @@ -# pygame - Python Game Library -# Copyright (C) 2000-2003 Pete Shinners -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Library General Public -# License as published by the Free Software Foundation; either -# version 2 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Library General Public License for more details. 
-# -# You should have received a copy of the GNU Library General Public -# License along with this library; if not, write to the Free -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# Pete Shinners -# pete@shinners.org - - -"""Set of functions from PyGame that are handy to have in -the local namespace for your module""" - -from pygame.constants import * # pylint: disable=wildcard-import; lgtm[py/polluting-import] -from pygame.rect import Rect -from pygame import color - -Color = color.Color - - -__all__ = [ - "Rect", - "Color", - "ACTIVEEVENT", - "ANYFORMAT", - "APPACTIVE", - "APPFOCUSMOUSE", - "APPINPUTFOCUS", - "ASYNCBLIT", - "AUDIODEVICEADDED", - "AUDIODEVICEREMOVED", - "AUDIO_ALLOW_ANY_CHANGE", - "AUDIO_ALLOW_CHANNELS_CHANGE", - "AUDIO_ALLOW_FORMAT_CHANGE", - "AUDIO_ALLOW_FREQUENCY_CHANGE", - "AUDIO_S16", - "AUDIO_S16LSB", - "AUDIO_S16MSB", - "AUDIO_S16SYS", - "AUDIO_S8", - "AUDIO_U16", - "AUDIO_U16LSB", - "AUDIO_U16MSB", - "AUDIO_U16SYS", - "AUDIO_U8", - "BIG_ENDIAN", - "BLENDMODE_ADD", - "BLENDMODE_BLEND", - "BLENDMODE_MOD", - "BLENDMODE_NONE", - "BLEND_ADD", - "BLEND_MAX", - "BLEND_MIN", - "BLEND_MULT", - "BLEND_PREMULTIPLIED", - "BLEND_ALPHA_SDL2", - "BLEND_RGBA_ADD", - "BLEND_RGBA_MAX", - "BLEND_RGBA_MIN", - "BLEND_RGBA_MULT", - "BLEND_RGBA_SUB", - "BLEND_RGB_ADD", - "BLEND_RGB_MAX", - "BLEND_RGB_MIN", - "BLEND_RGB_MULT", - "BLEND_RGB_SUB", - "BLEND_SUB", - "BUTTON_LEFT", - "BUTTON_MIDDLE", - "BUTTON_RIGHT", - "BUTTON_WHEELDOWN", - "BUTTON_WHEELUP", - "BUTTON_X1", - "BUTTON_X2", - "CONTROLLERAXISMOTION", - "CONTROLLERBUTTONDOWN", - "CONTROLLERBUTTONUP", - "CONTROLLERDEVICEADDED", - "CONTROLLERDEVICEREMAPPED", - "CONTROLLERDEVICEREMOVED", - "CONTROLLERTOUCHPADDOWN", - "CONTROLLERTOUCHPADMOTION", - "CONTROLLERTOUCHPADUP", - "CONTROLLER_AXIS_INVALID", - "CONTROLLER_AXIS_LEFTX", - "CONTROLLER_AXIS_LEFTY", - "CONTROLLER_AXIS_MAX", - "CONTROLLER_AXIS_RIGHTX", - "CONTROLLER_AXIS_RIGHTY", - "CONTROLLER_AXIS_TRIGGERLEFT", - "CONTROLLER_AXIS_TRIGGERRIGHT", - "CONTROLLER_BUTTON_A", - "CONTROLLER_BUTTON_B", - "CONTROLLER_BUTTON_BACK", - "CONTROLLER_BUTTON_DPAD_DOWN", - "CONTROLLER_BUTTON_DPAD_LEFT", - "CONTROLLER_BUTTON_DPAD_RIGHT", - "CONTROLLER_BUTTON_DPAD_UP", - "CONTROLLER_BUTTON_GUIDE", - "CONTROLLER_BUTTON_INVALID", - "CONTROLLER_BUTTON_LEFTSHOULDER", - "CONTROLLER_BUTTON_LEFTSTICK", - "CONTROLLER_BUTTON_MAX", - "CONTROLLER_BUTTON_RIGHTSHOULDER", - "CONTROLLER_BUTTON_RIGHTSTICK", - "CONTROLLER_BUTTON_START", - "CONTROLLER_BUTTON_X", - "CONTROLLER_BUTTON_Y", - "DOUBLEBUF", - "DROPBEGIN", - "DROPCOMPLETE", - "DROPFILE", - "DROPTEXT", - "FINGERDOWN", - "FINGERMOTION", - "FINGERUP", - "FULLSCREEN", - "GL_ACCELERATED_VISUAL", - "GL_ACCUM_ALPHA_SIZE", - "GL_ACCUM_BLUE_SIZE", - "GL_ACCUM_GREEN_SIZE", - "GL_ACCUM_RED_SIZE", - "GL_ALPHA_SIZE", - "GL_BLUE_SIZE", - "GL_BUFFER_SIZE", - "GL_CONTEXT_DEBUG_FLAG", - "GL_CONTEXT_FLAGS", - "GL_CONTEXT_FORWARD_COMPATIBLE_FLAG", - "GL_CONTEXT_MAJOR_VERSION", - "GL_CONTEXT_MINOR_VERSION", - "GL_CONTEXT_PROFILE_COMPATIBILITY", - "GL_CONTEXT_PROFILE_CORE", - "GL_CONTEXT_PROFILE_ES", - "GL_CONTEXT_PROFILE_MASK", - "GL_CONTEXT_RELEASE_BEHAVIOR", - "GL_CONTEXT_RELEASE_BEHAVIOR_FLUSH", - "GL_CONTEXT_RELEASE_BEHAVIOR_NONE", - "GL_CONTEXT_RESET_ISOLATION_FLAG", - "GL_CONTEXT_ROBUST_ACCESS_FLAG", - "GL_DEPTH_SIZE", - "GL_DOUBLEBUFFER", - "GL_FRAMEBUFFER_SRGB_CAPABLE", - "GL_GREEN_SIZE", - "GL_MULTISAMPLEBUFFERS", - "GL_MULTISAMPLESAMPLES", - "GL_RED_SIZE", - "GL_SHARE_WITH_CURRENT_CONTEXT", - "GL_STENCIL_SIZE", - "GL_STEREO", - 
"GL_SWAP_CONTROL", - "HAT_CENTERED", - "HAT_DOWN", - "HAT_LEFT", - "HAT_LEFTDOWN", - "HAT_LEFTUP", - "HAT_RIGHT", - "HAT_RIGHTDOWN", - "HAT_RIGHTUP", - "HAT_UP", - "HIDDEN", - "HWACCEL", - "HWPALETTE", - "HWSURFACE", - "JOYAXISMOTION", - "JOYBALLMOTION", - "JOYBUTTONDOWN", - "JOYBUTTONUP", - "JOYHATMOTION", - "JOYDEVICEADDED", - "JOYDEVICEREMOVED", - "KEYDOWN", - "KEYUP", - "KMOD_ALT", - "KMOD_CAPS", - "KMOD_CTRL", - "KMOD_GUI", - "KMOD_LALT", - "KMOD_LCTRL", - "KMOD_LGUI", - "KMOD_LMETA", - "KMOD_LSHIFT", - "KMOD_META", - "KMOD_MODE", - "KMOD_NONE", - "KMOD_NUM", - "KMOD_RALT", - "KMOD_RCTRL", - "KMOD_RGUI", - "KMOD_RMETA", - "KMOD_RSHIFT", - "KMOD_SHIFT", - "KSCAN_0", - "KSCAN_1", - "KSCAN_2", - "KSCAN_3", - "KSCAN_4", - "KSCAN_5", - "KSCAN_6", - "KSCAN_7", - "KSCAN_8", - "KSCAN_9", - "KSCAN_A", - "KSCAN_APOSTROPHE", - "KSCAN_B", - "KSCAN_BACKSLASH", - "KSCAN_BACKSPACE", - "KSCAN_BREAK", - "KSCAN_C", - "KSCAN_CAPSLOCK", - "KSCAN_CLEAR", - "KSCAN_COMMA", - "KSCAN_CURRENCYSUBUNIT", - "KSCAN_CURRENCYUNIT", - "KSCAN_D", - "KSCAN_DELETE", - "KSCAN_DOWN", - "KSCAN_E", - "KSCAN_END", - "KSCAN_EQUALS", - "KSCAN_ESCAPE", - "KSCAN_EURO", - "KSCAN_F", - "KSCAN_F1", - "KSCAN_F10", - "KSCAN_F11", - "KSCAN_F12", - "KSCAN_F13", - "KSCAN_F14", - "KSCAN_F15", - "KSCAN_F2", - "KSCAN_F3", - "KSCAN_F4", - "KSCAN_F5", - "KSCAN_F6", - "KSCAN_F7", - "KSCAN_F8", - "KSCAN_F9", - "KSCAN_G", - "KSCAN_GRAVE", - "KSCAN_H", - "KSCAN_HELP", - "KSCAN_HOME", - "KSCAN_I", - "KSCAN_INSERT", - "KSCAN_INTERNATIONAL1", - "KSCAN_INTERNATIONAL2", - "KSCAN_INTERNATIONAL3", - "KSCAN_INTERNATIONAL4", - "KSCAN_INTERNATIONAL5", - "KSCAN_INTERNATIONAL6", - "KSCAN_INTERNATIONAL7", - "KSCAN_INTERNATIONAL8", - "KSCAN_INTERNATIONAL9", - "KSCAN_J", - "KSCAN_K", - "KSCAN_KP0", - "KSCAN_KP1", - "KSCAN_KP2", - "KSCAN_KP3", - "KSCAN_KP4", - "KSCAN_KP5", - "KSCAN_KP6", - "KSCAN_KP7", - "KSCAN_KP8", - "KSCAN_KP9", - "KSCAN_KP_0", - "KSCAN_KP_1", - "KSCAN_KP_2", - "KSCAN_KP_3", - "KSCAN_KP_4", - "KSCAN_KP_5", - "KSCAN_KP_6", - "KSCAN_KP_7", - "KSCAN_KP_8", - "KSCAN_KP_9", - "KSCAN_KP_DIVIDE", - "KSCAN_KP_ENTER", - "KSCAN_KP_EQUALS", - "KSCAN_KP_MINUS", - "KSCAN_KP_MULTIPLY", - "KSCAN_KP_PERIOD", - "KSCAN_KP_PLUS", - "KSCAN_L", - "KSCAN_LALT", - "KSCAN_LANG1", - "KSCAN_LANG2", - "KSCAN_LANG3", - "KSCAN_LANG4", - "KSCAN_LANG5", - "KSCAN_LANG6", - "KSCAN_LANG7", - "KSCAN_LANG8", - "KSCAN_LANG9", - "KSCAN_LCTRL", - "KSCAN_LEFT", - "KSCAN_LEFTBRACKET", - "KSCAN_LGUI", - "KSCAN_LMETA", - "KSCAN_LSHIFT", - "KSCAN_LSUPER", - "KSCAN_M", - "KSCAN_MENU", - "KSCAN_MINUS", - "KSCAN_MODE", - "KSCAN_N", - "KSCAN_NONUSBACKSLASH", - "KSCAN_NONUSHASH", - "KSCAN_NUMLOCK", - "KSCAN_NUMLOCKCLEAR", - "KSCAN_O", - "KSCAN_P", - "KSCAN_PAGEDOWN", - "KSCAN_PAGEUP", - "KSCAN_PAUSE", - "KSCAN_PERIOD", - "KSCAN_POWER", - "KSCAN_PRINT", - "KSCAN_PRINTSCREEN", - "KSCAN_Q", - "KSCAN_R", - "KSCAN_RALT", - "KSCAN_RCTRL", - "KSCAN_RETURN", - "KSCAN_RGUI", - "KSCAN_RIGHT", - "KSCAN_RIGHTBRACKET", - "KSCAN_RMETA", - "KSCAN_RSHIFT", - "KSCAN_RSUPER", - "KSCAN_S", - "KSCAN_SCROLLLOCK", - "KSCAN_SCROLLOCK", - "KSCAN_SEMICOLON", - "KSCAN_SLASH", - "KSCAN_SPACE", - "KSCAN_SYSREQ", - "KSCAN_T", - "KSCAN_TAB", - "KSCAN_U", - "KSCAN_UNKNOWN", - "KSCAN_UP", - "KSCAN_V", - "KSCAN_W", - "KSCAN_X", - "KSCAN_Y", - "KSCAN_Z", - "K_0", - "K_1", - "K_2", - "K_3", - "K_4", - "K_5", - "K_6", - "K_7", - "K_8", - "K_9", - "K_AC_BACK", - "K_AMPERSAND", - "K_ASTERISK", - "K_AT", - "K_BACKQUOTE", - "K_BACKSLASH", - "K_BACKSPACE", - "K_BREAK", - "K_CAPSLOCK", - "K_CARET", - "K_CLEAR", - "K_COLON", - 
"K_COMMA", - "K_CURRENCYSUBUNIT", - "K_CURRENCYUNIT", - "K_DELETE", - "K_DOLLAR", - "K_DOWN", - "K_END", - "K_EQUALS", - "K_ESCAPE", - "K_EURO", - "K_EXCLAIM", - "K_F1", - "K_F10", - "K_F11", - "K_F12", - "K_F13", - "K_F14", - "K_F15", - "K_F2", - "K_F3", - "K_F4", - "K_F5", - "K_F6", - "K_F7", - "K_F8", - "K_F9", - "K_GREATER", - "K_HASH", - "K_HELP", - "K_HOME", - "K_INSERT", - "K_KP0", - "K_KP1", - "K_KP2", - "K_KP3", - "K_KP4", - "K_KP5", - "K_KP6", - "K_KP7", - "K_KP8", - "K_KP9", - "K_KP_0", - "K_KP_1", - "K_KP_2", - "K_KP_3", - "K_KP_4", - "K_KP_5", - "K_KP_6", - "K_KP_7", - "K_KP_8", - "K_KP_9", - "K_KP_DIVIDE", - "K_KP_ENTER", - "K_KP_EQUALS", - "K_KP_MINUS", - "K_KP_MULTIPLY", - "K_KP_PERIOD", - "K_KP_PLUS", - "K_LALT", - "K_LCTRL", - "K_LEFT", - "K_LEFTBRACKET", - "K_LEFTPAREN", - "K_LESS", - "K_LGUI", - "K_LMETA", - "K_LSHIFT", - "K_LSUPER", - "K_MENU", - "K_MINUS", - "K_MODE", - "K_NUMLOCK", - "K_NUMLOCKCLEAR", - "K_PAGEDOWN", - "K_PAGEUP", - "K_PAUSE", - "K_PERCENT", - "K_PERIOD", - "K_PLUS", - "K_POWER", - "K_PRINT", - "K_PRINTSCREEN", - "K_QUESTION", - "K_QUOTE", - "K_QUOTEDBL", - "K_RALT", - "K_RCTRL", - "K_RETURN", - "K_RGUI", - "K_RIGHT", - "K_RIGHTBRACKET", - "K_RIGHTPAREN", - "K_RMETA", - "K_RSHIFT", - "K_RSUPER", - "K_SCROLLLOCK", - "K_SCROLLOCK", - "K_SEMICOLON", - "K_SLASH", - "K_SPACE", - "K_SYSREQ", - "K_TAB", - "K_UNDERSCORE", - "K_UNKNOWN", - "K_UP", - "K_a", - "K_b", - "K_c", - "K_d", - "K_e", - "K_f", - "K_g", - "K_h", - "K_i", - "K_j", - "K_k", - "K_l", - "K_m", - "K_n", - "K_o", - "K_p", - "K_q", - "K_r", - "K_s", - "K_t", - "K_u", - "K_v", - "K_w", - "K_x", - "K_y", - "K_z", - "LIL_ENDIAN", - "MIDIIN", - "MIDIOUT", - "MOUSEBUTTONDOWN", - "MOUSEBUTTONUP", - "MOUSEMOTION", - "MOUSEWHEEL", - "MULTIGESTURE", - "NOEVENT", - "NOFRAME", - "NUMEVENTS", - "OPENGL", - "OPENGLBLIT", - "PREALLOC", - "QUIT", - "RESIZABLE", - "RLEACCEL", - "RLEACCELOK", - "SCALED", - "SCRAP_BMP", - "SCRAP_CLIPBOARD", - "SCRAP_PBM", - "SCRAP_PPM", - "SCRAP_SELECTION", - "SCRAP_TEXT", - "SHOWN", - "SRCALPHA", - "SRCCOLORKEY", - "SWSURFACE", - "SYSTEM_CURSOR_ARROW", - "SYSTEM_CURSOR_CROSSHAIR", - "SYSTEM_CURSOR_HAND", - "SYSTEM_CURSOR_IBEAM", - "SYSTEM_CURSOR_NO", - "SYSTEM_CURSOR_SIZEALL", - "SYSTEM_CURSOR_SIZENESW", - "SYSTEM_CURSOR_SIZENS", - "SYSTEM_CURSOR_SIZENWSE", - "SYSTEM_CURSOR_SIZEWE", - "SYSTEM_CURSOR_WAIT", - "SYSTEM_CURSOR_WAITARROW", - "SYSWMEVENT", - "TEXTEDITING", - "TEXTINPUT", - "TIMER_RESOLUTION", - "USEREVENT", - "USEREVENT_DROPFILE", - "VIDEOEXPOSE", - "VIDEORESIZE", - "WINDOWSHOWN", - "WINDOWHIDDEN", - "WINDOWEXPOSED", - "WINDOWMOVED", - "WINDOWRESIZED", - "WINDOWSIZECHANGED", - "WINDOWMINIMIZED", - "WINDOWMAXIMIZED", - "WINDOWRESTORED", - "WINDOWENTER", - "WINDOWLEAVE", - "WINDOWFOCUSGAINED", - "WINDOWFOCUSLOST", - "WINDOWCLOSE", - "WINDOWTAKEFOCUS", - "WINDOWHITTEST", -] diff --git a/venv/Lib/site-packages/pygame/macosx.py b/venv/Lib/site-packages/pygame/macosx.py deleted file mode 100644 index fca8e21..0000000 --- a/venv/Lib/site-packages/pygame/macosx.py +++ /dev/null @@ -1,15 +0,0 @@ -import platform -import os -import sys -from pygame.pkgdata import getResource -from pygame import sdlmain_osx - -__all__ = ["Video_AutoInit"] - - -def Video_AutoInit(): - """Called from the base.c just before display module is initialized.""" - if "Darwin" in platform.platform(): - if (os.getcwd() == "/") and len(sys.argv) > 1: - os.chdir(os.path.dirname(sys.argv[0])) - return True diff --git a/venv/Lib/site-packages/pygame/mask.cp39-win_amd64.pyd 
b/venv/Lib/site-packages/pygame/mask.cp39-win_amd64.pyd deleted file mode 100644 index fb6d539..0000000 Binary files a/venv/Lib/site-packages/pygame/mask.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/mask.pyi b/venv/Lib/site-packages/pygame/mask.pyi deleted file mode 100644 index 726d3bf..0000000 --- a/venv/Lib/site-packages/pygame/mask.pyi +++ /dev/null @@ -1,58 +0,0 @@ -from typing import Any, List, Optional, Sequence, Tuple, TypeVar, Union - -from pygame.rect import Rect -from pygame.surface import Surface - -from ._common import _ColorValue, _Coordinate, _RectValue - -_Offset = TypeVar("_Offset", Tuple[int, int], Sequence[int]) - -def from_surface(surface: Surface, threshold: int = 127) -> Mask: ... -def from_threshold( - surface: Surface, - color: _ColorValue, - threshold: _ColorValue = (0, 0, 0, 255), - other_surface: Optional[Surface] = None, - palette_colors: int = 1, -) -> Mask: ... - -class Mask: - def __init__(self, size: _Coordinate, fill: bool = False) -> None: ... - def copy(self) -> Mask: ... - def get_size(self) -> Tuple[int, int]: ... - def get_rect(self, **kwargs: Any) -> Rect: ... # Dict type needs to be completed - def get_at(self, pos: _Coordinate) -> int: ... - def set_at(self, pos: _Coordinate, value: int = 1) -> None: ... - def overlap(self, other: Mask, offset: _Offset) -> Union[Tuple[int, int], None]: ... - def overlap_area(self, other: Mask, offset: _Coordinate) -> int: ... - def overlap_mask(self, other: Mask, offset: _Coordinate) -> Mask: ... - def fill(self) -> None: ... - def clear(self) -> None: ... - def invert(self) -> None: ... - def scale(self, size: _Coordinate) -> Mask: ... - def draw(self, other: Mask, offset: _Coordinate) -> None: ... - def erase(self, other: Mask, offset: _Coordinate) -> None: ... - def count(self) -> int: ... - def centroid(self) -> Tuple[int, int]: ... - def angle(self) -> float: ... - def outline(self, every: int = 1) -> List[Tuple[int, int]]: ... - def convolve( - self, - other: Mask, - output: Optional[Mask] = None, - offset: _Coordinate = (0, 0), - ) -> Mask: ... - def connected_component( - self, pos: Union[List[int], Tuple[int, int]] = ... - ) -> Mask: ... - def connected_components(self, minimum: int = 0) -> List[Mask]: ... - def get_bounding_rects(self) -> Rect: ... - def to_surface( - self, - surface: Optional[Surface] = None, - setsurface: Optional[Surface] = None, - unsetsurface: Optional[Surface] = None, - setcolor: _ColorValue = (255, 255, 255, 255), - unsetcolor: _ColorValue = (0, 0, 0, 255), - dest: Union[_RectValue, _Coordinate] = (0, 0), - ) -> Surface: ... diff --git a/venv/Lib/site-packages/pygame/math.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/math.cp39-win_amd64.pyd deleted file mode 100644 index 2ea3471..0000000 Binary files a/venv/Lib/site-packages/pygame/math.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/math.pyi b/venv/Lib/site-packages/pygame/math.pyi deleted file mode 100644 index 594acf2..0000000 --- a/venv/Lib/site-packages/pygame/math.pyi +++ /dev/null @@ -1,281 +0,0 @@ -from typing import List, Sequence, Tuple, Union, overload - -class _VectorElementwiseProxy2: - def __add__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __radd__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __sub__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... 
- def __rsub__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __mul__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __rmul__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __truediv__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __rtruediv__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __floordiv__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __rfloordiv__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __mod__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __rmod__( - self, other: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __pow__( - self, power: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - def __rpow__( - self, power: Union[float, Vector2, _VectorElementwiseProxy2] - ) -> Vector2: ... - -class _VectorElementwiseProxy3: - def __add__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __radd__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __sub__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __rsub__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __mul__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __rmul__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __truediv__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __rtruediv__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __floordiv__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __rfloordiv__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __mod__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __rmod__( - self, other: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __pow__( - self, power: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - def __rpow__( - self, power: Union[float, Vector3, _VectorElementwiseProxy3] - ) -> Vector3: ... - -class Vector2: - x: float - y: float - xx: Vector2 - xy: Vector2 - yx: Vector2 - yy: Vector2 - __hash__: None # type: ignore - @overload - def __init__( - self, - x: Union[float, Tuple[float, float], List[float], Vector2] = 0, - ) -> None: ... - @overload - def __init__(self, x: float, y: float) -> None: ... - def __setitem__(self, key: int, value: float) -> None: ... - @overload - def __getitem__(self, i: int) -> float: ... - @overload - def __getitem__(self, s: slice) -> List[float]: ... - def __add__(self, other: Vector2) -> Vector2: ... - def __sub__(self, other: Vector2) -> Vector2: ... - @overload - def __mul__(self, other: Vector2) -> float: ... - @overload - def __mul__(self, other: float) -> Vector2: ... - def __rmul__(self, other: float) -> Vector2: ... - def __truediv__(self, other: float) -> Vector2: ... - def __floordiv__(self, other: float) -> Vector2: ... - def __neg__(self) -> Vector2: ... - def __pos__(self) -> Vector2: ... - def __bool__(self) -> bool: ... 
- def __iadd__(self, other: Vector2) -> Vector2: ... - def __isub__(self, other: Vector2) -> Vector2: ... - @overload - def __imul__(self, other: Vector2) -> float: ... - @overload - def __imul__(self, other: float) -> Vector2: ... - def dot(self, other: Vector2) -> float: ... - def cross(self, other: Vector2) -> Vector2: ... - def magnitude(self) -> float: ... - def magnitude_squared(self) -> float: ... - def length(self) -> float: ... - def length_squared(self) -> float: ... - def normalize(self) -> Vector2: ... - def normalize_ip(self) -> None: ... - def is_normalized(self) -> bool: ... - def scale_to_length(self, value: float) -> None: ... - def reflect(self, other: Vector2) -> Vector2: ... - def reflect_ip(self, other: Vector2) -> None: ... - def distance_to(self, other: Union[Vector2, Sequence[float]]) -> float: ... - def distance_squared_to(self, other: Vector2) -> float: ... - def lerp(self, other: Vector2, value: float) -> Vector2: ... - def slerp(self, other: Vector2, value: float) -> Vector2: ... - def elementwise(self) -> _VectorElementwiseProxy2: ... - def rotate(self, angle: float) -> Vector2: ... - def rotate_rad(self, angle: float) -> Vector2: ... - def rotate_ip(self, angle: float) -> None: ... - def rotate_rad_ip(self, angle: float) -> None: ... - def rotate_ip_rad(self, angle: float) -> None: ... - def angle_to(self, other: Vector2) -> float: ... - def as_polar(self) -> Tuple[float, float]: ... - def from_polar( - self, polar_value: Union[List[float], Tuple[float, float]] - ) -> None: ... - def copy(self) -> Vector2: ... - def update( - self, - x: Union[float, Vector2, Tuple[float, float], List[float]] = 0, - y: float = 0, - ) -> None: ... - -class Vector3: - x: float - y: float - z: float - xx: Vector2 - xy: Vector2 - xz: Vector2 - yx: Vector2 - yy: Vector2 - yz: Vector2 - zx: Vector2 - zy: Vector2 - zz: Vector2 - xxx: Vector3 - xxy: Vector3 - xxz: Vector3 - xyx: Vector3 - xyy: Vector3 - xyz: Vector3 - xzx: Vector3 - xzy: Vector3 - xzz: Vector3 - yxx: Vector3 - yxy: Vector3 - yxz: Vector3 - yyx: Vector3 - yyy: Vector3 - yyz: Vector3 - yzx: Vector3 - yzy: Vector3 - yzz: Vector3 - zxx: Vector3 - zxy: Vector3 - zxz: Vector3 - zyx: Vector3 - zyy: Vector3 - zyz: Vector3 - zzx: Vector3 - zzy: Vector3 - zzz: Vector3 - __hash__: None # type: ignore - @overload - def __init__( - self, - xyz: Union[float, Tuple[float, float, float], List[float], Vector3] = 0, - ) -> None: ... - @overload - def __init__(self, x: float, y: float, z: float) -> None: ... - def __setitem__(self, key: int, value: float) -> None: ... - @overload - def __getitem__(self, i: int) -> float: ... - @overload - def __getitem__(self, s: slice) -> List[float]: ... - def __add__(self, other: Vector3) -> Vector3: ... - def __sub__(self, other: Vector3) -> Vector3: ... - @overload - def __mul__(self, other: Vector3) -> float: ... - @overload - def __mul__(self, other: float) -> Vector3: ... - def __rmul__(self, other: float) -> Vector3: ... - def __truediv__(self, other: float) -> Vector3: ... - def __floordiv__(self, other: float) -> Vector3: ... - def __neg__(self) -> Vector3: ... - def __pos__(self) -> Vector3: ... - def __bool__(self) -> bool: ... - def __iadd__(self, other: Vector3) -> Vector3: ... - def __isub__(self, other: Vector3) -> Vector3: ... - @overload - def __imul__(self, other: Vector3) -> float: ... - @overload - def __imul__(self, other: float) -> Vector3: ... - def dot(self, other: Vector3) -> float: ... - def cross(self, other: Vector3) -> Vector3: ... - def magnitude(self) -> float: ... 
- def magnitude_squared(self) -> float: ... - def length(self) -> float: ... - def length_squared(self) -> float: ... - def normalize(self) -> Vector3: ... - def normalize_ip(self) -> None: ... - def is_normalized(self) -> bool: ... - def scale_to_length(self, value: float) -> None: ... - def reflect(self, other: Vector3) -> Vector3: ... - def reflect_ip(self, other: Vector3) -> None: ... - def distance_to(self, other: Vector3) -> float: ... - def distance_squared_to(self, other: Vector3) -> float: ... - def lerp(self, other: Vector3, value: float) -> Vector3: ... - def slerp(self, other: Vector3, value: float) -> Vector3: ... - def elementwise(self) -> _VectorElementwiseProxy3: ... - def rotate(self, angle: float, axis: Vector3) -> Vector3: ... - def rotate_rad(self, angle: float, axis: Vector3) -> Vector3: ... - def rotate_ip(self, angle: float, axis: Vector3) -> None: ... - def rotate_rad_ip(self, angle: float, axis: Vector3) -> None: ... - def rotate_ip_rad(self, angle: float, axis: Vector3) -> None: ... - def rotate_x(self, angle: float) -> Vector3: ... - def rotate_x_rad(self, angle: float) -> Vector3: ... - def rotate_x_ip(self, angle: float) -> None: ... - def rotate_x_rad_ip(self, angle: float) -> None: ... - def rotate_x_ip_rad(self, angle: float) -> None: ... - def rotate_y(self, angle: float) -> Vector3: ... - def rotate_y_rad(self, angle: float) -> Vector3: ... - def rotate_y_ip(self, angle: float) -> None: ... - def rotate_y_rad_ip(self, angle: float) -> None: ... - def rotate_y_ip_rad(self, angle: float) -> None: ... - def rotate_z(self, angle: float) -> Vector3: ... - def rotate_z_rad(self, angle: float) -> Vector3: ... - def rotate_z_ip(self, angle: float) -> None: ... - def rotate_z_rad_ip(self, angle: float) -> None: ... - def rotate_z_ip_rad(self, angle: float) -> None: ... - def angle_to(self, other: Vector3) -> float: ... - def as_spherical(self) -> Tuple[float, float, float]: ... - def from_spherical(self, spherical: Tuple[float, float, float]) -> None: ... - def copy(self) -> Vector3: ... - @overload - def update( - self, - xyz: Union[float, Tuple[float, float, float], List[float], Vector3] = 0, - ) -> None: ... - @overload - def update(self, x: int, y: int, z: int) -> None: ... diff --git a/venv/Lib/site-packages/pygame/midi.py b/venv/Lib/site-packages/pygame/midi.py deleted file mode 100644 index bcf8d6b..0000000 --- a/venv/Lib/site-packages/pygame/midi.py +++ /dev/null @@ -1,718 +0,0 @@ -"""pygame.midi -pygame module for interacting with midi input and output. - -The midi module can send output to midi devices, and get input -from midi devices. It can also list midi devices on the system. - -Including real midi devices, and virtual ones. - -It uses the portmidi library. Is portable to which ever platforms -portmidi supports (currently windows, OSX, and linux). - -This uses pyportmidi for now, but may use its own bindings at some -point in the future. The pyportmidi bindings are included with pygame. - -New in pygame 1.9.0. -""" - -# TODO: finish writing tests. -# - likely as interactive tests... so you'd need to plug in -# a midi device. -# TODO: create a background thread version for input threads. -# - that can automatically inject input into the event queue -# once the input object is running. Like joysticks. - -import math -import atexit - -import pygame -import pygame.locals - -import pygame.pypm as _pypm - -# For backward compatibility. 
-MIDIIN = pygame.locals.MIDIIN -MIDIOUT = pygame.locals.MIDIOUT - -__all__ = [ - "Input", - "MIDIIN", - "MIDIOUT", - "MidiException", - "Output", - "get_count", - "get_default_input_id", - "get_default_output_id", - "get_device_info", - "init", - "midis2events", - "quit", - "get_init", - "time", - "frequency_to_midi", - "midi_to_frequency", - "midi_to_ansi_note", -] - -__theclasses__ = ["Input", "Output"] - - -def _module_init(state=None): - # this is a sneaky dodge to store module level state in a non-public - # function. Helps us dodge using globals. - if state is not None: - _module_init.value = state - return state - - try: - _module_init.value - except AttributeError: - return False - else: - return _module_init.value - - -def init(): - """initialize the midi module - pygame.midi.init(): return None - - Call the initialisation function before using the midi module. - - It is safe to call this more than once. - """ - if not _module_init(): - _pypm.Initialize() - _module_init(True) - atexit.register(quit) - - -def quit(): # pylint: disable=redefined-builtin - """uninitialize the midi module - pygame.midi.quit(): return None - - - Called automatically atexit if you don't call it. - - It is safe to call this function more than once. - """ - if _module_init(): - # TODO: find all Input and Output classes and close them first? - _pypm.Terminate() - _module_init(False) - - -def get_init(): - """returns True if the midi module is currently initialized - pygame.midi.get_init(): return bool - - Returns True if the pygame.midi module is currently initialized. - - New in pygame 1.9.5. - """ - return _module_init() - - -def _check_init(): - if not _module_init(): - raise RuntimeError("pygame.midi not initialised.") - - -def get_count(): - """gets the number of devices. - pygame.midi.get_count(): return num_devices - - - Device ids range from 0 to get_count() -1 - """ - _check_init() - return _pypm.CountDevices() - - -def get_default_input_id(): - """gets default input device number - pygame.midi.get_default_input_id(): return default_id - - - Return the default device ID or -1 if there are no devices. - The result can be passed to the Input()/Output() class. - - On the PC, the user can specify a default device by - setting an environment variable. For example, to use device #1. - - set PM_RECOMMENDED_INPUT_DEVICE=1 - - The user should first determine the available device ID by using - the supplied application "testin" or "testout". - - In general, the registry is a better place for this kind of info, - and with USB devices that can come and go, using integers is not - very reliable for device identification. Under Windows, if - PM_RECOMMENDED_OUTPUT_DEVICE (or PM_RECOMMENDED_INPUT_DEVICE) is - *NOT* found in the environment, then the default device is obtained - by looking for a string in the registry under: - HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Input_Device - and HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Output_Device - for a string. The number of the first device with a substring that - matches the string exactly is returned. For example, if the string - in the registry is "USB", and device 1 is named - "In USB MidiSport 1x1", then that will be the default - input because it contains the string "USB". - - In addition to the name, get_device_info() returns "interf", which - is the interface name. (The "interface" is the underlying software - system or API used by PortMidi to access devices. Examples are - MMSystem, DirectX (not implemented), ALSA, OSS (not implemented), etc.) 
- At present, the only Win32 interface is "MMSystem", the only Linux - interface is "ALSA", and the only Max OS X interface is "CoreMIDI". - To specify both the interface and the device name in the registry, - separate the two with a comma and a space, e.g.: - MMSystem, In USB MidiSport 1x1 - In this case, the string before the comma must be a substring of - the "interf" string, and the string after the space must be a - substring of the "name" name string in order to match the device. - - Note: in the current release, the default is simply the first device - (the input or output device with the lowest PmDeviceID). - """ - _check_init() - return _pypm.GetDefaultInputDeviceID() - - -def get_default_output_id(): - """gets default output device number - pygame.midi.get_default_output_id(): return default_id - - - Return the default device ID or -1 if there are no devices. - The result can be passed to the Input()/Output() class. - - On the PC, the user can specify a default device by - setting an environment variable. For example, to use device #1. - - set PM_RECOMMENDED_OUTPUT_DEVICE=1 - - The user should first determine the available device ID by using - the supplied application "testin" or "testout". - - In general, the registry is a better place for this kind of info, - and with USB devices that can come and go, using integers is not - very reliable for device identification. Under Windows, if - PM_RECOMMENDED_OUTPUT_DEVICE (or PM_RECOMMENDED_INPUT_DEVICE) is - *NOT* found in the environment, then the default device is obtained - by looking for a string in the registry under: - HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Input_Device - and HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Output_Device - for a string. The number of the first device with a substring that - matches the string exactly is returned. For example, if the string - in the registry is "USB", and device 1 is named - "In USB MidiSport 1x1", then that will be the default - input because it contains the string "USB". - - In addition to the name, get_device_info() returns "interf", which - is the interface name. (The "interface" is the underlying software - system or API used by PortMidi to access devices. Examples are - MMSystem, DirectX (not implemented), ALSA, OSS (not implemented), etc.) - At present, the only Win32 interface is "MMSystem", the only Linux - interface is "ALSA", and the only Max OS X interface is "CoreMIDI". - To specify both the interface and the device name in the registry, - separate the two with a comma and a space, e.g.: - MMSystem, In USB MidiSport 1x1 - In this case, the string before the comma must be a substring of - the "interf" string, and the string after the space must be a - substring of the "name" name string in order to match the device. - - Note: in the current release, the default is simply the first device - (the input or output device with the lowest PmDeviceID). - """ - _check_init() - return _pypm.GetDefaultOutputDeviceID() - - -def get_device_info(an_id): - """returns information about a midi device - pygame.midi.get_device_info(an_id): return (interf, name, - input, output, - opened) - - interf - a text string describing the device interface, eg 'ALSA'. - name - a text string for the name of the device, eg 'Midi Through Port-0' - input - 0, or 1 if the device is an input device. - output - 0, or 1 if the device is an output device. - opened - 0, or 1 if the device is opened. - - If the id is out of range, the function returns None. 
- """ - _check_init() - return _pypm.GetDeviceInfo(an_id) - - -class Input(object): - """Input is used to get midi input from midi devices. - Input(device_id) - Input(device_id, buffer_size) - - buffer_size - the number of input events to be buffered waiting to - be read using Input.read() - """ - - def __init__(self, device_id, buffer_size=4096): - """ - The buffer_size specifies the number of input events to be buffered - waiting to be read using Input.read(). - """ - _check_init() - - if device_id == -1: - raise MidiException( - "Device id is -1, not a valid output id. " - "-1 usually means there were no default " - "Output devices." - ) - - try: - result = get_device_info(device_id) - except TypeError: - raise TypeError("an integer is required") - except OverflowError: - raise OverflowError("long int too large to convert to int") - - # and now some nasty looking error checking, to provide nice error - # messages to the kind, lovely, midi using people of wherever. - if result: - _, _, is_input, is_output, _ = result - if is_input: - try: - self._input = _pypm.Input(device_id, buffer_size) - except TypeError: - raise TypeError("an integer is required") - self.device_id = device_id - - elif is_output: - raise MidiException( - "Device id given is not a valid" " input id, it is an output id." - ) - else: - raise MidiException("Device id given is not a valid input id.") - else: - raise MidiException("Device id invalid, out of range.") - - def _check_open(self): - if self._input is None: - raise MidiException("midi not open.") - - def close(self): - """closes a midi stream, flushing any pending buffers. - Input.close(): return None - - PortMidi attempts to close open streams when the application - exits -- this is particularly difficult under Windows. - """ - _check_init() - if self._input is not None: - self._input.Close() - self._input = None - - def read(self, num_events): - """reads num_events midi events from the buffer. - Input.read(num_events): return midi_event_list - - Reads from the Input buffer and gives back midi events. - [[[status,data1,data2,data3],timestamp], - [[status,data1,data2,data3],timestamp],...] - """ - _check_init() - self._check_open() - return self._input.Read(num_events) - - def poll(self): - """returns true if there's data, or false if not. - Input.poll(): return Bool - - raises a MidiException on error. - """ - _check_init() - self._check_open() - - result = self._input.Poll() - if result == _pypm.TRUE: - return True - - if result == _pypm.FALSE: - return False - - err_text = _pypm.GetErrorText(result) - raise MidiException((result, err_text)) - - -class Output(object): - """Output is used to send midi to an output device - Output(device_id) - Output(device_id, latency = 0) - Output(device_id, buffer_size = 4096) - Output(device_id, latency, buffer_size) - - The buffer_size specifies the number of output events to be - buffered waiting for output. (In some cases -- see below -- - PortMidi does not buffer output at all and merely passes data - to a lower-level API, in which case buffersize is ignored.) - - latency is the delay in milliseconds applied to timestamps to determine - when the output should actually occur. (If latency is < 0, 0 is - assumed.) - - If latency is zero, timestamps are ignored and all output is delivered - immediately. If latency is greater than zero, output is delayed until - the message timestamp plus the latency. (NOTE: time is measured - relative to the time source indicated by time_proc. 
Timestamps are - absolute, not relative delays or offsets.) In some cases, PortMidi - can obtain better timing than your application by passing timestamps - along to the device driver or hardware. Latency may also help you - to synchronize midi data to audio data by matching midi latency to - the audio buffer latency. - - """ - - def __init__(self, device_id, latency=0, buffer_size=256): - """Output(device_id) - Output(device_id, latency = 0) - Output(device_id, buffer_size = 4096) - Output(device_id, latency, buffer_size) - - The buffer_size specifies the number of output events to be - buffered waiting for output. (In some cases -- see below -- - PortMidi does not buffer output at all and merely passes data - to a lower-level API, in which case buffersize is ignored.) - - latency is the delay in milliseconds applied to timestamps to determine - when the output should actually occur. (If latency is < 0, 0 is - assumed.) - - If latency is zero, timestamps are ignored and all output is delivered - immediately. If latency is greater than zero, output is delayed until - the message timestamp plus the latency. (NOTE: time is measured - relative to the time source indicated by time_proc. Timestamps are - absolute, not relative delays or offsets.) In some cases, PortMidi - can obtain better timing than your application by passing timestamps - along to the device driver or hardware. Latency may also help you - to synchronize midi data to audio data by matching midi latency to - the audio buffer latency. - """ - - _check_init() - self._aborted = 0 - - if device_id == -1: - raise MidiException( - "Device id is -1, not a valid output id." - " -1 usually means there were no default " - "Output devices." - ) - - try: - result = get_device_info(device_id) - except TypeError: - raise TypeError("an integer is required") - except OverflowError: - raise OverflowError("long int too large to convert to int") - - # and now some nasty looking error checking, to provide nice error - # messages to the kind, lovely, midi using people of wherever. - if result: - _, _, is_input, is_output, _ = result - if is_output: - try: - self._output = _pypm.Output(device_id, latency, buffer_size) - except TypeError: - raise TypeError("an integer is required") - self.device_id = device_id - - elif is_input: - raise MidiException( - "Device id given is not a valid output " "id, it is an input id." - ) - else: - raise MidiException("Device id given is not a" " valid output id.") - else: - raise MidiException("Device id invalid, out of range.") - - def _check_open(self): - if self._output is None: - raise MidiException("midi not open.") - - if self._aborted: - raise MidiException("midi aborted.") - - def close(self): - """closes a midi stream, flushing any pending buffers. - Output.close(): return None - - PortMidi attempts to close open streams when the application - exits -- this is particularly difficult under Windows. - """ - _check_init() - if self._output is not None: - self._output.Close() - self._output = None - - def abort(self): - """terminates outgoing messages immediately - Output.abort(): return None - - The caller should immediately close the output port; - this call may result in transmission of a partial midi message. - There is no abort for Midi input because the user can simply - ignore messages in the buffer and close an input device at - any time. 
- """ - - _check_init() - if self._output: - self._output.Abort() - self._aborted = 1 - - def write(self, data): - """writes a list of midi data to the Output - Output.write(data) - - writes series of MIDI information in the form of a list: - write([[[status <,data1><,data2><,data3>],timestamp], - [[status <,data1><,data2><,data3>],timestamp],...]) - fields are optional - example: choose program change 1 at time 20000 and - send note 65 with velocity 100 500 ms later. - write([[[0xc0,0,0],20000],[[0x90,60,100],20500]]) - notes: - 1. timestamps will be ignored if latency = 0. - 2. To get a note to play immediately, send MIDI info with - timestamp read from function Time. - 3. understanding optional data fields: - write([[[0xc0,0,0],20000]]) is equivalent to - write([[[0xc0],20000]]) - - Can send up to 1024 elements in your data list, otherwise an - IndexError exception is raised. - """ - _check_init() - self._check_open() - - self._output.Write(data) - - def write_short(self, status, data1=0, data2=0): - """write_short(status <, data1><, data2>) - Output.write_short(status) - Output.write_short(status, data1 = 0, data2 = 0) - - output MIDI information of 3 bytes or less. - data fields are optional - status byte could be: - 0xc0 = program change - 0x90 = note on - etc. - data bytes are optional and assumed 0 if omitted - example: note 65 on with velocity 100 - write_short(0x90,65,100) - """ - _check_init() - self._check_open() - self._output.WriteShort(status, data1, data2) - - def write_sys_ex(self, when, msg): - """writes a timestamped system-exclusive midi message. - Output.write_sys_ex(when, msg) - - msg - can be a *list* or a *string* - when - a timestamp in miliseconds - example: - (assuming o is an onput MIDI stream) - o.write_sys_ex(0,'\\xF0\\x7D\\x10\\x11\\x12\\x13\\xF7') - is equivalent to - o.write_sys_ex(pygame.midi.time(), - [0xF0,0x7D,0x10,0x11,0x12,0x13,0xF7]) - """ - _check_init() - self._check_open() - self._output.WriteSysEx(when, msg) - - def note_on(self, note, velocity, channel=0): - """turns a midi note on. Note must be off. - Output.note_on(note, velocity, channel=0) - - note is an integer from 0 to 127 - velocity is an integer from 0 to 127 - channel is an integer from 0 to 15 - - Turn a note on in the output stream. The note must already - be off for this to work correctly. - """ - if not 0 <= channel <= 15: - raise ValueError("Channel not between 0 and 15.") - - self.write_short(0x90 + channel, note, velocity) - - def note_off(self, note, velocity=0, channel=0): - """turns a midi note off. Note must be on. - Output.note_off(note, velocity=0, channel=0) - - note is an integer from 0 to 127 - velocity is an integer from 0 to 127 (release velocity) - channel is an integer from 0 to 15 - - Turn a note off in the output stream. The note must already - be on for this to work correctly. - """ - if not 0 <= channel <= 15: - raise ValueError("Channel not between 0 and 15.") - - self.write_short(0x80 + channel, note, velocity) - - def set_instrument(self, instrument_id, channel=0): - """select an instrument for a channel, with a value between 0 and 127 - Output.set_instrument(instrument_id, channel=0) - - Also called "patch change" or "program change". - """ - if not 0 <= instrument_id <= 127: - raise ValueError(f"Undefined instrument id: {instrument_id}") - - if not 0 <= channel <= 15: - raise ValueError("Channel not between 0 and 15.") - - self.write_short(0xC0 + channel, instrument_id) - - def pitch_bend(self, value=0, channel=0): - """modify the pitch of a channel. 
- Output.pitch_bend(value=0, channel=0) - - Adjust the pitch of a channel. The value is a signed integer - from -8192 to +8191. For example, 0 means "no change", +4096 is - typically a semitone higher, and -8192 is 1 whole tone lower (though - the musical range corresponding to the pitch bend range can also be - changed in some synthesizers). - - If no value is given, the pitch bend is returned to "no change". - """ - if not 0 <= channel <= 15: - raise ValueError("Channel not between 0 and 15.") - - if not -8192 <= value <= 8191: - raise ValueError( - f"Pitch bend value must be between -8192 and +8191, not {value}." - ) - - # "The 14 bit value of the pitch bend is defined so that a value of - # 0x2000 is the center corresponding to the normal pitch of the note - # (no pitch change)." so value=0 should send 0x2000 - value = value + 0x2000 - lsb = value & 0x7F # keep least 7 bits - msb = value >> 7 - self.write_short(0xE0 + channel, lsb, msb) - - -# MIDI commands -# -# 0x80 Note Off (note_off) -# 0x90 Note On (note_on) -# 0xA0 Aftertouch -# 0xB0 Continuous controller -# 0xC0 Patch change (set_instrument?) -# 0xD0 Channel Pressure -# 0xE0 Pitch bend -# 0xF0 (non-musical commands) - - -def time(): - """returns the current time in ms of the PortMidi timer - pygame.midi.time(): return time - - The time is reset to 0, when the module is inited. - """ - _check_init() - return _pypm.Time() - - -def midis2events(midis, device_id): - """converts midi events to pygame events - pygame.midi.midis2events(midis, device_id): return [Event, ...] - - Takes a sequence of midi events and returns list of pygame events. - """ - evs = [] - for midi in midis: - ((status, data1, data2, data3), timestamp) = midi - - event = pygame.event.Event( - MIDIIN, - status=status, - data1=data1, - data2=data2, - data3=data3, - timestamp=timestamp, - vice_id=device_id, - ) - evs.append(event) - - return evs - - -class MidiException(Exception): - """exception that pygame.midi functions and classes can raise - MidiException(errno) - """ - - def __init__(self, value): - super(MidiException, self).__init__(value) - self.parameter = value - - def __str__(self): - return repr(self.parameter) - - -def frequency_to_midi(frequency): - """converts a frequency into a MIDI note. - - Rounds to the closest midi note. - - ::Examples:: - - >>> frequency_to_midi(27.5) - 21 - >>> frequency_to_midi(36.7) - 26 - >>> frequency_to_midi(4186.0) - 108 - """ - return int(round(69 + (12 * math.log(frequency / 440.0)) / math.log(2))) - - -def midi_to_frequency(midi_note): - """Converts a midi note to a frequency. - - ::Examples:: - - >>> midi_to_frequency(21) - 27.5 - >>> midi_to_frequency(26) - 36.7 - >>> midi_to_frequency(108) - 4186.0 - """ - return round(440.0 * 2 ** ((midi_note - 69) * (1.0 / 12.0)), 1) - - -def midi_to_ansi_note(midi_note): - """returns the Ansi Note name for a midi number. 
- - ::Examples:: - - >>> midi_to_ansi_note(21) - 'A0' - >>> midi_to_ansi_note(102) - 'F#7' - >>> midi_to_ansi_note(108) - 'C8' - """ - notes = ["A", "A#", "B", "C", "C#", "D", "D#", "E", "F", "F#", "G", "G#"] - num_notes = 12 - note_name = notes[int(((midi_note - 21) % num_notes))] - note_number = (midi_note - 12) // num_notes - return f"{note_name}{note_number}" diff --git a/venv/Lib/site-packages/pygame/midi.pyi b/venv/Lib/site-packages/pygame/midi.pyi deleted file mode 100644 index 90cd4a6..0000000 --- a/venv/Lib/site-packages/pygame/midi.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from typing import List, Sequence, Tuple, Union - -from pygame.event import Event - -MIDIIN: int -MIDIOUT: int - -class MidiException(Exception): - def __init__(self, errno: str) -> None: ... - -def init() -> None: ... -def quit() -> None: ... -def get_init() -> bool: ... -def get_count() -> int: ... -def get_default_input_id() -> int: ... -def get_default_output_id() -> int: ... -def get_device_info(an_id: int) -> Tuple[str, str, int, int, int]: ... -def midis2events( - midi_events: Sequence[Sequence[Union[Sequence[int], int]]], device_id: int -) -> List[Event]: ... -def time() -> int: ... -def frequency_to_midi(frequency: float) -> int: ... -def midi_to_frequency(midi_note: int) -> float: ... -def midi_to_ansi_note(midi_note: int) -> str: ... - -class Input: - device_id: int - def __init__(self, device_id: int, buffer_size: int = 4096) -> None: ... - def close(self) -> None: ... - def pool(self) -> bool: ... - def read(self, num_events: int) -> List[List[Union[List[int], int]]]: ... - -class Output: - device_id: int - def __init__( - self, - device_id: int, - latency: int = 0, - buffersize: int = 4096, - ) -> None: ... - def abort(self) -> None: ... - def close(self) -> None: ... - def note_off(self, note: int, velocity: int = 0, channel: int = 0) -> None: ... - def note_on(self, note: int, velocity: int = 0, channel: int = 0) -> None: ... - def set_instrument(self, instrument_id: int, channel: int = 0) -> None: ... - def pitch_bend(self, value: int = 0, channel: int = 0) -> None: ... - def write(self, data: List[List[Union[List[int], int]]]) -> None: ... - def write_short(self, status: int, data1: int = 0, data2: int = 0) -> None: ... - def write_sys_ex(self, msg: Union[List[int], str], when: int) -> None: ... diff --git a/venv/Lib/site-packages/pygame/mixer.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/mixer.cp39-win_amd64.pyd deleted file mode 100644 index 4c49c04..0000000 Binary files a/venv/Lib/site-packages/pygame/mixer.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/mixer.pyi b/venv/Lib/site-packages/pygame/mixer.pyi deleted file mode 100644 index 52681ad..0000000 --- a/venv/Lib/site-packages/pygame/mixer.pyi +++ /dev/null @@ -1,86 +0,0 @@ -from typing import Any, Optional, Tuple, Union, overload - -import numpy - -from pygame.event import Event - -from . import music as music -from ._common import _FileArg - -def init( - frequency: int = 44100, - size: int = -16, - channels: int = 2, - buffer: int = 512, - devicename: Optional[str] = None, - allowedchanges: int = 5, -) -> None: ... -def pre_init( - frequency: int = 44100, - size: int = -16, - channels: int = 2, - buffer: int = 512, - devicename: Optional[str] = None, - allowedchanges: int = 5, -) -> None: ... -def quit() -> None: ... -def get_init() -> Tuple[int, int, int]: ... -def stop() -> None: ... -def pause() -> None: ... -def unpause() -> None: ... -def fadeout(time: int) -> None: ... 
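Aside (not part of the diff): the deleted pygame/midi.py shown above documents the note/frequency conversion it performs and the 14-bit pitch-bend packing used by Output.pitch_bend. A minimal standalone sketch of that arithmetic follows; the helper names are illustrative, not pygame API.

    import math

    def frequency_to_midi(frequency: float) -> int:
        # MIDI note 69 is A4 at 440 Hz; 12 semitones per doubling of frequency.
        return int(round(69 + 12 * math.log(frequency / 440.0) / math.log(2)))

    def midi_to_frequency(midi_note: int) -> float:
        # Inverse mapping, rounded to one decimal place as in the original module.
        return round(440.0 * 2 ** ((midi_note - 69) / 12.0), 1)

    def pitch_bend_bytes(value: int = 0, channel: int = 0) -> tuple:
        # 0x2000 is the 14-bit "no change" centre; split into 7-bit LSB and MSB.
        v = value + 0x2000
        return (0xE0 + channel, v & 0x7F, v >> 7)

    assert frequency_to_midi(27.5) == 21 and frequency_to_midi(4186.0) == 108
    assert midi_to_frequency(69) == 440.0
    assert pitch_bend_bytes(0) == (0xE0, 0x00, 0x40)
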
-def set_num_channels(count: int) -> None: ... -def get_num_channels() -> int: ... -def set_reserved(count: int) -> int: ... -def find_channel(force: bool = False) -> Channel: ... -def get_busy() -> bool: ... -def get_sdl_mixer_version(linked: bool = True) -> Tuple[int, int, int]: ... - -class Sound: - @overload - def __init__(self, file: _FileArg) -> None: ... - @overload - def __init__( - self, buffer: Any - ) -> None: ... # Buffer protocol is still not implemented in typing - @overload - def __init__( - self, array: numpy.ndarray - ) -> None: ... # Buffer protocol is still not implemented in typing - def play( - self, - loops: int = 0, - maxtime: int = 0, - fade_ms: int = 0, - ) -> Channel: ... - def stop(self) -> None: ... - def fadeout(self, time: int) -> None: ... - def set_volume(self, value: float) -> None: ... - def get_volume(self) -> float: ... - def get_num_channels(self) -> int: ... - def get_length(self) -> float: ... - def get_raw(self) -> bytes: ... - -class Channel: - def __init__(self, id: int) -> None: ... - def play( - self, - sound: Sound, - loops: int = 0, - maxtime: int = 0, - fade_ms: int = 0, - ) -> None: ... - def stop(self) -> None: ... - def pause(self) -> None: ... - def unpause(self) -> None: ... - def fadeout(self, time: int) -> None: ... - @overload - def set_volume(self, value: float) -> None: ... - @overload - def set_volume(self, left: float, right: float) -> None: ... - def get_volume(self) -> float: ... - def get_busy(self) -> bool: ... - def get_sound(self) -> Sound: ... - def get_queue(self) -> Sound: ... - def set_endevent(self, type: Union[int, Event] = 0) -> None: ... - def get_endevent(self) -> int: ... diff --git a/venv/Lib/site-packages/pygame/mixer_music.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/mixer_music.cp39-win_amd64.pyd deleted file mode 100644 index de406d3..0000000 Binary files a/venv/Lib/site-packages/pygame/mixer_music.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/mouse.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/mouse.cp39-win_amd64.pyd deleted file mode 100644 index c1ca322..0000000 Binary files a/venv/Lib/site-packages/pygame/mouse.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/mouse.pyi b/venv/Lib/site-packages/pygame/mouse.pyi deleted file mode 100644 index 84b77ed..0000000 --- a/venv/Lib/site-packages/pygame/mouse.pyi +++ /dev/null @@ -1,34 +0,0 @@ -from typing import List, Sequence, Tuple, Union, overload - -from pygame.cursors import Cursor -from pygame.surface import Surface - -def get_pressed( - num_buttons: int = 3, -) -> Union[Tuple[bool, bool, bool], Tuple[bool, bool, bool, bool, bool]]: ... -def get_pos() -> Tuple[int, int]: ... -def get_rel() -> Tuple[int, int]: ... -@overload -def set_pos(pos: Union[List[float], Tuple[float, float]]) -> None: ... -@overload -def set_pos(x: float, y: float) -> None: ... -def set_visible(value: bool) -> int: ... -def get_visible() -> bool: ... -def get_focused() -> bool: ... -@overload -def set_cursor(cursor: Cursor) -> None: ... -@overload -def set_cursor(constant: int) -> None: ... -@overload -def set_cursor( - size: Union[Tuple[int, int], List[int]], - hotspot: Union[Tuple[int, int], List[int]], - xormasks: Sequence[int], - andmasks: Sequence[int], -) -> None: ... -@overload -def set_cursor( - hotspot: Union[Tuple[int, int], List[int]], surface: Surface -) -> None: ... -def get_cursor() -> Cursor: ... -def set_system_cursor(cursor: int) -> None: ... 
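For orientation, a minimal usage sketch matching the typed surface of the deleted mixer.pyi above (init, Sound, Channel, get_busy). It is illustrative only; "beep.wav" is a placeholder filename and the snippet is not taken from this repository.

    import pygame

    # Defaults mirror the stub signature: init(frequency=44100, size=-16, channels=2, buffer=512)
    pygame.mixer.init(frequency=44100, size=-16, channels=2, buffer=512)

    sound = pygame.mixer.Sound("beep.wav")       # Sound(file: _FileArg)
    channel = sound.play(loops=0, fade_ms=100)   # play() hands back a Channel
    if channel is not None:                      # play() can fail if no channel is free
        channel.set_volume(0.5)                  # Channel.set_volume(value: float)

    while pygame.mixer.get_busy():               # True while any channel is still playing
        pygame.time.wait(10)

    pygame.mixer.quit()
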
diff --git a/venv/Lib/site-packages/pygame/music.pyi b/venv/Lib/site-packages/pygame/music.pyi deleted file mode 100644 index 91a6f55..0000000 --- a/venv/Lib/site-packages/pygame/music.pyi +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Optional - -from ._common import _FileArg - -def load(filename: _FileArg, namehint: Optional[str] = "") -> None: ... -def unload() -> None: ... -def play(loops: int = 0, start: float = 0.0, fade_ms: int = 0) -> None: ... -def rewind() -> None: ... -def stop() -> None: ... -def pause() -> None: ... -def unpause() -> None: ... -def fadeout(time: int) -> None: ... -def set_volume(volume: float) -> None: ... -def get_volume() -> float: ... -def get_busy() -> bool: ... -def set_pos(pos: float) -> None: ... -def get_pos() -> int: ... -def queue(filename: _FileArg, namehint: str = "", loops: int = 0) -> None: ... -def set_endevent(event_type: int) -> None: ... -def get_endevent() -> int: ... diff --git a/venv/Lib/site-packages/pygame/newbuffer.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/newbuffer.cp39-win_amd64.pyd deleted file mode 100644 index 1f7ee61..0000000 Binary files a/venv/Lib/site-packages/pygame/newbuffer.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pixelarray.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/pixelarray.cp39-win_amd64.pyd deleted file mode 100644 index de2a5b0..0000000 Binary files a/venv/Lib/site-packages/pygame/pixelarray.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pixelarray.pyi b/venv/Lib/site-packages/pygame/pixelarray.pyi deleted file mode 100644 index f5e61dd..0000000 --- a/venv/Lib/site-packages/pygame/pixelarray.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Sequence, Tuple - -from pygame.surface import Surface - -from ._common import _ColorValue - -class PixelArray: - surface: Surface - itemsize: int - ndim: int - shape: Tuple[int, ...] - strides: Tuple[int, ...] - def __init__(self, surface: Surface) -> None: ... - def make_surface(self) -> Surface: ... - def replace( - self, - color: _ColorValue, - repcolor: _ColorValue, - distance: float = 0, - weights: Sequence[float] = (0.299, 0.587, 0.114), - ) -> None: ... - def extract( - self, - color: _ColorValue, - distance: float = 0, - weights: Sequence[float] = (0.299, 0.587, 0.114), - ) -> PixelArray: ... - def compare( - self, - array: PixelArray, - distance: float = 0, - weights: Sequence[float] = (0.299, 0.587, 0.114), - ) -> PixelArray: ... - def transpose(self) -> PixelArray: ... - def close(self) -> PixelArray: ... diff --git a/venv/Lib/site-packages/pygame/pixelcopy.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/pixelcopy.cp39-win_amd64.pyd deleted file mode 100644 index 56811d5..0000000 Binary files a/venv/Lib/site-packages/pygame/pixelcopy.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pixelcopy.pyi b/venv/Lib/site-packages/pygame/pixelcopy.pyi deleted file mode 100644 index 3734ddf..0000000 --- a/venv/Lib/site-packages/pygame/pixelcopy.pyi +++ /dev/null @@ -1,19 +0,0 @@ -import numpy -from typing_extensions import Literal - -from pygame.surface import Surface - -_kind = Literal["P", "p", "R", "r", "G", "g", "B", "b", "A", "a", "C", "c"] - -def surface_to_array( - array: numpy.ndarray, - surface: Surface, - kind: _kind = "P", - opaque: int = 255, - clear: int = 0, -) -> None: ... -def array_to_surface(surface: Surface, array: numpy.ndarray) -> None: ... 
-def map_to_array( - array1: numpy.ndarray, array2: numpy.ndarray, surface: Surface -) -> None: ... -def make_surface(array: numpy.ndarray) -> Surface: ... diff --git a/venv/Lib/site-packages/pygame/pkgdata.py b/venv/Lib/site-packages/pygame/pkgdata.py deleted file mode 100644 index 1d89028..0000000 --- a/venv/Lib/site-packages/pygame/pkgdata.py +++ /dev/null @@ -1,79 +0,0 @@ -""" -pkgdata is a simple, extensible way for a package to acquire data file -resources. - -The getResource function is equivalent to the standard idioms, such as -the following minimal implementation: - - import sys, os - - def getResource(identifier, pkgname=__name__): - pkgpath = os.path.dirname(sys.modules[pkgname].__file__) - path = os.path.join(pkgpath, identifier) - return file(os.path.normpath(path), mode='rb') - -When a __loader__ is present on the module given by __name__, it will defer -getResource to its get_data implementation and return it as a file-like -object (such as StringIO). -""" - -__all__ = ["getResource"] -import sys -import os - -from io import BytesIO - -try: - from pkg_resources import resource_stream, resource_exists -except ImportError: - - def resource_exists(_package_or_requirement, _resource_name): - """ - A stub for when we fail to import this function. - - :return: Always returns False - """ - return False - - def resource_stream(_package_of_requirement, _resource_name): - """ - A stub for when we fail to import this function. - - Always raises a NotImplementedError when called. - """ - raise NotImplementedError - - -def getResource(identifier, pkgname=__name__): - """ - Acquire a readable object for a given package name and identifier. - An IOError will be raised if the resource can not be found. - - For example: - mydata = getResource('mypkgdata.jpg').read() - - Note that the package name must be fully qualified, if given, such - that it would be found in sys.modules. - - In some cases, getResource will return a real file object. In that - case, it may be useful to use its name attribute to get the path - rather than use it as a file-like object. For example, you may - be handing data off to a C API. 
- """ - - # When pyinstaller (or similar tools) are used, resource_exists may raise - # NotImplemented error - try: - if resource_exists(pkgname, identifier): - return resource_stream(pkgname, identifier) - except NotImplementedError: - pass - - mod = sys.modules[pkgname] - path_to_file = getattr(mod, "__file__", None) - if path_to_file is None: - raise IOError(f"{repr(mod)} has no __file__!") - path = os.path.join(os.path.dirname(path_to_file), identifier) - - # pylint: disable=consider-using-with - return open(os.path.normpath(path), "rb") diff --git a/venv/Lib/site-packages/pygame/portmidi.dll b/venv/Lib/site-packages/pygame/portmidi.dll deleted file mode 100644 index cc4361c..0000000 Binary files a/venv/Lib/site-packages/pygame/portmidi.dll and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/py.typed b/venv/Lib/site-packages/pygame/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/pygame/pygame.ico b/venv/Lib/site-packages/pygame/pygame.ico deleted file mode 100644 index 06f699e..0000000 Binary files a/venv/Lib/site-packages/pygame/pygame.ico and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pygame_icon.bmp b/venv/Lib/site-packages/pygame/pygame_icon.bmp deleted file mode 100644 index 74aea77..0000000 Binary files a/venv/Lib/site-packages/pygame/pygame_icon.bmp and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pygame_icon.icns b/venv/Lib/site-packages/pygame/pygame_icon.icns deleted file mode 100644 index 44a67bb..0000000 Binary files a/venv/Lib/site-packages/pygame/pygame_icon.icns and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pygame_icon_mac.bmp b/venv/Lib/site-packages/pygame/pygame_icon_mac.bmp deleted file mode 100644 index 7b58bb1..0000000 Binary files a/venv/Lib/site-packages/pygame/pygame_icon_mac.bmp and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/pypm.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/pypm.cp39-win_amd64.pyd deleted file mode 100644 index ae030e9..0000000 Binary files a/venv/Lib/site-packages/pygame/pypm.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/rect.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/rect.cp39-win_amd64.pyd deleted file mode 100644 index af03aa2..0000000 Binary files a/venv/Lib/site-packages/pygame/rect.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/rect.pyi b/venv/Lib/site-packages/pygame/rect.pyi deleted file mode 100644 index c2d662c..0000000 --- a/venv/Lib/site-packages/pygame/rect.pyi +++ /dev/null @@ -1,229 +0,0 @@ -from typing import Dict, List, Sequence, Tuple, TypeVar, Union, overload - -from pygame.math import Vector2 -from ._common import _Coordinate, _CanBeRect - -_K = TypeVar("_K") -_V = TypeVar("_V") - -class Rect(object): - x: int - y: int - top: int - left: int - bottom: int - right: int - topleft: Tuple[int, int] - bottomleft: Tuple[int, int] - topright: Tuple[int, int] - bottomright: Tuple[int, int] - midtop: Tuple[int, int] - midleft: Tuple[int, int] - midbottom: Tuple[int, int] - midright: Tuple[int, int] - center: Tuple[int, int] - centerx: int - centery: int - size: Tuple[int, int] - width: int - height: int - w: int - h: int - __hash__: None # type: ignore - @overload - def __init__( - self, left: float, top: float, width: float, height: float - ) -> None: ... 
- @overload - def __init__( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> None: ... - @overload - def __init__( - self, - left_top_width_height: Union[ - Rect, Tuple[float, float, float, float], List[float] - ], - ) -> None: ... - @overload - def __getitem__(self, i: int) -> int: ... - @overload - def __getitem__(self, s: slice) -> List[int]: ... - def copy(self) -> Rect: ... - @overload - def move(self, x: float, y: float) -> Rect: ... - @overload - def move(self, move_by: _Coordinate) -> Rect: ... - @overload - def move_ip(self, x: float, y: float) -> None: ... - @overload - def move_ip(self, move_by: _Coordinate) -> None: ... - @overload - def inflate(self, x: float, y: float) -> Rect: ... - @overload - def inflate(self, inflate_by: _Coordinate) -> Rect: ... - @overload - def inflate_ip(self, x: float, y: float) -> None: ... - @overload - def inflate_ip(self, inflate_by: _Coordinate) -> None: ... - @overload - def update(self, left: float, top: float, width: float, height: float) -> None: ... - @overload - def update( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> None: ... - @overload - def update( - self, - left_top_width_height: Union[ - Rect, Tuple[float, float, float, float], List[float] - ], - ) -> None: ... - @overload - def clamp(self, rect: Union[_CanBeRect, Rect]) -> Rect: ... - @overload - def clamp( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> Rect: ... - @overload - def clamp(self, left: float, top: float, width: float, height: float) -> Rect: ... - @overload - def clamp_ip(self, rect: Union[_CanBeRect, Rect]) -> None: ... - @overload - def clamp_ip( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> None: ... - @overload - def clamp_ip( - self, left: float, top: float, width: float, height: float - ) -> None: ... - @overload - def clip(self, rect: Union[_CanBeRect, Rect]) -> Rect: ... - @overload - def clip( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> Rect: ... - @overload - def clip(self, left: float, top: float, width: float, height: float) -> Rect: ... - @overload - def clipline( - self, x1: float, x2: float, x3: float, x4: float - ) -> Union[Tuple[Tuple[int, int], Tuple[int, int]], Tuple[()]]: ... - @overload - def clipline( - self, first_coordinate: _Coordinate, second_coordinate: _Coordinate - ) -> Union[Tuple[Tuple[int, int], Tuple[int, int]], Tuple[()]]: ... - @overload - def clipline( - self, values: Union[Tuple[float, float, float, float], List[float]] - ) -> Union[Tuple[Tuple[int, int], Tuple[int, int]], Tuple[()]]: ... - @overload - def clipline( - self, coordinates: Union[Tuple[_Coordinate, _Coordinate], List[_Coordinate]] - ) -> Union[Tuple[Tuple[int, int], Tuple[int, int]], Tuple[()]]: ... - @overload - def union(self, rect: Union[_CanBeRect, Rect]) -> Rect: ... - @overload - def union( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> Rect: ... - @overload - def union(self, left: float, top: float, width: float, height: float) -> Rect: ... 
- @overload - def union_ip(self, rect: Union[_CanBeRect, Rect]) -> None: ... - @overload - def union_ip( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> None: ... - @overload - def union_ip( - self, left: float, top: float, width: float, height: float - ) -> None: ... - def unionall(self, rect: Sequence[Union[_CanBeRect, Rect]]) -> Rect: ... - def unionall_ip(self, rect_sequence: Sequence[Union[_CanBeRect, Rect]]) -> None: ... - @overload - def fit(self, rect: Union[_CanBeRect, Rect]) -> Rect: ... - @overload - def fit( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> Rect: ... - @overload - def fit(self, left: float, top: float, width: float, height: float) -> Rect: ... - def normalize(self) -> None: ... - @overload - def __contains__(self, rect: Union[_CanBeRect, Rect, int]) -> bool: ... - @overload - def __contains__( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> bool: ... - @overload - def __contains__(self, left: float, top: float, width: float, height: float) -> bool: ... - @overload - def contains(self, rect: Union[_CanBeRect, Rect]) -> bool: ... - @overload - def contains( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> bool: ... - @overload - def contains( - self, left: float, top: float, width: float, height: float - ) -> bool: ... - @overload - def collidepoint(self, x: float, y: float) -> bool: ... - @overload - def collidepoint(self, x_y: Union[List[float], Tuple[float, float]]) -> bool: ... - @overload - def colliderect(self, rect: Union[_CanBeRect, Rect]) -> bool: ... - @overload - def colliderect( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> bool: ... - @overload - def colliderect( - self, left: float, top: float, width: float, height: float - ) -> bool: ... - def collidelist(self, rect_list: Sequence[Union[Rect, _CanBeRect]]) -> int: ... - def collidelistall( - self, rect_list: Sequence[Union[Rect, _CanBeRect]] - ) -> List[int]: ... - # Also undocumented: the dict collision methods take a 'values' argument - # that defaults to False. If it is False, the keys in rect_dict must be - # Rect-like; otherwise, the values must be Rects. - @overload - def collidedict( - self, rect_dict: Dict[_CanBeRect, _V], values: bool = ... - ) -> Tuple[_CanBeRect, _V]: ... - @overload - def collidedict( - self, rect_dict: Dict[_K, "Rect"], values: bool - ) -> Tuple[_K, "Rect"]: ... - @overload - def collidedictall( - self, rect_dict: Dict[_CanBeRect, _V], values: bool = ... - ) -> List[Tuple[_CanBeRect, _V]]: ... - @overload - def collidedictall( - self, rect_dict: Dict[_K, "Rect"], values: bool - ) -> List[Tuple[_K, "Rect"]]: ... 
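Note (illustrative sketch, not part of the change): the hunk above deletes the pygame.Rect stub. The snippet below exercises a few of the overloads it declares; the coordinates and sizes are arbitrary assumptions for the example.

import pygame

player = pygame.Rect(10, 20, 32, 48)       # Rect(left, top, width, height)
wall = pygame.Rect((100, 0), (16, 240))    # Rect(left_top, width_height) overload

moved = player.move(5, 0)                  # returns a new Rect
player.move_ip(5, 0)                       # in-place variant, returns None
grown = player.inflate(10, 10)             # grow around the same center

print(player.colliderect(wall))            # bool
print(player.collidepoint(12, 25))         # bool
print(player.collidelist([wall, grown]))   # index of first hit, or -1

bounds = pygame.Rect(0, 0, 320, 240)
player = player.clamp(bounds)              # new Rect kept inside bounds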
diff --git a/venv/Lib/site-packages/pygame/rwobject.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/rwobject.cp39-win_amd64.pyd deleted file mode 100644 index 7766d8a..0000000 Binary files a/venv/Lib/site-packages/pygame/rwobject.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/scrap.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/scrap.cp39-win_amd64.pyd deleted file mode 100644 index 31e5d8f..0000000 Binary files a/venv/Lib/site-packages/pygame/scrap.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/scrap.pyi b/venv/Lib/site-packages/pygame/scrap.pyi deleted file mode 100644 index 7905ce6..0000000 --- a/venv/Lib/site-packages/pygame/scrap.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from typing import AnyStr, List - -def init() -> None: ... -def get_init() -> bool: ... -def get(data_type: str) -> AnyStr: ... -def get_types() -> List[str]: ... -def put(data_type: str, data: AnyStr) -> None: ... -def contains(data_type: str) -> bool: ... -def lost() -> bool: ... -def set_mode(mode: int) -> None: ... diff --git a/venv/Lib/site-packages/pygame/sndarray.py b/venv/Lib/site-packages/pygame/sndarray.py deleted file mode 100644 index 99ac4c7..0000000 --- a/venv/Lib/site-packages/pygame/sndarray.py +++ /dev/null @@ -1,139 +0,0 @@ -## pygame - Python Game Library -## Copyright (C) 2008 Marcus von Appen -## -## This library is free software; you can redistribute it and/or -## modify it under the terms of the GNU Library General Public -## License as published by the Free Software Foundation; either -## version 2 of the License, or (at your option) any later version. -## -## This library is distributed in the hope that it will be useful, -## but WITHOUT ANY WARRANTY; without even the implied warranty of -## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -## Library General Public License for more details. -## -## You should have received a copy of the GNU Library General Public -## License along with this library; if not, write to the Free -## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -## -## Marcus von Appen -## mva@sysfault.org - -"""pygame module for accessing sound sample data - -Functions to convert between NumPy arrays and Sound objects. This module -will only be functional when pygame can use the external NumPy package. -If NumPy can't be imported, surfarray becomes a MissingModule object. - -Sound data is made of thousands of samples per second, and each sample -is the amplitude of the wave at a particular moment in time. For -example, in 22-kHz format, element number 5 of the array is the -amplitude of the wave after 5/22000 seconds. - -Each sample is an 8-bit or 16-bit integer, depending on the data format. -A stereo sound file has two values per sample, while a mono sound file -only has one. - -Sounds with 16-bit data will be treated as unsigned integers, -if the sound sample type requests this. -""" - -from pygame import mixer -import numpy - -import warnings - - -__all__ = [ - "array", - "samples", - "make_sound", - "use_arraytype", - "get_arraytype", - "get_arraytypes", -] - - -def array(sound): - """pygame.sndarray.array(Sound): return array - - Copy Sound samples into an array. - - Creates a new array for the sound data and copies the samples. The - array will always be in the format returned from - pygame.mixer.get_init(). 
- """ - - return numpy.array(sound, copy=True) - - -def samples(sound): - """pygame.sndarray.samples(Sound): return array - - Reference Sound samples into an array. - - Creates a new array that directly references the samples in a Sound - object. Modifying the array will change the Sound. The array will - always be in the format returned from pygame.mixer.get_init(). - """ - - return numpy.array(sound, copy=False) - - -def make_sound(array): - """pygame.sndarray.make_sound(array): return Sound - - Convert an array into a Sound object. - - Create a new playable Sound object from an array. The mixer module - must be initialized and the array format must be similar to the mixer - audio format. - """ - - return mixer.Sound(array=array) - - -def use_arraytype(arraytype): - """pygame.sndarray.use_arraytype(arraytype): return None - - DEPRECATED - only numpy arrays are now supported. - """ - warnings.warn( - DeprecationWarning( - "only numpy arrays are now supported, " - "this function will be removed in a " - "future version of the module" - ) - ) - arraytype = arraytype.lower() - if arraytype != "numpy": - raise ValueError("invalid array type") - - -def get_arraytype(): - """pygame.sndarray.get_arraytype(): return str - - DEPRECATED - only numpy arrays are now supported. - """ - warnings.warn( - DeprecationWarning( - "only numpy arrays are now supported, " - "this function will be removed in a " - "future version of the module" - ) - ) - return "numpy" - - -def get_arraytypes(): - """pygame.sndarray.get_arraytypes(): return tuple - - DEPRECATED - only numpy arrays are now supported. - """ - warnings.warn( - DeprecationWarning( - "only numpy arrays are now supported, " - "this function will be removed in a " - "future version of the module" - ) - ) - return ("numpy",) diff --git a/venv/Lib/site-packages/pygame/sndarray.pyi b/venv/Lib/site-packages/pygame/sndarray.pyi deleted file mode 100644 index 8b0dd65..0000000 --- a/venv/Lib/site-packages/pygame/sndarray.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Tuple - -import numpy - -from pygame.mixer import Sound - -def array(sound: Sound) -> numpy.ndarray: ... -def samples(sound: Sound) -> numpy.ndarray: ... -def make_sound(array: numpy.ndarray) -> Sound: ... -def use_arraytype(arraytype: str) -> Sound: ... -def get_arraytype() -> str: ... -def get_arraytypes() -> Tuple[str]: ... diff --git a/venv/Lib/site-packages/pygame/sprite.py b/venv/Lib/site-packages/pygame/sprite.py deleted file mode 100644 index 553ccfb..0000000 --- a/venv/Lib/site-packages/pygame/sprite.py +++ /dev/null @@ -1,1782 +0,0 @@ -# pygame - Python Game Library -# Copyright (C) 2000-2003, 2007 Pete Shinners -# (C) 2004 Joe Wreschnig -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Library General Public -# License as published by the Free Software Foundation; either -# version 2 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Library General Public License for more details. 
-# -# You should have received a copy of the GNU Library General Public -# License along with this library; if not, write to the Free -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# Pete Shinners -# pete@shinners.org - -"""pygame module with basic game object classes - -This module contains several simple classes to be used within games. There -are the main Sprite class and several Group classes that contain Sprites. -The use of these classes is entirely optional when using Pygame. The classes -are fairly lightweight and only provide a starting place for the code -that is common to most games. - -The Sprite class is intended to be used as a base class for the different -types of objects in the game. There is also a base Group class that simply -stores sprites. A game could create new types of Group classes that operate -on specially customized Sprite instances they contain. - -The basic Sprite class can draw the Sprites it contains to a Surface. The -Group.draw() method requires that each Sprite have a Surface.image attribute -and a Surface.rect. The Group.clear() method requires these same attributes -and can be used to erase all the Sprites with background. There are also -more advanced Groups: pygame.sprite.RenderUpdates() and -pygame.sprite.OrderedUpdates(). - -Lastly, this module contains several collision functions. These help find -sprites inside multiple groups that have intersecting bounding rectangles. -To find the collisions, the Sprites are required to have a Surface.rect -attribute assigned. - -The groups are designed for high efficiency in removing and adding Sprites -to them. They also allow cheap testing to see if a Sprite already exists in -a Group. A given Sprite can exist in any number of groups. A game could use -some groups to control object rendering, and a completely separate set of -groups to control interaction or player movement. Instead of adding type -attributes or bools to a derived Sprite class, consider keeping the -Sprites inside organized Groups. This will allow for easier lookup later -in the game. - -Sprites and Groups manage their relationships with the add() and remove() -methods. These methods can accept a single or multiple group arguments for -membership. The default initializers for these classes also take a -single group or list of groups as arguments for initial membership. It is safe -to repeatedly add and remove the same Sprite from a Group. - -While it is possible to design sprite and group classes that don't derive -from the Sprite and AbstractGroup classes below, it is strongly recommended -that you extend those when you create a new Sprite or Group class. - -Sprites are not thread safe, so lock them yourself if using threads. - -""" - -# TODO: a group that holds only the 'n' most recent elements. -# sort of like the GroupSingle class, but holding more -# than one sprite -# -# drawing groups that can 'automatically' store the area -# underneath so they can "clear" without needing a background -# function. obviously a little slower than normal, but nice -# to use in many situations. (also remember it must "clear" -# in the reverse order that it draws :]) -# -# the drawing groups should also be able to take a background -# function, instead of just a background surface. the function -# would take a surface and a rectangle on that surface to erase. -# -# perhaps more types of collision functions? 
the current two -# should handle just about every need, but perhaps more optimized -# specific ones that aren't quite so general but fit into common -# specialized cases. - -from operator import truth -from warnings import warn - -import pygame - -from pygame.rect import Rect -from pygame.time import get_ticks -from pygame.mask import from_surface - - -class Sprite(object): - """simple base class for visible game objects - - pygame.sprite.Sprite(*groups): return Sprite - - The base class for visible game objects. Derived classes will want to - override the Sprite.update() method and assign Sprite.image and Sprite.rect - attributes. The initializer can accept any number of Group instances that - the Sprite will become a member of. - - When subclassing the Sprite class, be sure to call the base initializer - before adding the Sprite to Groups. - - """ - - def __init__(self, *groups): - self.__g = {} # The groups the sprite is in - if groups: - self.add(*groups) - - def add(self, *groups): - """add the sprite to groups - - Sprite.add(*groups): return None - - Any number of Group instances can be passed as arguments. The - Sprite will be added to the Groups it is not already a member of. - - """ - has = self.__g.__contains__ - for group in groups: - if hasattr(group, "_spritegroup"): - if not has(group): - group.add_internal(self) - self.add_internal(group) - else: - self.add(*group) - - def remove(self, *groups): - """remove the sprite from groups - - Sprite.remove(*groups): return None - - Any number of Group instances can be passed as arguments. The Sprite - will be removed from the Groups it is currently a member of. - - """ - has = self.__g.__contains__ - for group in groups: - if hasattr(group, "_spritegroup"): - if has(group): - group.remove_internal(self) - self.remove_internal(group) - else: - self.remove(*group) - - def add_internal(self, group): - """ - For adding this sprite to a group internally. - - :param group: The group we are adding to. - """ - self.__g[group] = 0 - - def remove_internal(self, group): - """ - For removing this sprite from a group internally. - - :param group: The group we are removing from. - """ - del self.__g[group] - - def update(self, *args, **kwargs): - """method to control sprite behavior - - Sprite.update(*args, **kwargs): - - The default implementation of this method does nothing; it's just a - convenient "hook" that you can override. This method is called by - Group.update() with whatever arguments you give it. - - There is no need to use this method if not using the convenience - method by the same name in the Group class. - - """ - - def kill(self): - """remove the Sprite from all Groups - - Sprite.kill(): return None - - The Sprite is removed from all the Groups that contain it. This won't - change anything about the state of the Sprite. It is possible to - continue to use the Sprite after this method has been called, including - adding it to Groups. - - """ - for group in self.__g: - group.remove_internal(self) - self.__g.clear() - - def groups(self): - """list of Groups that contain this Sprite - - Sprite.groups(): return group_list - - Returns a list of all the Groups that contain this Sprite. - - """ - return list(self.__g) - - def alive(self): - """does the sprite belong to any groups - - Sprite.alive(): return bool - - Returns True when the Sprite belongs to one or more Groups. 
- """ - return truth(self.__g) - - def __repr__(self): - return f"<{self.__class__.__name__} Sprite(in {len(self.__g)} groups)>" - - @property - def layer(self): - """ - Dynamic, read only property for protected _layer attribute. - This will get the _layer variable if it exists. - - If you try to get it before it is set it will raise an attribute error. - - Layer property can only be set before the sprite is added to a group, - after that it is read only and a sprite's layer in a group should be - set via the group's change_layer() method. - - :return: layer as an int, or raise AttributeError. - """ - return getattr(self, "_layer") - - @layer.setter - def layer(self, value): - if not self.alive(): - setattr(self, "_layer", value) - else: - raise AttributeError( - "Can't set layer directly after " - "adding to group. Use " - "group.change_layer(sprite, new_layer) " - "instead." - ) - - -class DirtySprite(Sprite): - """a more featureful subclass of Sprite with more attributes - - pygame.sprite.DirtySprite(*groups): return DirtySprite - - Extra DirtySprite attributes with their default values: - - dirty = 1 - If set to 1, it is repainted and then set to 0 again. - If set to 2, it is always dirty (repainted each frame; - flag is not reset). - If set to 0, it is not dirty and therefore not repainted again. - - blendmode = 0 - It's the special_flags argument of Surface.blit; see the blendmodes in - the Surface.blit documentation - - source_rect = None - This is the source rect to use. Remember that it is relative to the top - left corner (0, 0) of self.image. - - visible = 1 - Normally this is 1. If set to 0, it will not be repainted. (If you - change visible to 1, you must set dirty to 1 for it to be erased from - the screen.) - - _layer = 0 - 0 is the default value but this is able to be set differently - when subclassing. - - """ - - def __init__(self, *groups): - - self.dirty = 1 - - # referred to as special_flags in the documentation of Surface.blit - self.blendmode = 0 - self._visible = 1 - - # Default 0 unless initialized differently. - self._layer = getattr(self, "_layer", 0) - self.source_rect = None - Sprite.__init__(self, *groups) - - def _set_visible(self, val): - """set the visible value (0 or 1) and makes the sprite dirty""" - self._visible = val - if self.dirty < 2: - self.dirty = 1 - - def _get_visible(self): - """return the visible value of that sprite""" - return self._visible - - @property - def visible(self): - """ - You can make this sprite disappear without removing it from the group - assign 0 for invisible and 1 for visible - """ - return self._get_visible() - - @visible.setter - def visible(self, value): - self._set_visible(value) - - @property - def layer(self): - """ - Layer property can only be set before the sprite is added to a group, - after that it is read only and a sprite's layer in a group should be - set via the group's change_layer() method. - - Overwrites dynamic property from sprite class for speed. - """ - return self._layer - - @layer.setter - def layer(self, value): - if not self.alive(): - self._layer = value - else: - raise AttributeError( - "Can't set layer directly after " - "adding to group. Use " - "group.change_layer(sprite, new_layer) " - "instead." - ) - - def __repr__(self): - return ( - f"<{self.__class__.__name__} DirtySprite(in {len(self.groups())} groups)>" - ) - - -class AbstractGroup(object): - """base class for containers of sprites - - AbstractGroup does everything needed to behave as a normal group. 
You can - easily subclass a new group class from this or the other groups below if - you want to add more features. - - Any AbstractGroup-derived sprite groups act like sequences and support - iteration, len, and so on. - - """ - - # dummy val to identify sprite groups, and avoid infinite recursion - _spritegroup = True - - def __init__(self): - self.spritedict = {} - self.lostsprites = [] - - def sprites(self): - """get a list of sprites in the group - - Group.sprite(): return list - - Returns an object that can be looped over with a 'for' loop. (For now, - it is always a list, but this could change in a future version of - pygame.) Alternatively, you can get the same information by iterating - directly over the sprite group, e.g. 'for sprite in group'. - - """ - return list(self.spritedict) - - def add_internal( - self, - sprite, - layer=None, # noqa pylint: disable=unused-argument; supporting legacy derived classes that override in non-pythonic way - ): - """ - For adding a sprite to this group internally. - - :param sprite: The sprite we are adding. - :param layer: the layer to add to, if the group type supports layers - """ - self.spritedict[sprite] = None - - def remove_internal(self, sprite): - """ - For removing a sprite from this group internally. - - :param sprite: The sprite we are removing. - """ - lost_rect = self.spritedict[sprite] - if lost_rect: - self.lostsprites.append(lost_rect) - del self.spritedict[sprite] - - def has_internal(self, sprite): - """ - For checking if a sprite is in this group internally. - - :param sprite: The sprite we are checking. - """ - return sprite in self.spritedict - - def copy(self): - """copy a group with all the same sprites - - Group.copy(): return Group - - Returns a copy of the group that is an instance of the same class - and has the same sprites in it. - - """ - return self.__class__( # noqa pylint: disable=too-many-function-args - self.sprites() # Needed because copy() won't work on AbstractGroup - ) - - def __iter__(self): - return iter(self.sprites()) - - def __contains__(self, sprite): - return self.has(sprite) - - def add(self, *sprites): - """add sprite(s) to group - - Group.add(sprite, list, group, ...): return None - - Adds a sprite or sequence of sprites to a group. - - """ - for sprite in sprites: - # It's possible that some sprite is also an iterator. - # If this is the case, we should add the sprite itself, - # and not the iterator object. - if isinstance(sprite, Sprite): - if not self.has_internal(sprite): - self.add_internal(sprite) - sprite.add_internal(self) - else: - try: - # See if sprite is an iterator, like a list or sprite - # group. - self.add(*sprite) - except (TypeError, AttributeError): - # Not iterable. This is probably a sprite that is not an - # instance of the Sprite class or is not an instance of a - # subclass of the Sprite class. Alternately, it could be an - # old-style sprite group. - if hasattr(sprite, "_spritegroup"): - for spr in sprite.sprites(): - if not self.has_internal(spr): - self.add_internal(spr) - spr.add_internal(self) - elif not self.has_internal(sprite): - self.add_internal(sprite) - sprite.add_internal(self) - - def remove(self, *sprites): - """remove sprite(s) from group - - Group.remove(sprite, list, or group, ...): return None - - Removes a sprite or sequence of sprites from a group. - - """ - # This function behaves essentially the same as Group.add. It first - # tries to handle each argument as an instance of the Sprite class. 
If - # that fails, then it tries to handle the argument as an iterable - # object. If that fails, then it tries to handle the argument as an - # old-style sprite group. Lastly, if that fails, it assumes that the - # normal Sprite methods should be used. - for sprite in sprites: - if isinstance(sprite, Sprite): - if self.has_internal(sprite): - self.remove_internal(sprite) - sprite.remove_internal(self) - else: - try: - self.remove(*sprite) - except (TypeError, AttributeError): - if hasattr(sprite, "_spritegroup"): - for spr in sprite.sprites(): - if self.has_internal(spr): - self.remove_internal(spr) - spr.remove_internal(self) - elif self.has_internal(sprite): - self.remove_internal(sprite) - sprite.remove_internal(self) - - def has(self, *sprites): - """ask if group has a sprite or sprites - - Group.has(sprite or group, ...): return bool - - Returns True if the given sprite or sprites are contained in the - group. Alternatively, you can get the same information using the - 'in' operator, e.g. 'sprite in group', 'subgroup in group'. - - """ - if not sprites: - return False # return False if no sprites passed in - - for sprite in sprites: - if isinstance(sprite, Sprite): - # Check for Sprite instance's membership in this group - if not self.has_internal(sprite): - return False - else: - try: - if not self.has(*sprite): - return False - except (TypeError, AttributeError): - if hasattr(sprite, "_spritegroup"): - for spr in sprite.sprites(): - if not self.has_internal(spr): - return False - else: - if not self.has_internal(sprite): - return False - - return True - - def update(self, *args, **kwargs): - """call the update method of every member sprite - - Group.update(*args, **kwargs): return None - - Calls the update method of every member sprite. All arguments that - were passed to this method are passed to the Sprite update function. - - """ - for sprite in self.sprites(): - sprite.update(*args, **kwargs) - - def draw(self, surface): - """draw all sprites onto the surface - - Group.draw(surface): return Rect_list - - Draws all of the member sprites onto the given surface. - - """ - sprites = self.sprites() - if hasattr(surface, "blits"): - self.spritedict.update( - zip(sprites, surface.blits((spr.image, spr.rect) for spr in sprites)) - ) - else: - for spr in sprites: - self.spritedict[spr] = surface.blit(spr.image, spr.rect) - self.lostsprites = [] - dirty = self.lostsprites - - return dirty - - def clear(self, surface, bgd): - """erase the previous position of all sprites - - Group.clear(surface, bgd): return None - - Clears the area under every drawn sprite in the group. The bgd - argument should be Surface which is the same dimensions as the - screen surface. The bgd could also be a function which accepts - the given surface and the area to be cleared as arguments. - - """ - if callable(bgd): - for lost_clear_rect in self.lostsprites: - bgd(surface, lost_clear_rect) - for clear_rect in self.spritedict.values(): - if clear_rect: - bgd(surface, clear_rect) - else: - surface_blit = surface.blit - for lost_clear_rect in self.lostsprites: - surface_blit(bgd, lost_clear_rect, lost_clear_rect) - for clear_rect in self.spritedict.values(): - if clear_rect: - surface_blit(bgd, clear_rect, clear_rect) - - def empty(self): - """remove all sprites - - Group.empty(): return None - - Removes all the sprites from the group. 
- - """ - for sprite in self.sprites(): - self.remove_internal(sprite) - sprite.remove_internal(self) - - def __nonzero__(self): - return truth(self.sprites()) - - __bool__ = __nonzero__ - - def __len__(self): - """return number of sprites in group - - Group.len(group): return int - - Returns the number of sprites contained in the group. - - """ - return len(self.sprites()) - - def __repr__(self): - return f"<{self.__class__.__name__}({len(self)} sprites)>" - - -class Group(AbstractGroup): - """container class for many Sprites - - pygame.sprite.Group(*sprites): return Group - - A simple container for Sprite objects. This class can be subclassed to - create containers with more specific behaviors. The constructor takes any - number of Sprite arguments to add to the Group. The group supports the - following standard Python operations: - - in test if a Sprite is contained - len the number of Sprites contained - bool test if any Sprites are contained - iter iterate through all the Sprites - - The Sprites in the Group are not ordered, so the Sprites are drawn and - iterated over in no particular order. - - """ - - def __init__(self, *sprites): - AbstractGroup.__init__(self) - self.add(*sprites) - - -RenderPlain = Group -RenderClear = Group - - -class RenderUpdates(Group): - """Group class that tracks dirty updates - - pygame.sprite.RenderUpdates(*sprites): return RenderUpdates - - This class is derived from pygame.sprite.Group(). It has an enhanced draw - method that tracks the changed areas of the screen. - - """ - - def draw(self, surface): - surface_blit = surface.blit - dirty = self.lostsprites - self.lostsprites = [] - dirty_append = dirty.append - for sprite in self.sprites(): - old_rect = self.spritedict[sprite] - new_rect = surface_blit(sprite.image, sprite.rect) - if old_rect: - if new_rect.colliderect(old_rect): - dirty_append(new_rect.union(old_rect)) - else: - dirty_append(new_rect) - dirty_append(old_rect) - else: - dirty_append(new_rect) - self.spritedict[sprite] = new_rect - return dirty - - -class OrderedUpdates(RenderUpdates): - """RenderUpdates class that draws Sprites in order of addition - - pygame.sprite.OrderedUpdates(*spites): return OrderedUpdates - - This class derives from pygame.sprite.RenderUpdates(). It maintains - the order in which the Sprites were added to the Group for rendering. - This makes adding and removing Sprites from the Group a little - slower than regular Groups. - - """ - - def __init__(self, *sprites): - self._spritelist = [] - RenderUpdates.__init__(self, *sprites) - - def sprites(self): - return list(self._spritelist) - - def add_internal(self, sprite, layer=None): - RenderUpdates.add_internal(self, sprite) - self._spritelist.append(sprite) - - def remove_internal(self, sprite): - RenderUpdates.remove_internal(self, sprite) - self._spritelist.remove(sprite) - - -class LayeredUpdates(AbstractGroup): - """LayeredUpdates Group handles layers, which are drawn like OrderedUpdates - - pygame.sprite.LayeredUpdates(*spites, **kwargs): return LayeredUpdates - - This group is fully compatible with pygame.sprite.Sprite. - New in pygame 1.8.0 - - """ - - _init_rect = Rect(0, 0, 0, 0) - - def __init__(self, *sprites, **kwargs): - """initialize an instance of LayeredUpdates with the given attributes - - You can set the default layer through kwargs using 'default_layer' - and an integer for the layer. The default layer is 0. - - If the sprite you add has an attribute _layer, then that layer will be - used. 
If **kwarg contains 'layer', then the passed sprites will be - added to that layer (overriding the sprite._layer attribute). If - neither the sprite nor **kwarg has a 'layer', then the default layer is - used to add the sprites. - - """ - self._spritelayers = {} - self._spritelist = [] - AbstractGroup.__init__(self) - self._default_layer = kwargs.get("default_layer", 0) - - self.add(*sprites, **kwargs) - - def add_internal(self, sprite, layer=None): - """Do not use this method directly. - - It is used by the group to add a sprite internally. - - """ - self.spritedict[sprite] = self._init_rect - - if layer is None: - try: - layer = sprite.layer - except AttributeError: - layer = self._default_layer - setattr(sprite, "_layer", layer) - elif hasattr(sprite, "_layer"): - setattr(sprite, "_layer", layer) - - sprites = self._spritelist # speedup - sprites_layers = self._spritelayers - sprites_layers[sprite] = layer - - # add the sprite at the right position - # bisect algorithmus - leng = len(sprites) - low = mid = 0 - high = leng - 1 - while low <= high: - mid = low + (high - low) // 2 - if sprites_layers[sprites[mid]] <= layer: - low = mid + 1 - else: - high = mid - 1 - # linear search to find final position - while mid < leng and sprites_layers[sprites[mid]] <= layer: - mid += 1 - sprites.insert(mid, sprite) - - def add(self, *sprites, **kwargs): - """add a sprite or sequence of sprites to a group - - LayeredUpdates.add(*sprites, **kwargs): return None - - If the sprite you add has an attribute _layer, then that layer will be - used. If **kwarg contains 'layer', then the passed sprites will be - added to that layer (overriding the sprite._layer attribute). If - neither the sprite nor **kwarg has a 'layer', then the default layer is - used to add the sprites. - - """ - - if not sprites: - return - layer = kwargs["layer"] if "layer" in kwargs else None - for sprite in sprites: - # It's possible that some sprite is also an iterator. - # If this is the case, we should add the sprite itself, - # and not the iterator object. - if isinstance(sprite, Sprite): - if not self.has_internal(sprite): - self.add_internal(sprite, layer) - sprite.add_internal(self) - else: - try: - # See if sprite is an iterator, like a list or sprite - # group. - self.add(*sprite, **kwargs) - except (TypeError, AttributeError): - # Not iterable. This is probably a sprite that is not an - # instance of the Sprite class or is not an instance of a - # subclass of the Sprite class. Alternately, it could be an - # old-style sprite group. - if hasattr(sprite, "_spritegroup"): - for spr in sprite.sprites(): - if not self.has_internal(spr): - self.add_internal(spr, layer) - spr.add_internal(self) - elif not self.has_internal(sprite): - self.add_internal(sprite, layer) - sprite.add_internal(self) - - def remove_internal(self, sprite): - """Do not use this method directly. - - The group uses it to add a sprite. - - """ - self._spritelist.remove(sprite) - # these dirty rects are suboptimal for one frame - old_rect = self.spritedict[sprite] - if old_rect is not self._init_rect: - self.lostsprites.append(old_rect) # dirty rect - if hasattr(sprite, "rect"): - self.lostsprites.append(sprite.rect) # dirty rect - - del self.spritedict[sprite] - del self._spritelayers[sprite] - - def sprites(self): - """return a ordered list of sprites (first back, last top). 
- - LayeredUpdates.sprites(): return sprites - - """ - return list(self._spritelist) - - def draw(self, surface): - """draw all sprites in the right order onto the passed surface - - LayeredUpdates.draw(surface): return Rect_list - - """ - spritedict = self.spritedict - surface_blit = surface.blit - dirty = self.lostsprites - self.lostsprites = [] - dirty_append = dirty.append - init_rect = self._init_rect - for spr in self.sprites(): - rec = spritedict[spr] - newrect = surface_blit(spr.image, spr.rect) - if rec is init_rect: - dirty_append(newrect) - else: - if newrect.colliderect(rec): - dirty_append(newrect.union(rec)) - else: - dirty_append(newrect) - dirty_append(rec) - spritedict[spr] = newrect - return dirty - - def get_sprites_at(self, pos): - """return a list with all sprites at that position - - LayeredUpdates.get_sprites_at(pos): return colliding_sprites - - Bottom sprites are listed first; the top ones are listed last. - - """ - _sprites = self._spritelist - rect = Rect(pos, (1, 1)) - colliding_idx = rect.collidelistall(_sprites) - return [_sprites[i] for i in colliding_idx] - - def get_sprite(self, idx): - """return the sprite at the index idx from the groups sprites - - LayeredUpdates.get_sprite(idx): return sprite - - Raises IndexOutOfBounds if the idx is not within range. - - """ - return self._spritelist[idx] - - def remove_sprites_of_layer(self, layer_nr): - """remove all sprites from a layer and return them as a list - - LayeredUpdates.remove_sprites_of_layer(layer_nr): return sprites - - """ - sprites = self.get_sprites_from_layer(layer_nr) - self.remove(*sprites) - return sprites - - # layer methods - def layers(self): - """return a list of unique defined layers defined. - - LayeredUpdates.layers(): return layers - - """ - return sorted(set(self._spritelayers.values())) - - def change_layer(self, sprite, new_layer): - """change the layer of the sprite - - LayeredUpdates.change_layer(sprite, new_layer): return None - - The sprite must have been added to the renderer already. This is not - checked. - - """ - sprites = self._spritelist # speedup - sprites_layers = self._spritelayers # speedup - - sprites.remove(sprite) - sprites_layers.pop(sprite) - - # add the sprite at the right position - # bisect algorithmus - leng = len(sprites) - low = mid = 0 - high = leng - 1 - while low <= high: - mid = low + (high - low) // 2 - if sprites_layers[sprites[mid]] <= new_layer: - low = mid + 1 - else: - high = mid - 1 - # linear search to find final position - while mid < leng and sprites_layers[sprites[mid]] <= new_layer: - mid += 1 - sprites.insert(mid, sprite) - if hasattr(sprite, "_layer"): - setattr(sprite, "_layer", new_layer) - - # add layer info - sprites_layers[sprite] = new_layer - - def get_layer_of_sprite(self, sprite): - """return the layer that sprite is currently in - - If the sprite is not found, then it will return the default layer. - - """ - return self._spritelayers.get(sprite, self._default_layer) - - def get_top_layer(self): - """return the top layer - - LayeredUpdates.get_top_layer(): return layer - - """ - return self._spritelayers[self._spritelist[-1]] - - def get_bottom_layer(self): - """return the bottom layer - - LayeredUpdates.get_bottom_layer(): return layer - - """ - return self._spritelayers[self._spritelist[0]] - - def move_to_front(self, sprite): - """bring the sprite to front layer - - LayeredUpdates.move_to_front(sprite): return None - - Brings the sprite to front by changing the sprite layer to the top-most - layer. 
The sprite is added at the end of the list of sprites in that - top-most layer. - - """ - self.change_layer(sprite, self.get_top_layer()) - - def move_to_back(self, sprite): - """move the sprite to the bottom layer - - LayeredUpdates.move_to_back(sprite): return None - - Moves the sprite to the bottom layer by moving it to a new layer below - the current bottom layer. - - """ - self.change_layer(sprite, self.get_bottom_layer() - 1) - - def get_top_sprite(self): - """return the topmost sprite - - LayeredUpdates.get_top_sprite(): return Sprite - - """ - return self._spritelist[-1] - - def get_sprites_from_layer(self, layer): - """return all sprites from a layer ordered as they where added - - LayeredUpdates.get_sprites_from_layer(layer): return sprites - - Returns all sprites from a layer. The sprites are ordered in the - sequence that they where added. (The sprites are not removed from the - layer. - - """ - sprites = [] - sprites_append = sprites.append - sprite_layers = self._spritelayers - for spr in self._spritelist: - if sprite_layers[spr] == layer: - sprites_append(spr) - elif sprite_layers[spr] > layer: - # break after because no other will - # follow with same layer - break - return sprites - - def switch_layer(self, layer1_nr, layer2_nr): - """switch the sprites from layer1_nr to layer2_nr - - LayeredUpdates.switch_layer(layer1_nr, layer2_nr): return None - - The layers number must exist. This method does not check for the - existence of the given layers. - - """ - sprites1 = self.remove_sprites_of_layer(layer1_nr) - for spr in self.get_sprites_from_layer(layer2_nr): - self.change_layer(spr, layer1_nr) - self.add(layer=layer2_nr, *sprites1) - - -class LayeredDirty(LayeredUpdates): - """LayeredDirty Group is for DirtySprites; subclasses LayeredUpdates - - pygame.sprite.LayeredDirty(*spites, **kwargs): return LayeredDirty - - This group requires pygame.sprite.DirtySprite or any sprite that - has the following attributes: - image, rect, dirty, visible, blendmode (see doc of DirtySprite). - - It uses the dirty flag technique and is therefore faster than - pygame.sprite.RenderUpdates if you have many static sprites. It - also switches automatically between dirty rect updating and full - screen drawing, so you do no have to worry which would be faster. - - As with the pygame.sprite.Group, you can specify some additional attributes - through kwargs: - _use_update: True/False (default is False) - _default_layer: default layer where the sprites without a layer are - added - _time_threshold: threshold time for switching between dirty rect mode - and fullscreen mode; defaults to updating at 80 frames per second, - which is equal to 1000.0 / 80.0 - - New in pygame 1.8.0 - - """ - - def __init__(self, *sprites, **kwargs): - """initialize group. 
- - pygame.sprite.LayeredDirty(*spites, **kwargs): return LayeredDirty - - You can specify some additional attributes through kwargs: - _use_update: True/False (default is False) - _default_layer: default layer where the sprites without a layer are - added - _time_threshold: threshold time for switching between dirty rect - mode and fullscreen mode; defaults to updating at 80 frames per - second, which is equal to 1000.0 / 80.0 - - """ - LayeredUpdates.__init__(self, *sprites, **kwargs) - self._clip = None - - self._use_update = False - - self._time_threshold = 1000.0 / 80.0 # 1000.0 / fps - - self._bgd = None - for key, val in kwargs.items(): - if key in ["_use_update", "_time_threshold", "_default_layer"] and hasattr( - self, key - ): - setattr(self, key, val) - - def add_internal(self, sprite, layer=None): - """Do not use this method directly. - - It is used by the group to add a sprite internally. - - """ - # check if all needed attributes are set - if not hasattr(sprite, "dirty"): - raise AttributeError() - if not hasattr(sprite, "visible"): - raise AttributeError() - if not hasattr(sprite, "blendmode"): - raise AttributeError() - - if not isinstance(sprite, DirtySprite): - raise TypeError() - - if sprite.dirty == 0: # set it dirty if it is not - sprite.dirty = 1 - - LayeredUpdates.add_internal(self, sprite, layer) - - def draw( - self, surface, bgd=None - ): # noqa pylint: disable=arguments-differ; unable to change public interface - """draw all sprites in the right order onto the given surface - - LayeredDirty.draw(surface, bgd=None): return Rect_list - - You can pass the background too. If a self.bgd is already set to some - value that is not None, then the bgd argument has no effect. - - """ - # functions and classes assigned locally to speed up loops - orig_clip = surface.get_clip() - latest_clip = self._clip - if latest_clip is None: - latest_clip = orig_clip - - local_sprites = self._spritelist - local_old_rect = self.spritedict - local_update = self.lostsprites - rect_type = Rect - - surf_blit_func = surface.blit - if bgd is not None: - self._bgd = bgd - local_bgd = self._bgd - - surface.set_clip(latest_clip) - # ------- - # 0. decide whether to render with update or flip - start_time = get_ticks() - if self._use_update: # dirty rects mode - # 1. find dirty area on screen and put the rects into - # self.lostsprites still not happy with that part - self._find_dirty_area( - latest_clip, - local_old_rect, - rect_type, - local_sprites, - local_update, - local_update.append, - self._init_rect, - ) - # can it be done better? because that is an O(n**2) algorithm in - # worst case - - # clear using background - if local_bgd is not None: - for rec in local_update: - surf_blit_func(local_bgd, rec, rec) - - # 2. draw - self._draw_dirty_internal( - local_old_rect, rect_type, local_sprites, surf_blit_func, local_update - ) - local_ret = list(local_update) - else: # flip, full screen mode - if local_bgd is not None: - surf_blit_func(local_bgd, (0, 0)) - for spr in local_sprites: - if spr.visible: - local_old_rect[spr] = surf_blit_func( - spr.image, spr.rect, spr.source_rect, spr.blendmode - ) - # return only the part of the screen changed - local_ret = [rect_type(latest_clip)] - - # timing for switching modes - # How may a good threshold be found? It depends on the hardware. 
- end_time = get_ticks() - if end_time - start_time > self._time_threshold: - self._use_update = False - else: - self._use_update = True - - # emtpy dirty rects list - local_update[:] = [] - - # ------- - # restore original clip - surface.set_clip(orig_clip) - return local_ret - - @staticmethod - def _draw_dirty_internal(_old_rect, _rect, _sprites, _surf_blit, _update): - for spr in _sprites: - if spr.dirty < 1 and spr.visible: - # sprite not dirty; blit only the intersecting part - if spr.source_rect is not None: - # For possible future speed up, source_rect's data - # can be pre-fetched outside of this loop. - _spr_rect = _rect(spr.rect.topleft, spr.source_rect.size) - rect_offset_x = spr.source_rect[0] - _spr_rect[0] - rect_offset_y = spr.source_rect[1] - _spr_rect[1] - else: - _spr_rect = spr.rect - rect_offset_x = -_spr_rect[0] - rect_offset_y = -_spr_rect[1] - - _spr_rect_clip = _spr_rect.clip - - for idx in _spr_rect.collidelistall(_update): - # clip - clip = _spr_rect_clip(_update[idx]) - _surf_blit( - spr.image, - clip, - ( - clip[0] + rect_offset_x, - clip[1] + rect_offset_y, - clip[2], - clip[3], - ), - spr.blendmode, - ) - else: # dirty sprite - if spr.visible: - _old_rect[spr] = _surf_blit( - spr.image, spr.rect, spr.source_rect, spr.blendmode - ) - if spr.dirty == 1: - spr.dirty = 0 - - @staticmethod - def _find_dirty_area( - _clip, _old_rect, _rect, _sprites, _update, _update_append, init_rect - ): - for spr in _sprites: - if spr.dirty > 0: - # chose the right rect - if spr.source_rect: - _union_rect = _rect(spr.rect.topleft, spr.source_rect.size) - else: - _union_rect = _rect(spr.rect) - - _union_rect_collidelist = _union_rect.collidelist - _union_rect_union_ip = _union_rect.union_ip - i = _union_rect_collidelist(_update) - while i > -1: - _union_rect_union_ip(_update[i]) - del _update[i] - i = _union_rect_collidelist(_update) - _update_append(_union_rect.clip(_clip)) - - if _old_rect[spr] is not init_rect: - _union_rect = _rect(_old_rect[spr]) - _union_rect_collidelist = _union_rect.collidelist - _union_rect_union_ip = _union_rect.union_ip - i = _union_rect_collidelist(_update) - while i > -1: - _union_rect_union_ip(_update[i]) - del _update[i] - i = _union_rect_collidelist(_update) - _update_append(_union_rect.clip(_clip)) - - def clear(self, surface, bgd): - """use to set background - - Group.clear(surface, bgd): return None - - """ - self._bgd = bgd - - def repaint_rect(self, screen_rect): - """repaint the given area - - LayeredDirty.repaint_rect(screen_rect): return None - - screen_rect is in screen coordinates. - - """ - if self._clip: - self.lostsprites.append(screen_rect.clip(self._clip)) - else: - self.lostsprites.append(Rect(screen_rect)) - - def set_clip(self, screen_rect=None): - """clip the area where to draw; pass None (default) to reset the clip - - LayeredDirty.set_clip(screen_rect=None): return None - - """ - if screen_rect is None: - self._clip = pygame.display.get_surface().get_rect() - else: - self._clip = screen_rect - self._use_update = False - - def get_clip(self): - """get the area where drawing will occur - - LayeredDirty.get_clip(): return Rect - - """ - return self._clip - - def change_layer(self, sprite, new_layer): - """change the layer of the sprite - - LayeredUpdates.change_layer(sprite, new_layer): return None - - The sprite must have been added to the renderer already. This is not - checked. 
- - """ - LayeredUpdates.change_layer(self, sprite, new_layer) - if sprite.dirty == 0: - sprite.dirty = 1 - - def set_timing_treshold(self, time_ms): - """set the threshold in milliseconds - - set_timing_treshold(time_ms): return None - - Defaults to 1000.0 / 80.0. This means that the screen will be painted - using the flip method rather than the update method if the update - method is taking so long to update the screen that the frame rate falls - below 80 frames per second. - - Raises TypeError if time_ms is not int or float. - - """ - warn( - "This function will be removed, use set_timing_threshold function instead", - DeprecationWarning, - ) - self.set_timing_threshold(time_ms) - - def set_timing_threshold(self, time_ms): - """set the threshold in milliseconds - - set_timing_threshold(time_ms): return None - - Defaults to 1000.0 / 80.0. This means that the screen will be painted - using the flip method rather than the update method if the update - method is taking so long to update the screen that the frame rate falls - below 80 frames per second. - - Raises TypeError if time_ms is not int or float. - - """ - if isinstance(time_ms, (int, float)): - self._time_threshold = time_ms - else: - raise TypeError( - f"Expected numeric value, got {time_ms.__class__.__name__} instead" - ) - - -class GroupSingle(AbstractGroup): - """A group container that holds a single most recent item. - - This class works just like a regular group, but it only keeps a single - sprite in the group. Whatever sprite has been added to the group last will - be the only sprite in the group. - - You can access its one sprite as the .sprite attribute. Assigning to this - attribute will properly remove the old sprite and then add the new one. - - """ - - def __init__(self, sprite=None): - AbstractGroup.__init__(self) - self.__sprite = None - if sprite is not None: - self.add(sprite) - - def copy(self): - return GroupSingle(self.__sprite) - - def sprites(self): - if self.__sprite is not None: - return [self.__sprite] - return [] - - def add_internal(self, sprite, _=None): - if self.__sprite is not None: - self.__sprite.remove_internal(self) - self.remove_internal(self.__sprite) - self.__sprite = sprite - - def __nonzero__(self): - return self.__sprite is not None - - __bool__ = __nonzero__ - - def _get_sprite(self): - return self.__sprite - - def _set_sprite(self, sprite): - self.add_internal(sprite) - sprite.add_internal(self) - return sprite - - @property - def sprite(self): - """ - Property for the single sprite contained in this group - - :return: The sprite. - """ - return self._get_sprite() - - @sprite.setter - def sprite(self, sprite_to_set): - self._set_sprite(sprite_to_set) - - def remove_internal(self, sprite): - if sprite is self.__sprite: - self.__sprite = None - if sprite in self.spritedict: - AbstractGroup.remove_internal(self, sprite) - - def has_internal(self, sprite): - return self.__sprite is sprite - - # Optimizations... - def __contains__(self, sprite): - return self.__sprite is sprite - - -# Some different collision detection functions that could be used. -def collide_rect(left, right): - """collision detection between two sprites, using rects. - - pygame.sprite.collide_rect(left, right): return bool - - Tests for collision between two sprites. Uses the pygame.Rect colliderect - function to calculate the collision. It is intended to be passed as a - collided callback function to the *collide functions. Sprites must have - "rect" attributes. 
- - New in pygame 1.8.0 - - """ - return left.rect.colliderect(right.rect) - - -class collide_rect_ratio: # noqa pylint: disable=invalid-name; this is a function-like class - """A callable class that checks for collisions using scaled rects - - The class checks for collisions between two sprites using a scaled version - of the sprites' rects. Is created with a ratio; the instance is then - intended to be passed as a collided callback function to the *collide - functions. - - New in pygame 1.8.1 - - """ - - def __init__(self, ratio): - """create a new collide_rect_ratio callable - - Ratio is expected to be a floating point value used to scale - the underlying sprite rect before checking for collisions. - - """ - self.ratio = ratio - - def __repr__(self): - """ - Turn the class into a string. - """ - # pylint: disable=consider-using-f-string - return "<{klass} @{id:x} {attrs}>".format( - klass=self.__class__.__name__, - id=id(self) & 0xFFFFFF, - attrs=" ".join("{}={!r}".format(k, v) for k, v in self.__dict__.items()), - ) - - def __call__(self, left, right): - """detect collision between two sprites using scaled rects - - pygame.sprite.collide_rect_ratio(ratio)(left, right): return bool - - Tests for collision between two sprites. Uses the pygame.Rect - colliderect function to calculate the collision after scaling the rects - by the stored ratio. Sprites must have "rect" attributes. - - """ - - ratio = self.ratio - - leftrect = left.rect - width = leftrect.width - height = leftrect.height - leftrect = leftrect.inflate(width * ratio - width, height * ratio - height) - - rightrect = right.rect - width = rightrect.width - height = rightrect.height - rightrect = rightrect.inflate(width * ratio - width, height * ratio - height) - - return leftrect.colliderect(rightrect) - - -def collide_circle(left, right): - """detect collision between two sprites using circles - - pygame.sprite.collide_circle(left, right): return bool - - Tests for collision between two sprites by testing whether two circles - centered on the sprites overlap. If the sprites have a "radius" attribute, - then that radius is used to create the circle; otherwise, a circle is - created that is big enough to completely enclose the sprite's rect as - given by the "rect" attribute. This function is intended to be passed as - a collided callback function to the *collide functions. Sprites must have a - "rect" and an optional "radius" attribute. 
- - New in pygame 1.8.0 - - """ - - xdistance = left.rect.centerx - right.rect.centerx - ydistance = left.rect.centery - right.rect.centery - distancesquared = xdistance ** 2 + ydistance ** 2 - - try: - leftradius = left.radius - except AttributeError: - leftrect = left.rect - # approximating the radius of a square by using half of the diagonal, - # might give false positives (especially if its a long small rect) - leftradius = 0.5 * ((leftrect.width ** 2 + leftrect.height ** 2) ** 0.5) - # store the radius on the sprite for next time - left.radius = leftradius - - try: - rightradius = right.radius - except AttributeError: - rightrect = right.rect - # approximating the radius of a square by using half of the diagonal - # might give false positives (especially if its a long small rect) - rightradius = 0.5 * ((rightrect.width ** 2 + rightrect.height ** 2) ** 0.5) - # store the radius on the sprite for next time - right.radius = rightradius - return distancesquared <= (leftradius + rightradius) ** 2 - - -class collide_circle_ratio( - object -): # noqa pylint: disable=invalid-name; this is a function-like class - """detect collision between two sprites using scaled circles - - This callable class checks for collisions between two sprites using a - scaled version of a sprite's radius. It is created with a ratio as the - argument to the constructor. The instance is then intended to be passed as - a collided callback function to the *collide functions. - - New in pygame 1.8.1 - - """ - - def __init__(self, ratio): - """creates a new collide_circle_ratio callable instance - - The given ratio is expected to be a floating point value used to scale - the underlying sprite radius before checking for collisions. - - When the ratio is ratio=1.0, then it behaves exactly like the - collide_circle method. - - """ - self.ratio = ratio - - def __repr__(self): - """ - Turn the class into a string. - """ - # pylint: disable=consider-using-f-string - return "<{klass} @{id:x} {attrs}>".format( - klass=self.__class__.__name__, - id=id(self) & 0xFFFFFF, - attrs=" ".join("{}={!r}".format(k, v) for k, v in self.__dict__.items()), - ) - - def __call__(self, left, right): - """detect collision between two sprites using scaled circles - - pygame.sprite.collide_circle_radio(ratio)(left, right): return bool - - Tests for collision between two sprites by testing whether two circles - centered on the sprites overlap after scaling the circle's radius by - the stored ratio. If the sprites have a "radius" attribute, that is - used to create the circle; otherwise, a circle is created that is big - enough to completely enclose the sprite's rect as given by the "rect" - attribute. Intended to be passed as a collided callback function to the - *collide functions. Sprites must have a "rect" and an optional "radius" - attribute. 
- - """ - - ratio = self.ratio - xdistance = left.rect.centerx - right.rect.centerx - ydistance = left.rect.centery - right.rect.centery - distancesquared = xdistance ** 2 + ydistance ** 2 - - try: - leftradius = left.radius - except AttributeError: - leftrect = left.rect - leftradius = 0.5 * ((leftrect.width ** 2 + leftrect.height ** 2) ** 0.5) - # store the radius on the sprite for next time - left.radius = leftradius - leftradius *= ratio - - try: - rightradius = right.radius - except AttributeError: - rightrect = right.rect - rightradius = 0.5 * ((rightrect.width ** 2 + rightrect.height ** 2) ** 0.5) - # store the radius on the sprite for next time - right.radius = rightradius - rightradius *= ratio - - return distancesquared <= (leftradius + rightradius) ** 2 - - -def collide_mask(left, right): - """collision detection between two sprites, using masks. - - pygame.sprite.collide_mask(SpriteLeft, SpriteRight): bool - - Tests for collision between two sprites by testing if their bitmasks - overlap. If the sprites have a "mask" attribute, that is used as the mask; - otherwise, a mask is created from the sprite image. Intended to be passed - as a collided callback function to the *collide functions. Sprites must - have a "rect" and an optional "mask" attribute. - - New in pygame 1.8.0 - - """ - xoffset = right.rect[0] - left.rect[0] - yoffset = right.rect[1] - left.rect[1] - try: - leftmask = left.mask - except AttributeError: - leftmask = from_surface(left.image) - try: - rightmask = right.mask - except AttributeError: - rightmask = from_surface(right.image) - return leftmask.overlap(rightmask, (xoffset, yoffset)) - - -def spritecollide(sprite, group, dokill, collided=None): - """find Sprites in a Group that intersect another Sprite - - pygame.sprite.spritecollide(sprite, group, dokill, collided=None): - return Sprite_list - - Return a list containing all Sprites in a Group that intersect with another - Sprite. Intersection is determined by comparing the Sprite.rect attribute - of each Sprite. - - The dokill argument is a bool. If set to True, all Sprites that collide - will be removed from the Group. - - The collided argument is a callback function used to calculate if two - sprites are colliding. it should take two sprites as values, and return a - bool value indicating if they are colliding. If collided is not passed, all - sprites must have a "rect" value, which is a rectangle of the sprite area, - which will be used to calculate the collision. - - """ - # pull the default collision function in as a local variable outside - # the loop as this makes the loop run faster - default_sprite_collide_func = sprite.rect.colliderect - - if dokill: - - crashed = [] - append = crashed.append - - for group_sprite in group.sprites(): - if collided: - if collided(sprite, group_sprite): - group_sprite.kill() - append(group_sprite) - else: - if default_sprite_collide_func(group_sprite.rect): - group_sprite.kill() - append(group_sprite) - - return crashed - - if collided: - return [ - group_sprite for group_sprite in group if collided(sprite, group_sprite) - ] - - return [ - group_sprite - for group_sprite in group - if default_sprite_collide_func(group_sprite.rect) - ] - - -def groupcollide(groupa, groupb, dokilla, dokillb, collided=None): - """detect collision between a group and another group - - pygame.sprite.groupcollide(groupa, groupb, dokilla, dokillb): - return dict - - Given two groups, this will find the intersections between all sprites in - each group. 
It returns a dictionary of all sprites in the first group that - collide. The value for each item in the dictionary is a list of the sprites - in the second group it collides with. The two dokill arguments control if - the sprites from either group will be automatically removed from all - groups. Collided is a callback function used to calculate if two sprites - are colliding. it should take two sprites as values, and return a bool - value indicating if they are colliding. If collided is not passed, all - sprites must have a "rect" value, which is a rectangle of the sprite area - that will be used to calculate the collision. - - """ - crashed = {} - # pull the collision function in as a local variable outside - # the loop as this makes the loop run faster - sprite_collide_func = spritecollide - if dokilla: - for group_a_sprite in groupa.sprites(): - collision = sprite_collide_func(group_a_sprite, groupb, dokillb, collided) - if collision: - crashed[group_a_sprite] = collision - group_a_sprite.kill() - else: - for group_a_sprite in groupa: - collision = sprite_collide_func(group_a_sprite, groupb, dokillb, collided) - if collision: - crashed[group_a_sprite] = collision - return crashed - - -def spritecollideany(sprite, group, collided=None): - """finds any sprites in a group that collide with the given sprite - - pygame.sprite.spritecollideany(sprite, group): return sprite - - Given a sprite and a group of sprites, this will return return any single - sprite that collides with with the given sprite. If there are no - collisions, then this returns None. - - If you don't need all the features of the spritecollide function, this - function will be a bit quicker. - - Collided is a callback function used to calculate if two sprites are - colliding. It should take two sprites as values and return a bool value - indicating if they are colliding. If collided is not passed, then all - sprites must have a "rect" value, which is a rectangle of the sprite area, - which will be used to calculate the collision. - - - """ - # pull the default collision function in as a local variable outside - # the loop as this makes the loop run faster - default_sprite_collide_func = sprite.rect.colliderect - - if collided is not None: - for group_sprite in group: - if collided(sprite, group_sprite): - return group_sprite - else: - # Special case old behaviour for speed. - for group_sprite in group: - if default_sprite_collide_func(group_sprite.rect): - return group_sprite - return None diff --git a/venv/Lib/site-packages/pygame/sprite.pyi b/venv/Lib/site-packages/pygame/sprite.pyi deleted file mode 100644 index 4e2f0fd..0000000 --- a/venv/Lib/site-packages/pygame/sprite.pyi +++ /dev/null @@ -1,152 +0,0 @@ -from typing import ( - Any, - Callable, - Dict, - Iterable, - Iterator, - List, - Optional, - Sequence, - SupportsFloat, - Tuple, - Union, -) - -from pygame.rect import Rect -from pygame.surface import Surface - -from ._common import _CanBeRect - -# Some functions violate Liskov substitution principle so mypy will throw errors for this file, but this are the -# best type hints I could do - -class Sprite: - image: Optional[Surface] = None - rect: Optional[Rect] = None - def __init__(self, *groups: AbstractGroup) -> None: ... - def update(self, *args: Any, **kwargs: Any) -> None: ... - def add(self, *groups: AbstractGroup) -> None: ... - def remove(self, *groups: AbstractGroup) -> None: ... - def kill(self) -> None: ... - def alive(self) -> bool: ... - def groups(self) -> List[AbstractGroup]: ... 
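As a quick illustration of the group collision helpers documented in the deleted sprite.py above (spritecollide, groupcollide, spritecollideany), here is a minimal sketch; the Block class, sizes and colours are invented for the example and are not part of pygame:

import pygame

class Block(pygame.sprite.Sprite):
    """Hypothetical sprite: just an image and a rect, as the docstrings require."""
    def __init__(self, pos):
        super().__init__()
        self.image = pygame.Surface((32, 32))
        self.image.fill((255, 0, 0))
        self.rect = self.image.get_rect(topleft=pos)

player = Block((100, 100))
coins = pygame.sprite.Group(Block((100, 100)), Block((300, 300)))

# dokill=True removes every colliding coin from all of its groups.
picked_up = pygame.sprite.spritecollide(player, coins, dokill=True)

# groupcollide maps each sprite of the first group to the list of sprites
# it hit in the second group; with False/False neither group is modified.
players = pygame.sprite.Group(player)
hits = pygame.sprite.groupcollide(players, coins, False, False)

# spritecollideany returns one colliding sprite (or None); it is cheaper
# when only a yes/no answer is needed.
touching = pygame.sprite.spritecollideany(player, coins)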
- -class DirtySprite(Sprite): - dirty: int - blendmode: int - source_rect: Rect - visible: int - _layer: int - def _set_visible(self, value: int) -> None: ... - def _get_visible(self) -> int: ... - -class AbstractGroup: - spritedict: Dict[Sprite, Rect] - lostsprites: List[int] # I think - def __init__(self) -> None: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[Sprite]: ... - def copy(self) -> AbstractGroup: ... - def sprites(self) -> List[Sprite]: ... - def add( - self, - *sprites: Union[Sprite, AbstractGroup, Iterable[Union[Sprite, AbstractGroup]]] - ) -> None: ... - def remove(self, *sprites: Sprite) -> None: ... - def has(self, *sprites: Sprite) -> bool: ... - def update(self, *args: Any, **kwargs: Any) -> None: ... - def draw(self, surface: Surface) -> List[Rect]: ... - def clear(self, surface_dest: Surface, background: Surface) -> None: ... - def empty(self) -> None: ... - -class Group(AbstractGroup): - def __init__(self, *sprites: Union[Sprite, Sequence[Sprite]]) -> None: ... - def copy(self) -> Group: ... - -class RenderPlain(Group): - def copy(self) -> RenderPlain: ... - -class RenderClear(Group): - def copy(self) -> RenderClear: ... - -class RenderUpdates(Group): - def copy(self) -> RenderUpdates: ... - def draw(self, surface: Surface) -> List[Rect]: ... - -class OrderedUpdates(RenderUpdates): - def copy(self) -> OrderedUpdates: ... - -class LayeredUpdates(AbstractGroup): - def __init__(self, *sprites: Sprite, **kwargs: Any) -> None: ... - def copy(self) -> LayeredUpdates: ... - def add(self, *sprites: Sprite, **kwargs: Any) -> None: ... - def draw(self, surface: Surface) -> List[Rect]: ... - def get_sprites_at( - self, pos: Union[Tuple[int, int], List[int]] - ) -> List[Sprite]: ... - def get_sprite(self, idx: int) -> Sprite: ... - def remove_sprites_of_layer(self, layer_nr: int) -> List[Sprite]: ... - def layers(self) -> List[int]: ... - def change_layer(self, sprite: Sprite, new_layer: int) -> None: ... - def get_layer_of_sprite(self, sprite: Sprite) -> int: ... - def get_top_layer(self) -> int: ... - def get_bottom_layer(self) -> int: ... - def move_to_front(self, sprite: Sprite) -> None: ... - def move_to_back(self, sprite: Sprite) -> None: ... - def get_top_sprite(self) -> Sprite: ... - def get_sprites_from_layer(self, layer: int) -> List[Sprite]: ... - def switch_layer(self, layer1_nr: int, layer2_nr: int) -> None: ... - -class LayeredDirty(LayeredUpdates): - def __init__(self, *sprites: DirtySprite, **kwargs: Any) -> None: ... - def copy(self) -> LayeredDirty: ... - def draw(self, surface: Surface, bgd: Optional[Surface] = None) -> List[Rect]: ... - def clear(self, surface: Surface, bgd: Surface) -> None: ... - def repaint_rect(self, screen_rect: _CanBeRect) -> None: ... - def set_clip(self, screen_rect: Optional[_CanBeRect] = None) -> None: ... - def get_clip(self) -> Rect: ... - def set_timing_treshold( - self, time_ms: SupportsFloat - ) -> None: ... # This actually accept any value - def set_timing_threshold( - self, time_ms: SupportsFloat - ) -> None: ... # This actually accept any value - -class GroupSingle(AbstractGroup): - sprite: Sprite - def __init__(self, sprite: Optional[Sprite] = None) -> None: ... - def copy(self) -> GroupSingle: ... - -def spritecollide( - sprite: Sprite, - group: AbstractGroup, - dokill: bool, - collided: Optional[Callable[[Sprite, Sprite], bool]] = None, -) -> List[Sprite]: ... -def collide_rect(left: Sprite, right: Sprite) -> bool: ... 
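The collided callbacks described above (collide_rect_ratio, collide_circle, collide_mask) all plug into the same *collide functions. A small sketch, assuming a hypothetical Ball sprite with an explicit radius attribute:

import pygame

class Ball(pygame.sprite.Sprite):
    def __init__(self, center):
        super().__init__()
        self.image = pygame.Surface((40, 40), pygame.SRCALPHA)
        pygame.draw.circle(self.image, (0, 200, 0), (20, 20), 20)
        self.rect = self.image.get_rect(center=center)
        self.radius = 20   # picked up by collide_circle instead of the bounding rect

ball = Ball((50, 50))
others = pygame.sprite.Group(Ball((60, 60)), Ball((200, 200)))

# Circle test: uses the optional .radius attribute when it exists.
hit = pygame.sprite.spritecollideany(ball, others,
                                     collided=pygame.sprite.collide_circle)

# Rect test shrunk to 75% of each rect; the class instance is the callback.
tight = pygame.sprite.spritecollide(
    ball, others, dokill=False,
    collided=pygame.sprite.collide_rect_ratio(0.75))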
- -class collide_rect_ratio: - ratio: float - def __init__(self, ratio: float) -> None: ... - def __call__(self, left: Sprite, right: Sprite) -> bool: ... - -def collide_circle(left: Sprite, right: Sprite) -> bool: ... - -class collide_circle_ratio: - ratio: float - def __init__(self, ratio: float) -> None: ... - def __call__(self, left: Sprite, right: Sprite) -> bool: ... - -def collide_mask(sprite1: Sprite, sprite2: Sprite) -> Tuple[int, int]: ... -def groupcollide( - group1: AbstractGroup, - group2: AbstractGroup, - dokill: bool, - dokill2: bool, - collided: Optional[Callable[[Sprite, Sprite], bool]] = None, -) -> Dict[Sprite, Sprite]: ... -def spritecollideany( - sprite: Sprite, - group: AbstractGroup, - collided: Optional[Callable[[Sprite, Sprite], bool]] = None, -) -> Sprite: ... diff --git a/venv/Lib/site-packages/pygame/surface.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/surface.cp39-win_amd64.pyd deleted file mode 100644 index 569b82f..0000000 Binary files a/venv/Lib/site-packages/pygame/surface.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/surface.pyi b/venv/Lib/site-packages/pygame/surface.pyi deleted file mode 100644 index 7b086a5..0000000 --- a/venv/Lib/site-packages/pygame/surface.pyi +++ /dev/null @@ -1,121 +0,0 @@ -from typing import Any, List, Optional, Sequence, Text, Tuple, Union, overload - -from pygame.bufferproxy import BufferProxy -from pygame.math import Vector2 -from pygame.rect import Rect - -from ._common import _CanBeRect, _ColorValue, _Coordinate, _RgbaOutput - -class Surface(object): - _pixels_address: int - @overload - def __init__( - self, - size: _Coordinate, - flags: int = ..., - depth: int = ..., - masks: Optional[_ColorValue] = ..., - ) -> None: ... - @overload - def __init__( - self, - size: _Coordinate, - flags: int = ..., - surface: Surface = ..., - ) -> None: ... - def blit( - self, - source: Surface, - dest: Union[_Coordinate, _CanBeRect], - area: Optional[_CanBeRect] = ..., - special_flags: int = ..., - ) -> Rect: ... - def blits( - self, - blit_sequence: Sequence[ - Union[ - Tuple[Surface, Union[_Coordinate, _CanBeRect]], - Tuple[Surface, Union[_Coordinate, _CanBeRect], Union[_CanBeRect, int]], - Tuple[Surface, Union[_Coordinate, _CanBeRect], _CanBeRect, int], - ] - ], - doreturn: Union[int, bool] = 1, - ) -> Union[List[Rect], None]: ... - @overload - def convert(self, surface: Surface) -> Surface: ... - @overload - def convert(self, depth: int, flags: int = ...) -> Surface: ... - @overload - def convert(self, masks: _ColorValue, flags: int = ...) -> Surface: ... - @overload - def convert(self) -> Surface: ... - @overload - def convert_alpha(self, surface: Surface) -> Surface: ... - @overload - def convert_alpha(self) -> Surface: ... - def copy(self) -> Surface: ... - def fill( - self, - color: _ColorValue, - rect: Optional[_CanBeRect] = ..., - special_flags: int = ..., - ) -> Rect: ... - def scroll(self, dx: int = ..., dy: int = ...) -> None: ... - @overload - def set_colorkey(self, color: _ColorValue, flags: int = ...) -> None: ... - @overload - def set_colorkey(self, color: None) -> None: ... - def get_colorkey(self) -> Optional[_RgbaOutput]: ... - @overload - def set_alpha(self, value: int, flags: int = ...) -> None: ... - @overload - def set_alpha(self, value: None) -> None: ... - def get_alpha(self) -> Optional[int]: ... - def lock(self) -> None: ... - def unlock(self) -> None: ... - def mustlock(self) -> bool: ... - def get_locked(self) -> bool: ... - def get_locks(self) -> Tuple[Any, ...]: ... 
- def get_at(self, x_y: Sequence[int]) -> _RgbaOutput: ... - def set_at(self, x_y: Sequence[int], color: _ColorValue) -> None: ... - def get_at_mapped(self, x_y: Sequence[int]) -> int: ... - def get_palette(self) -> List[_RgbaOutput]: ... - def get_palette_at(self, index: int) -> _RgbaOutput: ... - def set_palette(self, palette: List[_ColorValue]) -> None: ... - def set_palette_at(self, index: int, color: _ColorValue) -> None: ... - def map_rgb(self, color: _ColorValue) -> int: ... - def unmap_rgb(self, mapped_int: int) -> _RgbaOutput: ... - def set_clip(self, rect: Optional[_CanBeRect]) -> None: ... - def get_clip(self) -> Rect: ... - @overload - def subsurface(self, rect: Union[_CanBeRect, Rect]) -> Surface: ... - @overload - def subsurface( - self, - left_top: Union[List[float], Tuple[float, float], Vector2], - width_height: Union[List[float], Tuple[float, float], Vector2], - ) -> Surface: ... - @overload - def subsurface( - self, left: float, top: float, width: float, height: float - ) -> Surface: ... - def get_parent(self) -> Surface: ... - def get_abs_parent(self) -> Surface: ... - def get_offset(self) -> Tuple[int, int]: ... - def get_abs_offset(self) -> Tuple[int, int]: ... - def get_size(self) -> Tuple[int, int]: ... - def get_width(self) -> int: ... - def get_height(self) -> int: ... - def get_rect(self, **kwargs: Any) -> Rect: ... - def get_bitsize(self) -> int: ... - def get_bytesize(self) -> int: ... - def get_flags(self) -> int: ... - def get_pitch(self) -> int: ... - def get_masks(self) -> _RgbaOutput: ... - def set_masks(self, color: _ColorValue) -> None: ... - def get_shifts(self) -> _RgbaOutput: ... - def set_shifts(self, color: _ColorValue) -> None: ... - def get_losses(self) -> _RgbaOutput: ... - def get_bounding_rect(self, min_alpha: int = ...) -> Rect: ... - def get_view(self, kind: Text = ...) -> BufferProxy: ... - def get_buffer(self) -> BufferProxy: ... diff --git a/venv/Lib/site-packages/pygame/surfarray.py b/venv/Lib/site-packages/pygame/surfarray.py deleted file mode 100644 index 70653fd..0000000 --- a/venv/Lib/site-packages/pygame/surfarray.py +++ /dev/null @@ -1,447 +0,0 @@ -## pygame - Python Game Library -## Copyright (C) 2007 Marcus von Appen -## -## This library is free software; you can redistribute it and/or -## modify it under the terms of the GNU Library General Public -## License as published by the Free Software Foundation; either -## version 2 of the License, or (at your option) any later version. -## -## This library is distributed in the hope that it will be useful, -## but WITHOUT ANY WARRANTY; without even the implied warranty of -## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -## Library General Public License for more details. -## -## You should have received a copy of the GNU Library General Public -## License along with this library; if not, write to the Free -## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -## -## Marcus von Appen -## mva@sysfault.org - -"""pygame module for accessing surface pixel data using array interfaces - -Functions to convert between NumPy arrays and Surface objects. This module -will only be functional when pygame can use the external NumPy package. -If NumPy can't be imported, surfarray becomes a MissingModule object. - -Every pixel is stored as a single integer value to represent the red, -green, and blue colors. The 8bit images use a value that looks into a -colormap. 
Pixels with higher depth use a bit packing process to place -three or four values into a single number. - -The arrays are indexed by the X axis first, followed by the Y -axis. Arrays that treat the pixels as a single integer are referred to -as 2D arrays. This module can also separate the red, green, and blue -color values into separate indices. These types of arrays are referred -to as 3D arrays, and the last index is 0 for red, 1 for green, and 2 for -blue. -""" - - -from pygame.pixelcopy import ( - array_to_surface, - surface_to_array, - map_array as pix_map_array, - make_surface as pix_make_surface, -) -import numpy -from numpy import ( - array as numpy_array, - empty as numpy_empty, - uint32 as numpy_uint32, - ndarray as numpy_ndarray, -) - -import warnings # will be removed in the future - - -# float96 not available on all numpy versions. -numpy_floats = [] -for type_name in "float32 float64 float96".split(): - if hasattr(numpy, type_name): - numpy_floats.append(getattr(numpy, type_name)) -# Added below due to deprecation of numpy.float. See issue #2814 -numpy_floats.append(float) - -# Pixel sizes corresponding to NumPy supported integer sizes, and therefore -# permissible for 2D reference arrays. -_pixel2d_bitdepths = {8, 16, 32} - - -__all__ = [ - "array2d", - "array3d", - "array_alpha", - "array_blue", - "array_colorkey", - "array_green", - "array_red", - "array_to_surface", - "blit_array", - "get_arraytype", - "get_arraytypes", - "make_surface", - "map_array", - "pixels2d", - "pixels3d", - "pixels_alpha", - "pixels_blue", - "pixels_green", - "pixels_red", - "surface_to_array", - "use_arraytype", -] - - -def blit_array(surface, array): - """pygame.surfarray.blit_array(Surface, array): return None - - Blit directly from a array values. - - Directly copy values from an array into a Surface. This is faster than - converting the array into a Surface and blitting. The array must be the - same dimensions as the Surface and will completely replace all pixel - values. Only integer, ascii character and record arrays are accepted. - - This function will temporarily lock the Surface as the new values are - copied. - """ - if isinstance(array, numpy_ndarray) and array.dtype in numpy_floats: - array = array.round(0).astype(numpy_uint32) - return array_to_surface(surface, array) - - -def make_surface(array): - """pygame.surfarray.make_surface (array): return Surface - - Copy an array to a new surface. - - Create a new Surface that best resembles the data and format on the - array. The array can be 2D or 3D with any sized integer values. - """ - if isinstance(array, numpy_ndarray) and array.dtype in numpy_floats: - array = array.round(0).astype(numpy_uint32) - return pix_make_surface(array) - - -def array2d(surface): - """pygame.surfarray.array2d(Surface): return array - - copy pixels into a 2d array - - Copy the pixels from a Surface into a 2D array. The bit depth of the - surface will control the size of the integer values, and will work - for any type of pixel format. - - This function will temporarily lock the Surface as pixels are copied - (see the Surface.lock - lock the Surface memory for pixel access - method). 
- """ - bpp = surface.get_bytesize() - try: - dtype = (numpy.uint8, numpy.uint16, numpy.int32, numpy.int32)[bpp - 1] - except IndexError: - raise ValueError(f"unsupported bit depth {bpp * 8} for 2D array") - size = surface.get_size() - array = numpy.empty(size, dtype) - surface_to_array(array, surface) - return array - - -def pixels2d(surface): - """pygame.surfarray.pixels2d(Surface): return array - - reference pixels into a 2d array - - Create a new 2D array that directly references the pixel values in a - Surface. Any changes to the array will affect the pixels in the - Surface. This is a fast operation since no data is copied. - - Pixels from a 24-bit Surface cannot be referenced, but all other - Surface bit depths can. - - The Surface this references will remain locked for the lifetime of - the array (see the Surface.lock - lock the Surface memory for pixel - access method). - """ - if surface.get_bitsize() not in _pixel2d_bitdepths: - raise ValueError("unsupport bit depth for 2D reference array") - try: - return numpy_array(surface.get_view("2"), copy=False) - except (ValueError, TypeError): - raise ValueError( - f"bit depth {surface.get_bitsize()} unsupported for 2D reference array" - ) - - -def array3d(surface): - """pygame.surfarray.array3d(Surface): return array - - copy pixels into a 3d array - - Copy the pixels from a Surface into a 3D array. The bit depth of the - surface will control the size of the integer values, and will work - for any type of pixel format. - - This function will temporarily lock the Surface as pixels are copied - (see the Surface.lock - lock the Surface memory for pixel access - method). - """ - width, height = surface.get_size() - array = numpy.empty((width, height, 3), numpy.uint8) - surface_to_array(array, surface) - return array - - -def pixels3d(surface): - """pygame.surfarray.pixels3d(Surface): return array - - reference pixels into a 3d array - - Create a new 3D array that directly references the pixel values in a - Surface. Any changes to the array will affect the pixels in the - Surface. This is a fast operation since no data is copied. - - This will only work on Surfaces that have 24-bit or 32-bit - formats. Lower pixel formats cannot be referenced. - - The Surface this references will remain locked for the lifetime of - the array (see the Surface.lock - lock the Surface memory for pixel - access method). - """ - return numpy_array(surface.get_view("3"), copy=False) - - -def array_alpha(surface): - """pygame.surfarray.array_alpha(Surface): return array - - copy pixel alphas into a 2d array - - Copy the pixel alpha values (degree of transparency) from a Surface - into a 2D array. This will work for any type of Surface - format. Surfaces without a pixel alpha will return an array with all - opaque values. - - This function will temporarily lock the Surface as pixels are copied - (see the Surface.lock - lock the Surface memory for pixel access - method). - """ - size = surface.get_size() - array = numpy.empty(size, numpy.uint8) - surface_to_array(array, surface, "A") - return array - - -def pixels_alpha(surface): - """pygame.surfarray.pixels_alpha(Surface): return array - - reference pixel alphas into a 2d array - - Create a new 2D array that directly references the alpha values - (degree of transparency) in a Surface. Any changes to the array will - affect the pixels in the Surface. This is a fast operation since no - data is copied. - - This can only work on 32-bit Surfaces with a per-pixel alpha value. 
- - The Surface this array references will remain locked for the - lifetime of the array. - """ - return numpy.array(surface.get_view("A"), copy=False) - - -def pixels_red(surface): - """pygame.surfarray.pixels_red(Surface): return array - - Reference pixel red into a 2d array. - - Create a new 2D array that directly references the red values - in a Surface. Any changes to the array will affect the pixels - in the Surface. This is a fast operation since no data is copied. - - This can only work on 24-bit or 32-bit Surfaces. - - The Surface this array references will remain locked for the - lifetime of the array. - """ - return numpy.array(surface.get_view("R"), copy=False) - - -def array_red(surface): - """pygame.surfarray.array_red(Surface): return array - - copy pixel red into a 2d array - - Copy the pixel red values from a Surface into a 2D array. This will work - for any type of Surface format. - - This function will temporarily lock the Surface as pixels are copied - (see the Surface.lock - lock the Surface memory for pixel access - method). - """ - size = surface.get_size() - array = numpy.empty(size, numpy.uint8) - surface_to_array(array, surface, "R") - return array - - -def pixels_green(surface): - """pygame.surfarray.pixels_green(Surface): return array - - Reference pixel green into a 2d array. - - Create a new 2D array that directly references the green values - in a Surface. Any changes to the array will affect the pixels - in the Surface. This is a fast operation since no data is copied. - - This can only work on 24-bit or 32-bit Surfaces. - - The Surface this array references will remain locked for the - lifetime of the array. - """ - return numpy.array(surface.get_view("G"), copy=False) - - -def array_green(surface): - """pygame.surfarray.array_green(Surface): return array - - copy pixel green into a 2d array - - Copy the pixel green values from a Surface into a 2D array. This will work - for any type of Surface format. - - This function will temporarily lock the Surface as pixels are copied - (see the Surface.lock - lock the Surface memory for pixel access - method). - """ - size = surface.get_size() - array = numpy.empty(size, numpy.uint8) - surface_to_array(array, surface, "G") - return array - - -def pixels_blue(surface): - """pygame.surfarray.pixels_blue(Surface): return array - - Reference pixel blue into a 2d array. - - Create a new 2D array that directly references the blue values - in a Surface. Any changes to the array will affect the pixels - in the Surface. This is a fast operation since no data is copied. - - This can only work on 24-bit or 32-bit Surfaces. - - The Surface this array references will remain locked for the - lifetime of the array. - """ - return numpy.array(surface.get_view("B"), copy=False) - - -def array_blue(surface): - """pygame.surfarray.array_blue(Surface): return array - - copy pixel blue into a 2d array - - Copy the pixel blue values from a Surface into a 2D array. This will work - for any type of Surface format. - - This function will temporarily lock the Surface as pixels are copied - (see the Surface.lock - lock the Surface memory for pixel access - method). - """ - size = surface.get_size() - array = numpy.empty(size, numpy.uint8) - surface_to_array(array, surface, "B") - return array - - -def array_colorkey(surface): - """pygame.surfarray.array_colorkey(Surface): return array - - copy the colorkey values into a 2d array - - Create a new array with the colorkey transparency value from each - pixel. 
If the pixel matches the colorkey it will be fully - tranparent; otherwise it will be fully opaque. - - This will work on any type of Surface format. If the image has no - colorkey a solid opaque array will be returned. - - This function will temporarily lock the Surface as pixels are - copied. - """ - size = surface.get_size() - array = numpy.empty(size, numpy.uint8) - surface_to_array(array, surface, "C") - return array - - -def map_array(surface, array): - """pygame.surfarray.map_array(Surface, array3d): return array2d - - map a 3d array into a 2d array - - Convert a 3D array into a 2D array. This will use the given Surface - format to control the conversion. - - Note: arrays do not need to be 3D, as long as the minor axis has - three elements giving the component colours, any array shape can be - used (for example, a single colour can be mapped, or an array of - colours). The array shape is limited to eleven dimensions maximum, - including the three element minor axis. - """ - if array.ndim == 0: - raise ValueError("array must have at least 1 dimension") - shape = array.shape - if shape[-1] != 3: - raise ValueError("array must be a 3d array of 3-value color data") - target = numpy_empty(shape[:-1], numpy.int32) - pix_map_array(target, array, surface) - return target - - -def use_arraytype(arraytype): - """pygame.surfarray.use_arraytype(arraytype): return None - - DEPRECATED - only numpy arrays are now supported. - """ - warnings.warn( - DeprecationWarning( - "only numpy arrays are now supported, " - "this function will be removed in a " - "future version of the module" - ) - ) - arraytype = arraytype.lower() - if arraytype != "numpy": - raise ValueError("invalid array type") - - -def get_arraytype(): - """pygame.surfarray.get_arraytype(): return str - - DEPRECATED - only numpy arrays are now supported. - """ - warnings.warn( - DeprecationWarning( - "only numpy arrays are now supported, " - "this function will be removed in a " - "future version of the module" - ) - ) - return "numpy" - - -def get_arraytypes(): - """pygame.surfarray.get_arraytypes(): return tuple - - DEPRECATED - only numpy arrays are now supported. - """ - warnings.warn( - DeprecationWarning( - "only numpy arrays are now supported, " - "this function will be removed in a " - "future version of the module" - ) - ) - return ("numpy",) diff --git a/venv/Lib/site-packages/pygame/surfarray.pyi b/venv/Lib/site-packages/pygame/surfarray.pyi deleted file mode 100644 index 33d7dbb..0000000 --- a/venv/Lib/site-packages/pygame/surfarray.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Tuple - -import numpy - -from pygame.surface import Surface - -def array2d(surface: Surface) -> numpy.ndarray: ... -def pixels2d(surface: Surface) -> numpy.ndarray: ... -def array3d(surface: Surface) -> numpy.ndarray: ... -def pixels3d(surface: Surface) -> numpy.ndarray: ... -def array_alpha(surface: Surface) -> numpy.ndarray: ... -def pixels_alpha(surface: Surface) -> numpy.ndarray: ... -def array_red(surface: Surface) -> numpy.ndarray: ... -def pixels_red(surface: Surface) -> numpy.ndarray: ... -def array_green(surface: Surface) -> numpy.ndarray: ... -def pixels_green(surface: Surface) -> numpy.ndarray: ... -def array_blue(surface: Surface) -> numpy.ndarray: ... -def pixels_blue(surface: Surface) -> numpy.ndarray: ... -def array_colorkey(surface: Surface) -> numpy.ndarray: ... -def make_surface(array: numpy.ndarray) -> Surface: ... -def blit_array(surface: Surface, array: numpy.ndarray) -> None: ... 
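The surfarray docstrings above describe the copy versus reference semantics only in prose, so here is a small round-trip sketch (NumPy required, as the module itself states); the 4x3 and 8x8 sizes and the colours are arbitrary:

import numpy
import pygame

surf = pygame.Surface((4, 3), depth=32)
surf.fill((10, 20, 30))

# Copying interfaces: independent arrays, indexed [x, y] (plus channel for 3D).
arr3d = pygame.surfarray.array3d(surf)   # shape (4, 3, 3), dtype uint8
arr2d = pygame.surfarray.array2d(surf)   # one mapped integer per pixel

# Referencing interface: writes go straight back to the surface pixels.
view = pygame.surfarray.pixels3d(surf)
view[0, 0] = (255, 0, 0)                 # top-left pixel becomes red
del view                                 # drop the reference to unlock the surface

# Build a brand-new Surface from raw array data.
checker = numpy.zeros((8, 8, 3), dtype=numpy.uint8)
checker[::2, ::2] = (255, 255, 255)
new_surf = pygame.surfarray.make_surface(checker)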
-def map_array(surface: Surface, array3d: numpy.ndarray) -> numpy.ndarray: ... -def use_arraytype(arraytype: str) -> None: ... -def get_arraytype() -> str: ... -def get_arraytypes() -> Tuple[str]: ... diff --git a/venv/Lib/site-packages/pygame/surflock.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/surflock.cp39-win_amd64.pyd deleted file mode 100644 index 9e4d48e..0000000 Binary files a/venv/Lib/site-packages/pygame/surflock.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/sysfont.py b/venv/Lib/site-packages/pygame/sysfont.py deleted file mode 100644 index 588a5b5..0000000 --- a/venv/Lib/site-packages/pygame/sysfont.py +++ /dev/null @@ -1,529 +0,0 @@ -# coding: ascii -# pygame - Python Game Library -# Copyright (C) 2000-2003 Pete Shinners -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Library General Public -# License as published by the Free Software Foundation; either -# version 2 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Library General Public License for more details. -# -# You should have received a copy of the GNU Library General Public -# License along with this library; if not, write to the Free -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# Pete Shinners -# pete@shinners.org -"""sysfont, used in the font module to find system fonts""" - -import os -import sys -from os.path import basename, dirname, exists, join, splitext - -from pygame.font import Font - - -OpenType_extensions = frozenset((".ttf", ".ttc", ".otf")) -Sysfonts = {} -Sysalias = {} - -# Python 3 compatibility - - -def toascii(raw): - """convert bytes to ASCII-only string""" - return raw.decode("ascii", "ignore") - - -if os.name == "nt": - import winreg as _winreg -else: - import subprocess - - -def _simplename(name): - """create simple version of the font name""" - # return alphanumeric characters of a string (converted to lowercase) - return "".join(c.lower() for c in name if c.isalnum()) - - -def _addfont(name, bold, italic, font, fontdict): - """insert a font and style into the font dictionary""" - if name not in fontdict: - fontdict[name] = {} - fontdict[name][bold, italic] = font - - -def initsysfonts_win32(): - """initialize fonts dictionary on Windows""" - - fontdir = join(os.environ.get("WINDIR", "C:\\Windows"), "Fonts") - - fonts = {} - - # add fonts entered in the registry - - # find valid registry keys containing font information. - # http://docs.python.org/lib/module-sys.html - # 0 (VER_PLATFORM_WIN32s) Win32s on Windows 3.1 - # 1 (VER_PLATFORM_WIN32_WINDOWS) Windows 95/98/ME - # 2 (VER_PLATFORM_WIN32_NT) Windows NT/2000/XP - # 3 (VER_PLATFORM_WIN32_CE) Windows CE - if sys.getwindowsversion()[0] == 1: - key_name = "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Fonts" - else: - key_name = "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Fonts" - key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key_name) - - for i in range(_winreg.QueryInfoKey(key)[1]): - try: - # name is the font's name e.g. Times New Roman (TrueType) - # font is the font's filename e.g. 
times.ttf - name, font = _winreg.EnumValue(key, i)[0:2] - except EnvironmentError: - break - - # try to handle windows unicode strings for file names with - # international characters - - # here are two documents with some information about it: - # http://www.python.org/peps/pep-0277.html - # https://www.microsoft.com/technet/archive/interopmigration/linux/mvc/lintowin.mspx#ECAA - try: - font = str(font) - except UnicodeEncodeError: - # MBCS is the windows encoding for unicode file names. - try: - font = font.encode("MBCS") - except UnicodeEncodeError: - # no success with str or MBCS encoding... skip this font. - continue - - if splitext(font)[1].lower() not in OpenType_extensions: - continue - if not dirname(font): - font = join(fontdir, font) - - # Some are named A & B, both names should be processed separately - # Ex: the main Cambria file is marked as "Cambria & Cambria Math" - for name in name.split("&"): - _parse_font_entry_win(name, font, fonts) - - return fonts - - -def _parse_font_entry_win(name, font, fonts): - """ - Parse out a simpler name and the font style from the initial file name. - - :param name: The font name - :param font: The font file path - :param fonts: The pygame font dictionary - - :return: Tuple of (bold, italic, name) - """ - true_type_suffix = "(TrueType)" - mods = ("demibold", "narrow", "light", "unicode", "bt", "mt") - if name.endswith(true_type_suffix): - name = name.rstrip(true_type_suffix).rstrip() - name = name.lower().split() - bold = italic = False - for mod in mods: - if mod in name: - name.remove(mod) - if "bold" in name: - name.remove("bold") - bold = True - if "italic" in name: - name.remove("italic") - italic = True - name = "".join(name) - name = _simplename(name) - - _addfont(name, bold, italic, font, fonts) - - -def _parse_font_entry_darwin(name, filepath, fonts): - """ - Parses a font entry for macOS - - :param name: The filepath without extensions or directories - :param filepath: The full path to the font - :param fonts: The pygame font dictionary to add the parsed font data to. - """ - - name = _simplename(name) - - mods = ("regular",) - - for mod in mods: - if mod in name: - name = name.replace(mod, "") - - bold = italic = False - if "bold" in name: - name = name.replace("bold", "") - bold = True - if "italic" in name: - name = name.replace("italic", "") - italic = True - - _addfont(name, bold, italic, filepath, fonts) - - -def _font_finder_darwin(): - locations = [ - "/Library/Fonts", - "/Network/Library/Fonts", - "/System/Library/Fonts", - "/System/Library/Fonts/Supplemental", - ] - - username = os.getenv("USER") - if username: - locations.append("/Users/" + username + "/Library/Fonts") - - strange_root = "/System/Library/Assets/com_apple_MobileAsset_Font3" - if exists(strange_root): - strange_locations = os.listdir(strange_root) - for loc in strange_locations: - locations.append(strange_root + "/" + loc + "/AssetData") - - fonts = {} - - for location in locations: - if not exists(location): - continue - - files = os.listdir(location) - for file in files: - name, extension = splitext(file) - if extension in OpenType_extensions: - _parse_font_entry_darwin(name, join(location, file), fonts) - - return fonts - - -def initsysfonts_darwin(): - """Read the fonts on MacOS, and OS X.""" - # if the X11 binary exists... try and use that. - # Not likely to be there on pre 10.4.x ... 
or MacOS 10.10+ - if exists("/usr/X11/bin/fc-list"): - fonts = initsysfonts_unix("/usr/X11/bin/fc-list") - # This fc-list path will work with the X11 from the OS X 10.3 installation - # disc - elif exists("/usr/X11R6/bin/fc-list"): - fonts = initsysfonts_unix("/usr/X11R6/bin/fc-list") - else: - # eventually this should probably be the preferred solution - fonts = _font_finder_darwin() - - return fonts - - -# read the fonts on unix -def initsysfonts_unix(path="fc-list"): - """use the fc-list from fontconfig to get a list of fonts""" - fonts = {} - - try: - # pylint: disable=consider-using-with - # subprocess.Popen is not a context manager in all of - # pygame's supported python versions. - - # note, we capture stderr so if fc-list isn't there to stop stderr - # printing. - flout, _ = subprocess.Popen( - f"{path} : file family style", - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - close_fds=True, - ).communicate() - except (OSError, ValueError): - return fonts - - entries = toascii(flout) - try: - for entry in entries.split("\n"): - - try: - _parse_font_entry_unix(entry, fonts) - except ValueError: - # try the next one. - pass - - except ValueError: - pass - - return fonts - - -def _parse_font_entry_unix(entry, fonts): - """ - Parses an entry in the unix font data to add to the pygame font - dictionary. - - :param entry: A entry from the unix font list. - :param fonts: The pygame font dictionary to add the parsed font data to. - - """ - filename, family, style = entry.split(":", 2) - if splitext(filename)[1].lower() in OpenType_extensions: - bold = "Bold" in style - italic = "Italic" in style - oblique = "Oblique" in style - for name in family.strip().split(","): - if name: - break - else: - name = splitext(basename(filename))[0] - - _addfont(_simplename(name), bold, italic or oblique, filename, fonts) - - -def create_aliases(): - """Map common fonts that are absent from the system to similar fonts - that are installed in the system - """ - alias_groups = ( - ( - "monospace", - "misc-fixed", - "courier", - "couriernew", - "console", - "fixed", - "mono", - "freemono", - "bitstreamverasansmono", - "verasansmono", - "monotype", - "lucidaconsole", - "consolas", - "dejavusansmono", - "liberationmono", - ), - ( - "sans", - "arial", - "helvetica", - "swiss", - "freesans", - "bitstreamverasans", - "verasans", - "verdana", - "tahoma", - "calibri", - "gillsans", - "segoeui", - "trebuchetms", - "ubuntu", - "dejavusans", - "liberationsans", - ), - ( - "serif", - "times", - "freeserif", - "bitstreamveraserif", - "roman", - "timesroman", - "timesnewroman", - "dutch", - "veraserif", - "georgia", - "cambria", - "constantia", - "dejavuserif", - "liberationserif", - ), - ("wingdings", "wingbats"), - ("comicsansms", "comicsans"), - ) - for alias_set in alias_groups: - for name in alias_set: - if name in Sysfonts: - found = Sysfonts[name] - break - else: - continue - for name in alias_set: - if name not in Sysfonts: - Sysalias[name] = found - - -def initsysfonts(): - """ - Initialise the sysfont module, called once. Locates the installed fonts - and creates some aliases for common font categories. - - Has different initialisation functions for different platforms. 
- """ - if sys.platform == "win32": - fonts = initsysfonts_win32() - elif sys.platform == "darwin": - fonts = initsysfonts_darwin() - else: - fonts = initsysfonts_unix() - Sysfonts.update(fonts) - create_aliases() - if not Sysfonts: # dummy so we don't try to reinit - Sysfonts[None] = None - - -def font_constructor(fontpath, size, bold, italic): - """ - pygame.font specific declarations - - :param fontpath: path to a font. - :param size: size of a font. - :param bold: bold style, True or False. - :param italic: italic style, True or False. - - :return: A font.Font object. - """ - - font = Font(fontpath, size) - if bold: - font.set_bold(True) - if italic: - font.set_italic(True) - - return font - - -# the exported functions - - -def SysFont(name, size, bold=False, italic=False, constructor=None): - """pygame.font.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font - Create a pygame Font from system font resources. - - This will search the system fonts for the given font - name. You can also enable bold or italic styles, and - the appropriate system font will be selected if available. - - This will always return a valid Font object, and will - fallback on the builtin pygame font if the given font - is not found. - - Name can also be an iterable of font names, a string of - comma-separated font names, or a bytes of comma-separated - font names, in which case the set of names will be searched - in order. Pygame uses a small set of common font aliases. If the - specific font you ask for is not available, a reasonable - alternative may be used. - - If optional constructor is provided, it must be a function with - signature constructor(fontpath, size, bold, italic) which returns - a Font instance. If None, a pygame.font.Font object is created. - """ - if constructor is None: - constructor = font_constructor - - if not Sysfonts: - initsysfonts() - - gotbold = gotitalic = False - fontname = None - if name: - if isinstance(name, (str, bytes)): - name = name.split(b"," if isinstance(name, bytes) else ",") - for single_name in name: - if isinstance(single_name, bytes): - single_name = single_name.decode() - - single_name = _simplename(single_name) - styles = Sysfonts.get(single_name) - if not styles: - styles = Sysalias.get(single_name) - if styles: - plainname = styles.get((False, False)) - fontname = styles.get((bold, italic)) - if not (fontname or plainname): - # Neither requested style, nor plain font exists, so - # return a font with the name requested, but an - # arbitrary style. - (style, fontname) = list(styles.items())[0] - # Attempt to style it as requested. This can't - # unbold or unitalicize anything, but it can - # fake bold and/or fake italicize. - if bold and style[0]: - gotbold = True - if italic and style[1]: - gotitalic = True - elif not fontname: - fontname = plainname - elif plainname != fontname: - gotbold = bold - gotitalic = italic - if fontname: - break - - set_bold = set_italic = False - if bold and not gotbold: - set_bold = True - if italic and not gotitalic: - set_italic = True - - return constructor(fontname, size, set_bold, set_italic) - - -def get_fonts(): - """pygame.font.get_fonts() -> list - get a list of system font names - - Returns the list of all found system fonts. Note that - the names of the fonts will be all lowercase with spaces - removed. This is how pygame internally stores the font - names for matching. 
- """ - if not Sysfonts: - initsysfonts() - return list(Sysfonts) - - -def match_font(name, bold=0, italic=0): - """pygame.font.match_font(name, bold=0, italic=0) -> name - find the filename for the named system font - - This performs the same font search as the SysFont() - function, only it returns the path to the TTF file - that would be loaded. The font name can also be an - iterable of font names or a string/bytes of comma-separated - font names to try. - - If no match is found, None is returned. - """ - if not Sysfonts: - initsysfonts() - - fontname = None - if isinstance(name, (str, bytes)): - name = name.split(b"," if isinstance(name, bytes) else ",") - - for single_name in name: - if isinstance(single_name, bytes): - single_name = single_name.decode() - - single_name = _simplename(single_name) - styles = Sysfonts.get(single_name) - if not styles: - styles = Sysalias.get(single_name) - if styles: - while not fontname: - fontname = styles.get((bold, italic)) - if italic: - italic = 0 - elif bold: - bold = 0 - elif not fontname: - fontname = list(styles.values())[0] - if fontname: - break - return fontname diff --git a/venv/Lib/site-packages/pygame/tests/__init__.py b/venv/Lib/site-packages/pygame/tests/__init__.py deleted file mode 100644 index dd26586..0000000 --- a/venv/Lib/site-packages/pygame/tests/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Pygame unit test suite package - -Exports function run() - -A quick way to run the test suite package from the command line -is by importing the go submodule: - -python -m "import pygame.tests" [] - -Command line option --help displays a usage message. Available options -correspond to the pygame.tests.run arguments. - -The xxxx_test submodules of the tests package are unit test suites for -individual parts of Pygame. Each can also be run as a main program. This is -useful if the test, such as cdrom_test, is interactive. - -For Pygame development the test suite can be run from a Pygame distribution -root directory using run_tests.py. Alternately, test/__main__.py can be run -directly. - -""" - -if __name__ == "pygame.tests": - from pygame.tests.test_utils.run_tests import run -elif __name__ == "__main__": - import os - import sys - - pkg_dir = os.path.split(os.path.abspath(__file__))[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) - - if is_pygame_pkg: - import pygame.tests.__main__ - else: - import test.__main__ -else: - from test.test_utils.run_tests import run diff --git a/venv/Lib/site-packages/pygame/tests/__main__.py b/venv/Lib/site-packages/pygame/tests/__main__.py deleted file mode 100644 index abdd92e..0000000 --- a/venv/Lib/site-packages/pygame/tests/__main__.py +++ /dev/null @@ -1,144 +0,0 @@ -"""Load and run the Pygame test suite - -python -c "import pygame.tests.go" [] - -or - -python test/go.py [] - -Command line option --help displays a command line usage message. 
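Looking back at the sysfont.py hunk above, the SysFont/get_fonts/match_font helpers are easiest to see in use. A brief sketch; the concrete font names are only examples of the comma-separated and alias lookup the docstrings describe, and they will resolve differently from system to system:

import pygame

pygame.font.init()

# All detected system fonts, lowercased with spaces removed.
available = pygame.font.get_fonts()

# Candidates are tried in order; "sans" is a built-in alias, so this still
# returns a usable Font even if Arial is not installed.
font = pygame.font.SysFont("arial,sans", 24, bold=True)

# Same lookup, but only the path to the font file (or None) is returned.
path = pygame.font.match_font("dejavusansmono,monospace")

text = font.render("hello", True, (255, 255, 255))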
- -run_tests.py in the main distribution directory is an alternative to test.go - -""" - -import sys - -if __name__ == "__main__": - import os - - pkg_dir = os.path.split(os.path.abspath(__file__))[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -if is_pygame_pkg: - from pygame.tests.test_utils.run_tests import run_and_exit - from pygame.tests.test_utils.test_runner import opt_parser -else: - from test.test_utils.run_tests import run_and_exit - from test.test_utils.test_runner import opt_parser - -if is_pygame_pkg: - test_pkg_name = "pygame.tests" -else: - test_pkg_name = "test" -program_name = sys.argv[0] -if program_name == "-c": - program_name = 'python -c "import %s.go"' % test_pkg_name - -########################################################################### -# Set additional command line options -# -# Defined in test_runner.py as it shares options, added to here - -opt_parser.set_usage( - """ - -Runs all or some of the %(pkg)s.xxxx_test tests. - -$ %(exec)s sprite threads -sd - -Runs the sprite and threads module tests isolated in subprocesses, dumping -all failing tests info in the form of a dict. - -""" - % {"pkg": test_pkg_name, "exec": program_name} -) - -opt_parser.add_option( - "-d", "--dump", action="store_true", help="dump results as dict ready to eval" -) - -opt_parser.add_option("-F", "--file", help="dump results to a file") - -opt_parser.add_option( - "-m", - "--multi_thread", - metavar="THREADS", - type="int", - help="run subprocessed tests in x THREADS", -) - -opt_parser.add_option( - "-t", - "--time_out", - metavar="SECONDS", - type="int", - help="kill stalled subprocessed tests after SECONDS", -) - -opt_parser.add_option( - "-f", "--fake", metavar="DIR", help="run fake tests in run_tests__tests/$DIR" -) - -opt_parser.add_option( - "-p", - "--python", - metavar="PYTHON", - help="path to python excutable to run subproccesed tests\n" - "default (sys.executable): %s" % sys.executable, -) - -opt_parser.add_option( - "-I", - "--interactive", - action="store_true", - help="include tests requiring user input", -) - -opt_parser.add_option("-S", "--seed", type="int", help="Randomisation seed") - -########################################################################### -# Set run() keyword arguements according to command line arguemnts. -# args will be the test module list, passed as positional argumemts. 
- -options, args = opt_parser.parse_args() -kwds = {} -if options.incomplete: - kwds["incomplete"] = True -if options.usesubprocess: - kwds["usesubprocess"] = True -else: - kwds["usesubprocess"] = False -if options.dump: - kwds["dump"] = True -if options.file: - kwds["file"] = options.file -if options.exclude: - kwds["exclude"] = options.exclude -if options.unbuffered: - kwds["unbuffered"] = True -if options.randomize: - kwds["randomize"] = True -if options.seed is not None: - kwds["seed"] = options.seed -if options.multi_thread is not None: - kwds["multi_thread"] = options.multi_thread -if options.time_out is not None: - kwds["time_out"] = options.time_out -if options.fake: - kwds["fake"] = options.fake -if options.python: - kwds["python"] = options.python -if options.interactive: - kwds["interactive"] = True -kwds["verbosity"] = options.verbosity if options.verbosity is not None else 1 - - -########################################################################### -# Run the test suite. -run_and_exit(*args, **kwds) diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 068cbe6..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/__main__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/__main__.cpython-39.pyc deleted file mode 100644 index 01f1334..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/__main__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/base_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/base_test.cpython-39.pyc deleted file mode 100644 index 8ee1ffe..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/base_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/blit_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/blit_test.cpython-39.pyc deleted file mode 100644 index c58f5e2..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/blit_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/bufferproxy_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/bufferproxy_test.cpython-39.pyc deleted file mode 100644 index 2c94c39..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/bufferproxy_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/camera_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/camera_test.cpython-39.pyc deleted file mode 100644 index cb752b9..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/camera_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/color_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/color_test.cpython-39.pyc deleted file mode 100644 index 6eb145f..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/color_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/constants_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/constants_test.cpython-39.pyc deleted file mode 100644 index 002801f..0000000 Binary files 
a/venv/Lib/site-packages/pygame/tests/__pycache__/constants_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/controller_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/controller_test.cpython-39.pyc deleted file mode 100644 index 2ecc1e3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/controller_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/cursors_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/cursors_test.cpython-39.pyc deleted file mode 100644 index 87b5c8e..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/cursors_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/display_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/display_test.cpython-39.pyc deleted file mode 100644 index 0a928e3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/display_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/docs_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/docs_test.cpython-39.pyc deleted file mode 100644 index c6aac1d..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/docs_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/draw_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/draw_test.cpython-39.pyc deleted file mode 100644 index 0c4a677..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/draw_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/event_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/event_test.cpython-39.pyc deleted file mode 100644 index 00aed49..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/event_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/font_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/font_test.cpython-39.pyc deleted file mode 100644 index 5337cfc..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/font_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/freetype_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/freetype_tags.cpython-39.pyc deleted file mode 100644 index 6886bd4..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/freetype_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/freetype_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/freetype_test.cpython-39.pyc deleted file mode 100644 index 9c81fca..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/freetype_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/ftfont_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/ftfont_tags.cpython-39.pyc deleted file mode 100644 index 80cab24..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/ftfont_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/ftfont_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/ftfont_test.cpython-39.pyc deleted file mode 100644 index 
0a6d832..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/ftfont_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/gfxdraw_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/gfxdraw_test.cpython-39.pyc deleted file mode 100644 index 53e0a26..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/gfxdraw_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/image__save_gl_surface_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/image__save_gl_surface_test.cpython-39.pyc deleted file mode 100644 index 1bbbab8..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/image__save_gl_surface_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/image_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/image_tags.cpython-39.pyc deleted file mode 100644 index 251eeb8..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/image_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/image_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/image_test.cpython-39.pyc deleted file mode 100644 index 029a511..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/image_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/imageext_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/imageext_tags.cpython-39.pyc deleted file mode 100644 index 24c78dd..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/imageext_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/imageext_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/imageext_test.cpython-39.pyc deleted file mode 100644 index 23eeb0b..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/imageext_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/joystick_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/joystick_test.cpython-39.pyc deleted file mode 100644 index 62a1517..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/joystick_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/key_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/key_test.cpython-39.pyc deleted file mode 100644 index 490c80e..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/key_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/mask_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/mask_test.cpython-39.pyc deleted file mode 100644 index b834562..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/mask_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/math_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/math_test.cpython-39.pyc deleted file mode 100644 index 89ee428..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/math_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/midi_test.cpython-39.pyc 
b/venv/Lib/site-packages/pygame/tests/__pycache__/midi_test.cpython-39.pyc deleted file mode 100644 index 58b6994..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/midi_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_music_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_music_tags.cpython-39.pyc deleted file mode 100644 index 3062ba2..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_music_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_music_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_music_test.cpython-39.pyc deleted file mode 100644 index 44c60cb..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_music_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_tags.cpython-39.pyc deleted file mode 100644 index 9af7bf9..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_test.cpython-39.pyc deleted file mode 100644 index 7aaae4c..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/mixer_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/mouse_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/mouse_test.cpython-39.pyc deleted file mode 100644 index f1f5a18..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/mouse_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/pixelarray_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/pixelarray_test.cpython-39.pyc deleted file mode 100644 index 5010e5a..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/pixelarray_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/pixelcopy_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/pixelcopy_test.cpython-39.pyc deleted file mode 100644 index f9546dc..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/pixelcopy_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/rect_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/rect_test.cpython-39.pyc deleted file mode 100644 index 7908fe5..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/rect_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/rwobject_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/rwobject_test.cpython-39.pyc deleted file mode 100644 index 8f77e69..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/rwobject_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/scrap_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/scrap_tags.cpython-39.pyc deleted file mode 100644 index 4b3594f..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/scrap_tags.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/pygame/tests/__pycache__/scrap_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/scrap_test.cpython-39.pyc deleted file mode 100644 index 1fe3084..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/scrap_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/sndarray_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/sndarray_tags.cpython-39.pyc deleted file mode 100644 index 3ed50d0..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/sndarray_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/sndarray_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/sndarray_test.cpython-39.pyc deleted file mode 100644 index 2a26136..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/sndarray_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/sprite_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/sprite_test.cpython-39.pyc deleted file mode 100644 index da4fdea..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/sprite_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/surface_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/surface_test.cpython-39.pyc deleted file mode 100644 index b1864ad..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/surface_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/surfarray_tags.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/surfarray_tags.cpython-39.pyc deleted file mode 100644 index 8864b60..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/surfarray_tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/surfarray_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/surfarray_test.cpython-39.pyc deleted file mode 100644 index 80b96d3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/surfarray_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/surflock_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/surflock_test.cpython-39.pyc deleted file mode 100644 index 631da55..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/surflock_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/sysfont_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/sysfont_test.cpython-39.pyc deleted file mode 100644 index 904f613..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/sysfont_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/test_test_.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/test_test_.cpython-39.pyc deleted file mode 100644 index 624b5fa..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/test_test_.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/threads_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/threads_test.cpython-39.pyc deleted file mode 100644 index fd3693a..0000000 Binary files 
a/venv/Lib/site-packages/pygame/tests/__pycache__/threads_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/time_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/time_test.cpython-39.pyc deleted file mode 100644 index a233702..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/time_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/touch_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/touch_test.cpython-39.pyc deleted file mode 100644 index de96604..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/touch_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/transform_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/transform_test.cpython-39.pyc deleted file mode 100644 index e7ecda3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/transform_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/version_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/version_test.cpython-39.pyc deleted file mode 100644 index 2b05c75..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/version_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/__pycache__/video_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/__pycache__/video_test.cpython-39.pyc deleted file mode 100644 index 9d191c7..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/__pycache__/video_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/base_test.py b/venv/Lib/site-packages/pygame/tests/base_test.py deleted file mode 100644 index 7b7d64a..0000000 --- a/venv/Lib/site-packages/pygame/tests/base_test.py +++ /dev/null @@ -1,635 +0,0 @@ -# -*- coding: utf8 -*- - -import sys -import unittest - -import platform - -IS_PYPY = "PyPy" == platform.python_implementation() - -try: - from pygame.tests.test_utils import arrinter -except NameError: - pass -import pygame - - -quit_count = 0 - - -def quit_hook(): - global quit_count - quit_count += 1 - - -class BaseModuleTest(unittest.TestCase): - def tearDown(self): - # Clean up after each test method. 
- pygame.quit() - - def test_get_sdl_byteorder(self): - """Ensure the SDL byte order is valid""" - byte_order = pygame.get_sdl_byteorder() - expected_options = (pygame.LIL_ENDIAN, pygame.BIG_ENDIAN) - - self.assertIn(byte_order, expected_options) - - def test_get_sdl_version(self): - """Ensure the SDL version is valid""" - self.assertEqual(len(pygame.get_sdl_version()), 3) - - class ExporterBase(object): - def __init__(self, shape, typechar, itemsize): - import ctypes - - ndim = len(shape) - self.ndim = ndim - self.shape = tuple(shape) - array_len = 1 - for d in shape: - array_len *= d - self.size = itemsize * array_len - self.parent = ctypes.create_string_buffer(self.size) - self.itemsize = itemsize - strides = [itemsize] * ndim - for i in range(ndim - 1, 0, -1): - strides[i - 1] = strides[i] * shape[i] - self.strides = tuple(strides) - self.data = ctypes.addressof(self.parent), False - if self.itemsize == 1: - byteorder = "|" - elif sys.byteorder == "big": - byteorder = ">" - else: - byteorder = "<" - self.typestr = byteorder + typechar + str(self.itemsize) - - def assertSame(self, proxy, obj): - self.assertEqual(proxy.length, obj.size) - iface = proxy.__array_interface__ - self.assertEqual(iface["typestr"], obj.typestr) - self.assertEqual(iface["shape"], obj.shape) - self.assertEqual(iface["strides"], obj.strides) - self.assertEqual(iface["data"], obj.data) - - def test_PgObject_GetBuffer_array_interface(self): - from pygame.bufferproxy import BufferProxy - - class Exporter(self.ExporterBase): - def get__array_interface__(self): - return { - "version": 3, - "typestr": self.typestr, - "shape": self.shape, - "strides": self.strides, - "data": self.data, - } - - __array_interface__ = property(get__array_interface__) - # Should be ignored by PgObject_GetBuffer - __array_struct__ = property(lambda self: None) - - _shape = [2, 3, 5, 7, 11] # Some prime numbers - for ndim in range(1, len(_shape)): - o = Exporter(_shape[0:ndim], "i", 2) - v = BufferProxy(o) - self.assertSame(v, o) - ndim = 2 - shape = _shape[0:ndim] - for typechar in ("i", "u"): - for itemsize in (1, 2, 4, 8): - o = Exporter(shape, typechar, itemsize) - v = BufferProxy(o) - self.assertSame(v, o) - for itemsize in (4, 8): - o = Exporter(shape, "f", itemsize) - v = BufferProxy(o) - self.assertSame(v, o) - - # Is the dict received from an exporting object properly released? - # The dict should be freed before PgObject_GetBuffer returns. - # When the BufferProxy v's length property is referenced, v calls - # PgObject_GetBuffer, which in turn references Exporter2 o's - # __array_interface__ property. The Exporter2 instance o returns a - # dict subclass for which it keeps both a regular reference and a - # weak reference. The regular reference should be the only - # remaining reference when PgObject_GetBuffer returns. This is - # verified by first checking the weak reference both before and - # after the regular reference held by o is removed. 
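[Editorial aside, not from the deleted test: the weak-reference check described in the comment above reduces to the pattern below; the names are illustrative, and prompt refcount-style collection as in CPython is assumed.]

import gc
import weakref

class RefCheckDict(dict):
    """A dict subclass; plain dicts cannot be weak-referenced, but subclasses can."""

d = RefCheckDict(version=3)
ref = weakref.ref(d)   # observes d without keeping it alive
assert ref() is d
d = None               # drop the only strong reference
gc.collect()           # for interpreters that do not free by refcount alone
assert ref() is None   # nothing else was still holding the dict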
- - import weakref, gc - - class NoDictError(RuntimeError): - pass - - class WRDict(dict): - """Weak referenceable dict""" - - pass - - class Exporter2(Exporter): - def get__array_interface__2(self): - self.d = WRDict(Exporter.get__array_interface__(self)) - self.dict_ref = weakref.ref(self.d) - return self.d - - __array_interface__ = property(get__array_interface__2) - - def free_dict(self): - self.d = None - - def is_dict_alive(self): - try: - return self.dict_ref() is not None - except AttributeError: - raise NoDictError("__array_interface__ is unread") - - o = Exporter2((2, 4), "u", 4) - v = BufferProxy(o) - self.assertRaises(NoDictError, o.is_dict_alive) - length = v.length - self.assertTrue(o.is_dict_alive()) - o.free_dict() - gc.collect() - self.assertFalse(o.is_dict_alive()) - - def test_GetView_array_struct(self): - from pygame.bufferproxy import BufferProxy - - class Exporter(self.ExporterBase): - def __init__(self, shape, typechar, itemsize): - super(Exporter, self).__init__(shape, typechar, itemsize) - self.view = BufferProxy(self.__dict__) - - def get__array_struct__(self): - return self.view.__array_struct__ - - __array_struct__ = property(get__array_struct__) - # Should not cause PgObject_GetBuffer to fail - __array_interface__ = property(lambda self: None) - - _shape = [2, 3, 5, 7, 11] # Some prime numbers - for ndim in range(1, len(_shape)): - o = Exporter(_shape[0:ndim], "i", 2) - v = BufferProxy(o) - self.assertSame(v, o) - ndim = 2 - shape = _shape[0:ndim] - for typechar in ("i", "u"): - for itemsize in (1, 2, 4, 8): - o = Exporter(shape, typechar, itemsize) - v = BufferProxy(o) - self.assertSame(v, o) - for itemsize in (4, 8): - o = Exporter(shape, "f", itemsize) - v = BufferProxy(o) - self.assertSame(v, o) - - # Check returned cobject/capsule reference count - try: - from sys import getrefcount - except ImportError: - # PyPy: no reference counting - pass - else: - o = Exporter(shape, typechar, itemsize) - self.assertEqual(getrefcount(o.__array_struct__), 1) - - if pygame.HAVE_NEWBUF: - from pygame.tests.test_utils import buftools - - def NEWBUF_assertSame(self, proxy, exp): - buftools = self.buftools - Importer = buftools.Importer - self.assertEqual(proxy.length, exp.len) - imp = Importer(proxy, buftools.PyBUF_RECORDS_RO) - self.assertEqual(imp.readonly, exp.readonly) - self.assertEqual(imp.format, exp.format) - self.assertEqual(imp.itemsize, exp.itemsize) - self.assertEqual(imp.ndim, exp.ndim) - self.assertEqual(imp.shape, exp.shape) - self.assertEqual(imp.strides, exp.strides) - self.assertTrue(imp.suboffsets is None) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - @unittest.skipIf(IS_PYPY, "pypy no likey") - def test_newbuf(self): - from pygame.bufferproxy import BufferProxy - - Exporter = self.buftools.Exporter - _shape = [2, 3, 5, 7, 11] # Some prime numbers - for ndim in range(1, len(_shape)): - o = Exporter(_shape[0:ndim], "=h") - v = BufferProxy(o) - self.NEWBUF_assertSame(v, o) - ndim = 2 - shape = _shape[0:ndim] - for format in [ - "b", - "B", - "=h", - "=H", - "=i", - "=I", - "=q", - "=Q", - "f", - "d", - "1h", - "=1h", - "x", - "1x", - "2x", - "3x", - "4x", - "5x", - "6x", - "7x", - "8x", - "9x", - ]: - o = Exporter(shape, format) - v = BufferProxy(o) - self.NEWBUF_assertSame(v, o) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_bad_format(self): - from pygame.bufferproxy import BufferProxy - from pygame.newbuffer import BufferMixin - from ctypes import create_string_buffer, addressof - - buftools 
= self.buftools - Exporter = buftools.Exporter - Importer = buftools.Importer - PyBUF_FORMAT = buftools.PyBUF_FORMAT - - for format in [ - "", - "=", - "1", - " ", - "2h", - "=2h", - "0x", - "11x", - "=!", - "h ", - " h", - "hh", - "?", - ]: - exp = Exporter((1,), format, itemsize=2) - b = BufferProxy(exp) - self.assertRaises(ValueError, Importer, b, PyBUF_FORMAT) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - @unittest.skipIf(IS_PYPY, "fails on pypy") - def test_PgDict_AsBuffer_PyBUF_flags(self): - from pygame.bufferproxy import BufferProxy - - is_lil_endian = pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN - fsys, frev = ("<", ">") if is_lil_endian else (">", "<") - buftools = self.buftools - Importer = buftools.Importer - a = BufferProxy( - {"typestr": "|u4", "shape": (10, 2), "data": (9, False)} - ) # 9? No data accesses. - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 4) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, 9) - b = Importer(a, buftools.PyBUF_WRITABLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 4) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, 9) - b = Importer(a, buftools.PyBUF_ND) - self.assertEqual(b.ndim, 2) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 4) - self.assertEqual(b.shape, (10, 2)) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, 9) - a = BufferProxy( - { - "typestr": fsys + "i2", - "shape": (5, 10), - "strides": (24, 2), - "data": (42, False), - } - ) # 42? No data accesses. 
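[Editorial note, not part of the deleted test: the (24, 2) strides above describe int16 rows padded to 24 bytes, whereas a C-contiguous (5, 10) int16 array needs (20, 2); that mismatch is why the contiguity requests below are expected to raise BufferError. A small helper restating the stride rule used by ExporterBase earlier in this file:]

def c_contiguous_strides(shape, itemsize):
    # The last axis varies fastest; each stride is the byte size of
    # everything to its right.
    strides = [itemsize] * len(shape)
    for i in range(len(shape) - 2, -1, -1):
        strides[i] = strides[i + 1] * shape[i + 1]
    return tuple(strides)

assert c_contiguous_strides((5, 10), 2) == (20, 2)  # not the padded (24, 2) above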
- b = Importer(a, buftools.PyBUF_STRIDES) - self.assertEqual(b.ndim, 2) - self.assertTrue(b.format is None) - self.assertEqual(b.len, 100) - self.assertEqual(b.itemsize, 2) - self.assertEqual(b.shape, (5, 10)) - self.assertEqual(b.strides, (24, 2)) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, 42) - b = Importer(a, buftools.PyBUF_FULL_RO) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, "=h") - self.assertEqual(b.len, 100) - self.assertEqual(b.itemsize, 2) - self.assertEqual(b.shape, (5, 10)) - self.assertEqual(b.strides, (24, 2)) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, 42) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_CONTIG) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_CONTIG) - a = BufferProxy( - { - "typestr": frev + "i2", - "shape": (3, 5, 10), - "strides": (120, 24, 2), - "data": (1000000, True), - } - ) # 1000000? No data accesses. - b = Importer(a, buftools.PyBUF_FULL_RO) - self.assertEqual(b.ndim, 3) - self.assertEqual(b.format, frev + "h") - self.assertEqual(b.len, 300) - self.assertEqual(b.itemsize, 2) - self.assertEqual(b.shape, (3, 5, 10)) - self.assertEqual(b.strides, (120, 24, 2)) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.readonly) - self.assertEqual(b.buf, 1000000) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FULL) - - @unittest.skipIf(IS_PYPY or (not pygame.HAVE_NEWBUF), "newbuf with ctypes") - def test_PgObject_AsBuffer_PyBUF_flags(self): - from pygame.bufferproxy import BufferProxy - import ctypes - - is_lil_endian = pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN - fsys, frev = ("<", ">") if is_lil_endian else (">", "<") - buftools = self.buftools - Importer = buftools.Importer - e = arrinter.Exporter( - (10, 2), typekind="f", itemsize=ctypes.sizeof(ctypes.c_double) - ) - a = BufferProxy(e) - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, e.len) - self.assertEqual(b.itemsize, e.itemsize) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, e.data) - b = Importer(a, buftools.PyBUF_WRITABLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, e.len) - self.assertEqual(b.itemsize, e.itemsize) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, e.data) - b = Importer(a, buftools.PyBUF_ND) - self.assertEqual(b.ndim, e.nd) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, e.itemsize) - 
self.assertEqual(b.shape, e.shape) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, e.data) - e = arrinter.Exporter((5, 10), typekind="i", itemsize=2, strides=(24, 2)) - a = BufferProxy(e) - b = Importer(a, buftools.PyBUF_STRIDES) - self.assertEqual(b.ndim, e.nd) - self.assertTrue(b.format is None) - self.assertEqual(b.len, e.len) - self.assertEqual(b.itemsize, e.itemsize) - self.assertEqual(b.shape, e.shape) - self.assertEqual(b.strides, e.strides) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, e.data) - b = Importer(a, buftools.PyBUF_FULL_RO) - self.assertEqual(b.ndim, e.nd) - self.assertEqual(b.format, "=h") - self.assertEqual(b.len, e.len) - self.assertEqual(b.itemsize, e.itemsize) - self.assertEqual(b.shape, e.shape) - self.assertEqual(b.strides, e.strides) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, e.data) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_CONTIG) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_CONTIG) - e = arrinter.Exporter( - (3, 5, 10), - typekind="i", - itemsize=2, - strides=(120, 24, 2), - flags=arrinter.PAI_ALIGNED, - ) - a = BufferProxy(e) - b = Importer(a, buftools.PyBUF_FULL_RO) - self.assertEqual(b.ndim, e.nd) - self.assertEqual(b.format, frev + "h") - self.assertEqual(b.len, e.len) - self.assertEqual(b.itemsize, e.itemsize) - self.assertEqual(b.shape, e.shape) - self.assertEqual(b.strides, e.strides) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.readonly) - self.assertEqual(b.buf, e.data) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FULL) - - def test_PgObject_GetBuffer_exception(self): - # For consistency with surfarray - from pygame.bufferproxy import BufferProxy - - bp = BufferProxy(1) - self.assertRaises(ValueError, getattr, bp, "length") - - def not_init_assertions(self): - self.assertFalse(pygame.get_init(), "pygame shouldn't be initialized") - self.assertFalse(pygame.display.get_init(), "display shouldn't be initialized") - - if "pygame.mixer" in sys.modules: - self.assertFalse(pygame.mixer.get_init(), "mixer shouldn't be initialized") - - if "pygame.font" in sys.modules: - self.assertFalse(pygame.font.get_init(), "init shouldn't be initialized") - - ## !!! TODO : Remove when scrap works for OS X - import platform - - if platform.system().startswith("Darwin"): - return - - try: - self.assertRaises(pygame.error, pygame.scrap.get) - except NotImplementedError: - # Scrap is optional. 
- pass - - # pygame.cdrom - # pygame.joystick - - def init_assertions(self): - self.assertTrue(pygame.get_init()) - self.assertTrue(pygame.display.get_init()) - - if "pygame.mixer" in sys.modules: - self.assertTrue(pygame.mixer.get_init()) - - if "pygame.font" in sys.modules: - self.assertTrue(pygame.font.get_init()) - - def test_quit__and_init(self): - # __doc__ (as of 2008-06-25) for pygame.base.quit: - - # pygame.quit(): return None - # uninitialize all pygame modules - - # Make sure everything is not init - self.not_init_assertions() - - # Initiate it - pygame.init() - - # Check - self.init_assertions() - - # Quit - pygame.quit() - - # All modules have quit - self.not_init_assertions() - - def test_register_quit(self): - """Ensure that a registered function is called on quit()""" - self.assertEqual(quit_count, 0) - - pygame.init() - pygame.register_quit(quit_hook) - pygame.quit() - - self.assertEqual(quit_count, 1) - - def test_get_error(self): - - # __doc__ (as of 2008-08-02) for pygame.base.get_error: - - # pygame.get_error(): return errorstr - # get the current error message - # - # SDL maintains an internal error message. This message will usually - # be given to you when pygame.error is raised. You will rarely need to - # call this function. - # - - # The first error could be all sorts of nonsense or empty. - e = pygame.get_error() - pygame.set_error("hi") - self.assertEqual(pygame.get_error(), "hi") - pygame.set_error("") - self.assertEqual(pygame.get_error(), "") - - def test_set_error(self): - - # The first error could be all sorts of nonsense or empty. - e = pygame.get_error() - pygame.set_error("hi") - self.assertEqual(pygame.get_error(), "hi") - pygame.set_error("") - self.assertEqual(pygame.get_error(), "") - - def test_unicode_error(self): - pygame.set_error("你好") - self.assertEqual("你好", pygame.get_error()) - - def test_init(self): - """Ensures init() works properly.""" - # Make sure nothing initialized. - self.not_init_assertions() - - # display and joystick must init, at minimum - expected_min_passes = 2 - - # All modules should pass. - expected_fails = 0 - - passes, fails = pygame.init() - - self.init_assertions() - self.assertGreaterEqual(passes, expected_min_passes) - self.assertEqual(fails, expected_fails) - - def test_get_init(self): - # Test if get_init() gets the init state. - self.assertFalse(pygame.get_init()) - - def test_get_init__after_init(self): - # Test if get_init() gets the init state after pygame.init() called. - pygame.init() - - self.assertTrue(pygame.get_init()) - - def test_get_init__after_quit(self): - # Test if get_init() gets the init state after pygame.quit() called. 
- pygame.init() - pygame.quit() - - self.assertFalse(pygame.get_init()) - - def todo_test_segfault(self): - - # __doc__ (as of 2008-08-02) for pygame.base.segfault: - - # crash - - self.fail() - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/blit_test.py b/venv/Lib/site-packages/pygame/tests/blit_test.py deleted file mode 100644 index 906e7c4..0000000 --- a/venv/Lib/site-packages/pygame/tests/blit_test.py +++ /dev/null @@ -1,155 +0,0 @@ -import unittest - -import pygame -from pygame.locals import * - - -class BlitTest(unittest.TestCase): - def test_SRCALPHA(self): - """SRCALPHA tests.""" - # blend(s, 0, d) = d - s = pygame.Surface((1, 1), SRCALPHA, 32) - s.fill((255, 255, 255, 0)) - - d = pygame.Surface((1, 1), SRCALPHA, 32) - d.fill((0, 0, 255, 255)) - - s.blit(d, (0, 0)) - self.assertEqual(s.get_at((0, 0)), d.get_at((0, 0))) - - # blend(s, 255, d) = s - s = pygame.Surface((1, 1), SRCALPHA, 32) - s.fill((123, 0, 0, 255)) - s1 = pygame.Surface((1, 1), SRCALPHA, 32) - s1.fill((123, 0, 0, 255)) - d = pygame.Surface((1, 1), SRCALPHA, 32) - d.fill((10, 0, 0, 0)) - s.blit(d, (0, 0)) - self.assertEqual(s.get_at((0, 0)), s1.get_at((0, 0))) - - # TODO: these should be true too. - # blend(0, sA, 0) = 0 - # blend(255, sA, 255) = 255 - # blend(s, sA, d) <= 255 - - def test_BLEND(self): - """BLEND_ tests.""" - - # test that it doesn't overflow, and that it is saturated. - s = pygame.Surface((1, 1), SRCALPHA, 32) - s.fill((255, 255, 255, 0)) - - d = pygame.Surface((1, 1), SRCALPHA, 32) - d.fill((0, 0, 255, 255)) - - s.blit(d, (0, 0), None, BLEND_ADD) - - # print "d %s" % (d.get_at((0,0)),) - # print s.get_at((0,0)) - # self.assertEqual(s.get_at((0,0))[2], 255 ) - # self.assertEqual(s.get_at((0,0))[3], 0 ) - - s.blit(d, (0, 0), None, BLEND_RGBA_ADD) - # print s.get_at((0,0)) - self.assertEqual(s.get_at((0, 0))[3], 255) - - # test adding works. - s.fill((20, 255, 255, 0)) - d.fill((10, 0, 255, 255)) - s.blit(d, (0, 0), None, BLEND_ADD) - self.assertEqual(s.get_at((0, 0))[2], 255) - - # test subbing works. - s.fill((20, 255, 255, 0)) - d.fill((10, 0, 255, 255)) - s.blit(d, (0, 0), None, BLEND_SUB) - self.assertEqual(s.get_at((0, 0))[0], 10) - - # no overflow in sub blend. - s.fill((20, 255, 255, 0)) - d.fill((30, 0, 255, 255)) - s.blit(d, (0, 0), None, BLEND_SUB) - self.assertEqual(s.get_at((0, 0))[0], 0) - - def make_blit_list(self, num_surfs): - - blit_list = [] - for i in range(num_surfs): - dest = (i * 10, 0) - surf = pygame.Surface((10, 10), SRCALPHA, 32) - color = (i * 1, i * 1, i * 1) - surf.fill(color) - blit_list.append((surf, dest)) - return blit_list - - def test_blits(self): - - NUM_SURFS = 255 - PRINT_TIMING = 0 - dst = pygame.Surface((NUM_SURFS * 10, 10), SRCALPHA, 32) - dst.fill((230, 230, 230)) - blit_list = self.make_blit_list(NUM_SURFS) - - def blits(blit_list): - for surface, dest in blit_list: - dst.blit(surface, dest) - - from time import time - - t0 = time() - results = blits(blit_list) - t1 = time() - if PRINT_TIMING: - print("python blits: %s" % (t1 - t0)) - - dst.fill((230, 230, 230)) - t0 = time() - results = dst.blits(blit_list) - t1 = time() - if PRINT_TIMING: - print("Surface.blits :%s" % (t1 - t0)) - - # check if we blit all the different colors in the correct spots. 
- for i in range(NUM_SURFS): - color = (i * 1, i * 1, i * 1) - self.assertEqual(dst.get_at((i * 10, 0)), color) - self.assertEqual(dst.get_at(((i * 10) + 5, 5)), color) - - self.assertEqual(len(results), NUM_SURFS) - - t0 = time() - results = dst.blits(blit_list, doreturn=0) - t1 = time() - if PRINT_TIMING: - print("Surface.blits doreturn=0: %s" % (t1 - t0)) - self.assertEqual(results, None) - - t0 = time() - results = dst.blits(((surf, dest) for surf, dest in blit_list)) - t1 = time() - if PRINT_TIMING: - print("Surface.blits generator: %s" % (t1 - t0)) - - def test_blits_not_sequence(self): - dst = pygame.Surface((100, 10), SRCALPHA, 32) - self.assertRaises(ValueError, dst.blits, None) - - def test_blits_wrong_length(self): - dst = pygame.Surface((100, 10), SRCALPHA, 32) - self.assertRaises( - ValueError, dst.blits, [pygame.Surface((10, 10), SRCALPHA, 32)] - ) - - def test_blits_bad_surf_args(self): - dst = pygame.Surface((100, 10), SRCALPHA, 32) - self.assertRaises(TypeError, dst.blits, [(None, None)]) - - def test_blits_bad_dest(self): - dst = pygame.Surface((100, 10), SRCALPHA, 32) - self.assertRaises( - TypeError, dst.blits, [(pygame.Surface((10, 10), SRCALPHA, 32), None)] - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/bufferproxy_test.py b/venv/Lib/site-packages/pygame/tests/bufferproxy_test.py deleted file mode 100644 index 3d9c0b3..0000000 --- a/venv/Lib/site-packages/pygame/tests/bufferproxy_test.py +++ /dev/null @@ -1,507 +0,0 @@ -import re -import weakref -import gc -import ctypes -import unittest - -import pygame -from pygame.bufferproxy import BufferProxy - - -try: - BufferError -except NameError: - from pygame import BufferError - - -class BufferProxyTest(unittest.TestCase): - view_keywords = { - "shape": (5, 4, 3), - "typestr": "|u1", - "data": (0, True), - "strides": (4, 20, 1), - } - - def test_module_name(self): - self.assertEqual(pygame.bufferproxy.__name__, "pygame.bufferproxy") - - def test_class_name(self): - self.assertEqual(BufferProxy.__name__, "BufferProxy") - - def test___array_struct___property(self): - kwds = self.view_keywords - v = BufferProxy(kwds) - d = pygame.get_array_interface(v) - self.assertEqual(len(d), 5) - self.assertEqual(d["version"], 3) - self.assertEqual(d["shape"], kwds["shape"]) - self.assertEqual(d["typestr"], kwds["typestr"]) - self.assertEqual(d["data"], kwds["data"]) - self.assertEqual(d["strides"], kwds["strides"]) - - def test___array_interface___property(self): - kwds = self.view_keywords - v = BufferProxy(kwds) - d = v.__array_interface__ - self.assertEqual(len(d), 5) - self.assertEqual(d["version"], 3) - self.assertEqual(d["shape"], kwds["shape"]) - self.assertEqual(d["typestr"], kwds["typestr"]) - self.assertEqual(d["data"], kwds["data"]) - self.assertEqual(d["strides"], kwds["strides"]) - - def test_parent_property(self): - kwds = dict(self.view_keywords) - p = [] - kwds["parent"] = p - v = BufferProxy(kwds) - - self.assertIs(v.parent, p) - - def test_before(self): - def callback(parent): - success.append(parent is p) - - class MyException(Exception): - pass - - def raise_exception(parent): - raise MyException("Just a test.") - - kwds = dict(self.view_keywords) - p = [] - kwds["parent"] = p - - # For array interface - success = [] - kwds["before"] = callback - v = BufferProxy(kwds) - self.assertEqual(len(success), 0) - d = v.__array_interface__ - self.assertEqual(len(success), 1) - self.assertTrue(success[0]) - d = v.__array_interface__ - self.assertEqual(len(success), 1) - d 
= v = None - gc.collect() - self.assertEqual(len(success), 1) - - # For array struct - success = [] - kwds["before"] = callback - v = BufferProxy(kwds) - self.assertEqual(len(success), 0) - c = v.__array_struct__ - self.assertEqual(len(success), 1) - self.assertTrue(success[0]) - c = v.__array_struct__ - self.assertEqual(len(success), 1) - c = v = None - gc.collect() - self.assertEqual(len(success), 1) - - # Callback raises an exception - kwds["before"] = raise_exception - v = BufferProxy(kwds) - self.assertRaises(MyException, lambda: v.__array_struct__) - - def test_after(self): - def callback(parent): - success.append(parent is p) - - kwds = dict(self.view_keywords) - p = [] - kwds["parent"] = p - - # For array interface - success = [] - kwds["after"] = callback - v = BufferProxy(kwds) - self.assertEqual(len(success), 0) - d = v.__array_interface__ - self.assertEqual(len(success), 0) - d = v.__array_interface__ - self.assertEqual(len(success), 0) - d = v = None - gc.collect() - self.assertEqual(len(success), 1) - self.assertTrue(success[0]) - - # For array struct - success = [] - kwds["after"] = callback - v = BufferProxy(kwds) - self.assertEqual(len(success), 0) - c = v.__array_struct__ - self.assertEqual(len(success), 0) - c = v.__array_struct__ - self.assertEqual(len(success), 0) - c = v = None - gc.collect() - self.assertEqual(len(success), 1) - self.assertTrue(success[0]) - - def test_attribute(self): - v = BufferProxy(self.view_keywords) - self.assertRaises(AttributeError, getattr, v, "undefined") - v.undefined = 12 - self.assertEqual(v.undefined, 12) - del v.undefined - self.assertRaises(AttributeError, getattr, v, "undefined") - - def test_weakref(self): - v = BufferProxy(self.view_keywords) - weak_v = weakref.ref(v) - - self.assertIs(weak_v(), v) - - v = None - gc.collect() - - self.assertIsNone(weak_v()) - - def test_gc(self): - """refcount agnostic check that contained objects are freed""" - - def before_callback(parent): - return r[0] - - def after_callback(parent): - return r[1] - - class Obj(object): - pass - - p = Obj() - a = Obj() - r = [Obj(), Obj()] - weak_p = weakref.ref(p) - weak_a = weakref.ref(a) - weak_r0 = weakref.ref(r[0]) - weak_r1 = weakref.ref(r[1]) - weak_before = weakref.ref(before_callback) - weak_after = weakref.ref(after_callback) - kwds = dict(self.view_keywords) - kwds["parent"] = p - kwds["before"] = before_callback - kwds["after"] = after_callback - v = BufferProxy(kwds) - v.some_attribute = a - weak_v = weakref.ref(v) - kwds = p = a = before_callback = after_callback = None - gc.collect() - self.assertTrue(weak_p() is not None) - self.assertTrue(weak_a() is not None) - self.assertTrue(weak_before() is not None) - self.assertTrue(weak_after() is not None) - v = None - [gc.collect() for x in range(4)] - self.assertTrue(weak_v() is None) - self.assertTrue(weak_p() is None) - self.assertTrue(weak_a() is None) - self.assertTrue(weak_before() is None) - self.assertTrue(weak_after() is None) - self.assertTrue(weak_r0() is not None) - self.assertTrue(weak_r1() is not None) - r = None - gc.collect() - self.assertTrue(weak_r0() is None) - self.assertTrue(weak_r1() is None) - - # Cycle removal - kwds = dict(self.view_keywords) - kwds["parent"] = [] - v = BufferProxy(kwds) - v.some_attribute = v - tracked = True - for o in gc.get_objects(): - if o is v: - break - else: - tracked = False - self.assertTrue(tracked) - kwds["parent"].append(v) - kwds = None - gc.collect() - n1 = len(gc.garbage) - v = None - gc.collect() - n2 = len(gc.garbage) - self.assertEqual(n2, 
n1) - - def test_c_api(self): - api = pygame.bufferproxy._PYGAME_C_API - api_type = type(pygame.base._PYGAME_C_API) - - self.assertIsInstance(api, api_type) - - def test_repr(self): - v = BufferProxy(self.view_keywords) - cname = BufferProxy.__name__ - oname, ovalue = re.findall(r"<([^)]+)\(([^)]+)\)>", repr(v))[0] - self.assertEqual(oname, cname) - self.assertEqual(v.length, int(ovalue)) - - def test_subclassing(self): - class MyBufferProxy(BufferProxy): - def __repr__(self): - return "*%s*" % (BufferProxy.__repr__(self),) - - kwds = dict(self.view_keywords) - kwds["parent"] = 0 - v = MyBufferProxy(kwds) - self.assertEqual(v.parent, 0) - r = repr(v) - self.assertEqual(r[:2], "*<") - self.assertEqual(r[-2:], ">*") - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def NEWBUF_test_newbuf(self): - from ctypes import string_at - - from pygame.tests.test_utils import buftools - - Exporter = buftools.Exporter - Importer = buftools.Importer - exp = Exporter((10,), "B", readonly=True) - b = BufferProxy(exp) - self.assertEqual(b.length, exp.len) - self.assertEqual(b.raw, string_at(exp.buf, exp.len)) - d = b.__array_interface__ - try: - self.assertEqual(d["typestr"], "|u1") - self.assertEqual(d["shape"], exp.shape) - self.assertEqual(d["strides"], exp.strides) - self.assertEqual(d["data"], (exp.buf, True)) - finally: - d = None - exp = Exporter((3,), "=h") - b = BufferProxy(exp) - self.assertEqual(b.length, exp.len) - self.assertEqual(b.raw, string_at(exp.buf, exp.len)) - d = b.__array_interface__ - try: - lil_endian = pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN - f = "{}i{}".format("<" if lil_endian else ">", exp.itemsize) - self.assertEqual(d["typestr"], f) - self.assertEqual(d["shape"], exp.shape) - self.assertEqual(d["strides"], exp.strides) - self.assertEqual(d["data"], (exp.buf, False)) - finally: - d = None - - exp = Exporter((10, 2), "=i") - b = BufferProxy(exp) - imp = Importer(b, buftools.PyBUF_RECORDS) - self.assertTrue(imp.obj is b) - self.assertEqual(imp.buf, exp.buf) - self.assertEqual(imp.ndim, exp.ndim) - self.assertEqual(imp.format, exp.format) - self.assertEqual(imp.readonly, exp.readonly) - self.assertEqual(imp.itemsize, exp.itemsize) - self.assertEqual(imp.len, exp.len) - self.assertEqual(imp.shape, exp.shape) - self.assertEqual(imp.strides, exp.strides) - self.assertTrue(imp.suboffsets is None) - - d = { - "typestr": "|u1", - "shape": (10,), - "strides": (1,), - "data": (9, True), - } # 9? Will not reading the data anyway. 
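[Editorial sketch, not from the deleted test: the literal address 9 only works because the importer never dereferences it; with ctypes a real, readable backing buffer can be supplied instead, in the same style as the legacy tests further below.]

import ctypes

backing = ctypes.create_string_buffer(10)  # ten zero bytes we actually own
real_d = {
    "typestr": "|u1",
    "shape": (10,),
    "strides": (1,),
    "data": (ctypes.addressof(backing), True),  # (address, read-only flag)
}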
- b = BufferProxy(d) - imp = Importer(b, buftools.PyBUF_SIMPLE) - self.assertTrue(imp.obj is b) - self.assertEqual(imp.buf, 9) - self.assertEqual(imp.len, 10) - self.assertEqual(imp.format, None) - self.assertEqual(imp.itemsize, 1) - self.assertEqual(imp.ndim, 0) - self.assertTrue(imp.readonly) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - - try: - pygame.bufferproxy.get_segcount - except AttributeError: - pass - else: - - def test_oldbuf_arg(self): - self.OLDBUF_test_oldbuf_arg() - - def OLDBUF_test_oldbuf_arg(self): - from pygame.bufferproxy import get_segcount, get_read_buffer, get_write_buffer - - content = b"\x01\x00\x00\x02" * 12 - memory = ctypes.create_string_buffer(content) - memaddr = ctypes.addressof(memory) - - def raise_exception(o): - raise ValueError("An exception") - - bf = BufferProxy( - { - "shape": (len(content),), - "typestr": "|u1", - "data": (memaddr, False), - "strides": (1,), - } - ) - seglen, segaddr = get_read_buffer(bf, 0) - self.assertEqual(segaddr, 0) - self.assertEqual(seglen, 0) - seglen, segaddr = get_write_buffer(bf, 0) - self.assertEqual(segaddr, 0) - self.assertEqual(seglen, 0) - segcount, buflen = get_segcount(bf) - self.assertEqual(segcount, 1) - self.assertEqual(buflen, len(content)) - seglen, segaddr = get_read_buffer(bf, 0) - self.assertEqual(segaddr, memaddr) - self.assertEqual(seglen, len(content)) - seglen, segaddr = get_write_buffer(bf, 0) - self.assertEqual(segaddr, memaddr) - self.assertEqual(seglen, len(content)) - - bf = BufferProxy( - { - "shape": (len(content),), - "typestr": "|u1", - "data": (memaddr, True), - "strides": (1,), - } - ) - segcount, buflen = get_segcount(bf) - self.assertEqual(segcount, 1) - self.assertEqual(buflen, len(content)) - seglen, segaddr = get_read_buffer(bf, 0) - self.assertEqual(segaddr, memaddr) - self.assertEqual(seglen, len(content)) - self.assertRaises(ValueError, get_write_buffer, bf, 0) - - bf = BufferProxy( - { - "shape": (len(content),), - "typestr": "|u1", - "data": (memaddr, True), - "strides": (1,), - "before": raise_exception, - } - ) - segcount, buflen = get_segcount(bf) - self.assertEqual(segcount, 0) - self.assertEqual(buflen, 0) - - bf = BufferProxy( - { - "shape": (3, 4), - "typestr": "|u4", - "data": (memaddr, True), - "strides": (12, 4), - } - ) - segcount, buflen = get_segcount(bf) - self.assertEqual(segcount, 3 * 4) - self.assertEqual(buflen, 3 * 4 * 4) - for i in range(0, 4): - seglen, segaddr = get_read_buffer(bf, i) - self.assertEqual(segaddr, memaddr + i * 4) - self.assertEqual(seglen, 4) - - -class BufferProxyLegacyTest(unittest.TestCase): - content = b"\x01\x00\x00\x02" * 12 - buffer = ctypes.create_string_buffer(content) - data = (ctypes.addressof(buffer), True) - - def test_length(self): - - # __doc__ (as of 2008-08-02) for pygame.bufferproxy.BufferProxy.length: - - # The size of the buffer data in bytes. - bf = BufferProxy( - {"shape": (3, 4), "typestr": "|u4", "data": self.data, "strides": (12, 4)} - ) - self.assertEqual(bf.length, len(self.content)) - bf = BufferProxy( - {"shape": (3, 3), "typestr": "|u4", "data": self.data, "strides": (12, 4)} - ) - self.assertEqual(bf.length, 3 * 3 * 4) - - def test_raw(self): - - # __doc__ (as of 2008-08-02) for pygame.bufferproxy.BufferProxy.raw: - - # The raw buffer data as string. The string may contain NUL bytes. 
- - bf = BufferProxy( - {"shape": (len(self.content),), "typestr": "|u1", "data": self.data} - ) - self.assertEqual(bf.raw, self.content) - bf = BufferProxy( - {"shape": (3, 4), "typestr": "|u4", "data": self.data, "strides": (4, 12)} - ) - self.assertEqual(bf.raw, self.content) - bf = BufferProxy( - {"shape": (3, 4), "typestr": "|u1", "data": self.data, "strides": (16, 4)} - ) - self.assertRaises(ValueError, getattr, bf, "raw") - - def test_write(self): - - # __doc__ (as of 2008-08-02) for pygame.bufferproxy.BufferProxy.write: - - # B.write (bufferproxy, buffer, offset) -> None - # - # Writes raw data to the bufferproxy. - # - # Writes the raw data from buffer to the BufferProxy object, starting - # at the specified offset within the BufferProxy. - # If the length of the passed buffer exceeds the length of the - # BufferProxy (reduced by the offset), an IndexError will be raised. - from ctypes import c_byte, sizeof, addressof, string_at, memset - - nullbyte = "\x00".encode("latin_1") - Buf = c_byte * 10 - data_buf = Buf(*range(1, 3 * sizeof(Buf) + 1, 3)) - data = string_at(data_buf, sizeof(data_buf)) - buf = Buf() - bp = BufferProxy( - {"typestr": "|u1", "shape": (sizeof(buf),), "data": (addressof(buf), False)} - ) - try: - self.assertEqual(bp.raw, nullbyte * sizeof(Buf)) - bp.write(data) - self.assertEqual(bp.raw, data) - memset(buf, 0, sizeof(buf)) - bp.write(data[:3], 2) - raw = bp.raw - self.assertEqual(raw[:2], nullbyte * 2) - self.assertEqual(raw[2:5], data[:3]) - self.assertEqual(raw[5:], nullbyte * (sizeof(Buf) - 5)) - bp.write(data[:3], bp.length - 3) - raw = bp.raw - self.assertEqual(raw[-3:], data[:3]) - self.assertRaises(IndexError, bp.write, data, 1) - self.assertRaises(IndexError, bp.write, data[:5], -1) - self.assertRaises(IndexError, bp.write, data[:5], bp.length) - self.assertRaises(TypeError, bp.write, 12) - bp = BufferProxy( - { - "typestr": "|u1", - "shape": (sizeof(buf),), - "data": (addressof(buf), True), - } - ) - self.assertRaises(pygame.BufferError, bp.write, "123".encode("latin_1")) - finally: - # Make sure bp is garbage collected before buf - bp = None - gc.collect() - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/camera_test.py b/venv/Lib/site-packages/pygame/tests/camera_test.py deleted file mode 100644 index 79cf0f9..0000000 --- a/venv/Lib/site-packages/pygame/tests/camera_test.py +++ /dev/null @@ -1,5 +0,0 @@ -import unittest - - -class CameraModuleTest(unittest.TestCase): - pass diff --git a/venv/Lib/site-packages/pygame/tests/color_test.py b/venv/Lib/site-packages/pygame/tests/color_test.py deleted file mode 100644 index 16eefd1..0000000 --- a/venv/Lib/site-packages/pygame/tests/color_test.py +++ /dev/null @@ -1,1302 +0,0 @@ -import unittest -import math -import operator -import platform - -import pygame -from pygame.colordict import THECOLORS - - -IS_PYPY = "PyPy" == platform.python_implementation() -################################### CONSTANTS ################################## - -rgba_vals = [0, 1, 62, 63, 126, 127, 255] - -rgba_combinations = [ - (r, g, b, a) - for r in rgba_vals - for g in rgba_vals - for b in rgba_vals - for a in rgba_vals -] - -################################################################################ - - -def rgba_combos_Color_generator(): - for rgba in rgba_combinations: - yield pygame.Color(*rgba) - - -# Python gamma correct -def gamma_correct(rgba_0_255, gamma): - corrected = round(255.0 * math.pow(rgba_0_255 / 255.0, gamma)) - return max(min(int(corrected), 255), 0) - - 
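[Editorial illustration, not part of the deleted file: what the gamma_correct helper above does to a channel value, restated so the snippet runs on its own.]

import math

def gamma_correct_demo(value_0_255, gamma):
    # Same power-law mapping as gamma_correct above: normalise to 0..1,
    # raise to the gamma power, then rescale and clamp to 0..255.
    corrected = round(255.0 * math.pow(value_0_255 / 255.0, gamma))
    return max(min(int(corrected), 255), 0)

assert gamma_correct_demo(0, 2.2) == 0
assert gamma_correct_demo(255, 2.2) == 255      # the endpoints are fixed points
assert gamma_correct_demo(127, 2.2) < 127       # gamma > 1 darkens mid-tones
assert gamma_correct_demo(127, 1 / 2.2) > 127   # gamma < 1 brightens them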
-################################################################################ - -# TODO: add tests for -# correct_gamma() -- test against statically defined verified correct values -# coerce () -- ?? - - -def _assignr(x, y): - x.r = y - - -def _assigng(x, y): - x.g = y - - -def _assignb(x, y): - x.b = y - - -def _assigna(x, y): - x.a = y - - -def _assign_item(x, p, y): - x[p] = y - - -class ColorTypeTest(unittest.TestCase): - def test_new(self): - c = pygame.Color.__new__(pygame.Color) - self.assertEqual(c, pygame.Color(0, 0, 0, 255)) - self.assertEqual(len(c), 4) - - def test_init(self): - c = pygame.Color(10, 20, 30, 200) - self.assertEqual(c, (10, 20, 30, 200)) - c.set_length(3) - self.assertEqual(len(c), 3) - c.__init__(100, 110, 120, 128) - self.assertEqual(len(c), 4) - self.assertEqual(c, (100, 110, 120, 128)) - - def test_invalid_html_hex_codes(self): - # This was a problem with the way 2 digit hex numbers were - # calculated. The test_hex_digits test is related to the fix. - Color = pygame.color.Color - self.assertRaises(ValueError, lambda: Color("# f000000")) - self.assertRaises(ValueError, lambda: Color("#f 000000")) - self.assertRaises(ValueError, lambda: Color("#-f000000")) - - def test_hex_digits(self): - # This is an implementation specific test. - # Two digit hex numbers are calculated using table lookups - # for the upper and lower digits. - Color = pygame.color.Color - self.assertEqual(Color("#00000000").r, 0x00) - self.assertEqual(Color("#10000000").r, 0x10) - self.assertEqual(Color("#20000000").r, 0x20) - self.assertEqual(Color("#30000000").r, 0x30) - self.assertEqual(Color("#40000000").r, 0x40) - self.assertEqual(Color("#50000000").r, 0x50) - self.assertEqual(Color("#60000000").r, 0x60) - self.assertEqual(Color("#70000000").r, 0x70) - self.assertEqual(Color("#80000000").r, 0x80) - self.assertEqual(Color("#90000000").r, 0x90) - self.assertEqual(Color("#A0000000").r, 0xA0) - self.assertEqual(Color("#B0000000").r, 0xB0) - self.assertEqual(Color("#C0000000").r, 0xC0) - self.assertEqual(Color("#D0000000").r, 0xD0) - self.assertEqual(Color("#E0000000").r, 0xE0) - self.assertEqual(Color("#F0000000").r, 0xF0) - self.assertEqual(Color("#01000000").r, 0x01) - self.assertEqual(Color("#02000000").r, 0x02) - self.assertEqual(Color("#03000000").r, 0x03) - self.assertEqual(Color("#04000000").r, 0x04) - self.assertEqual(Color("#05000000").r, 0x05) - self.assertEqual(Color("#06000000").r, 0x06) - self.assertEqual(Color("#07000000").r, 0x07) - self.assertEqual(Color("#08000000").r, 0x08) - self.assertEqual(Color("#09000000").r, 0x09) - self.assertEqual(Color("#0A000000").r, 0x0A) - self.assertEqual(Color("#0B000000").r, 0x0B) - self.assertEqual(Color("#0C000000").r, 0x0C) - self.assertEqual(Color("#0D000000").r, 0x0D) - self.assertEqual(Color("#0E000000").r, 0x0E) - self.assertEqual(Color("#0F000000").r, 0x0F) - - def test_comparison(self): - Color = pygame.color.Color - - # Check valid comparisons - self.assertTrue(Color(255, 0, 0, 0) == Color(255, 0, 0, 0)) - self.assertTrue(Color(0, 255, 0, 0) == Color(0, 255, 0, 0)) - self.assertTrue(Color(0, 0, 255, 0) == Color(0, 0, 255, 0)) - self.assertTrue(Color(0, 0, 0, 255) == Color(0, 0, 0, 255)) - self.assertFalse(Color(0, 0, 0, 0) == Color(255, 0, 0, 0)) - self.assertFalse(Color(0, 0, 0, 0) == Color(0, 255, 0, 0)) - self.assertFalse(Color(0, 0, 0, 0) == Color(0, 0, 255, 0)) - self.assertFalse(Color(0, 0, 0, 0) == Color(0, 0, 0, 255)) - self.assertTrue(Color(0, 0, 0, 0) != Color(255, 0, 0, 0)) - self.assertTrue(Color(0, 0, 0, 0) != 
Color(0, 255, 0, 0)) - self.assertTrue(Color(0, 0, 0, 0) != Color(0, 0, 255, 0)) - self.assertTrue(Color(0, 0, 0, 0) != Color(0, 0, 0, 255)) - self.assertFalse(Color(255, 0, 0, 0) != Color(255, 0, 0, 0)) - self.assertFalse(Color(0, 255, 0, 0) != Color(0, 255, 0, 0)) - self.assertFalse(Color(0, 0, 255, 0) != Color(0, 0, 255, 0)) - self.assertFalse(Color(0, 0, 0, 255) != Color(0, 0, 0, 255)) - - self.assertTrue(Color(255, 0, 0, 0) == (255, 0, 0, 0)) - self.assertTrue(Color(0, 255, 0, 0) == (0, 255, 0, 0)) - self.assertTrue(Color(0, 0, 255, 0) == (0, 0, 255, 0)) - self.assertTrue(Color(0, 0, 0, 255) == (0, 0, 0, 255)) - self.assertFalse(Color(0, 0, 0, 0) == (255, 0, 0, 0)) - self.assertFalse(Color(0, 0, 0, 0) == (0, 255, 0, 0)) - self.assertFalse(Color(0, 0, 0, 0) == (0, 0, 255, 0)) - self.assertFalse(Color(0, 0, 0, 0) == (0, 0, 0, 255)) - self.assertTrue(Color(0, 0, 0, 0) != (255, 0, 0, 0)) - self.assertTrue(Color(0, 0, 0, 0) != (0, 255, 0, 0)) - self.assertTrue(Color(0, 0, 0, 0) != (0, 0, 255, 0)) - self.assertTrue(Color(0, 0, 0, 0) != (0, 0, 0, 255)) - self.assertFalse(Color(255, 0, 0, 0) != (255, 0, 0, 0)) - self.assertFalse(Color(0, 255, 0, 0) != (0, 255, 0, 0)) - self.assertFalse(Color(0, 0, 255, 0) != (0, 0, 255, 0)) - self.assertFalse(Color(0, 0, 0, 255) != (0, 0, 0, 255)) - - self.assertTrue((255, 0, 0, 0) == Color(255, 0, 0, 0)) - self.assertTrue((0, 255, 0, 0) == Color(0, 255, 0, 0)) - self.assertTrue((0, 0, 255, 0) == Color(0, 0, 255, 0)) - self.assertTrue((0, 0, 0, 255) == Color(0, 0, 0, 255)) - self.assertFalse((0, 0, 0, 0) == Color(255, 0, 0, 0)) - self.assertFalse((0, 0, 0, 0) == Color(0, 255, 0, 0)) - self.assertFalse((0, 0, 0, 0) == Color(0, 0, 255, 0)) - self.assertFalse((0, 0, 0, 0) == Color(0, 0, 0, 255)) - self.assertTrue((0, 0, 0, 0) != Color(255, 0, 0, 0)) - self.assertTrue((0, 0, 0, 0) != Color(0, 255, 0, 0)) - self.assertTrue((0, 0, 0, 0) != Color(0, 0, 255, 0)) - self.assertTrue((0, 0, 0, 0) != Color(0, 0, 0, 255)) - self.assertFalse((255, 0, 0, 0) != Color(255, 0, 0, 0)) - self.assertFalse((0, 255, 0, 0) != Color(0, 255, 0, 0)) - self.assertFalse((0, 0, 255, 0) != Color(0, 0, 255, 0)) - self.assertFalse((0, 0, 0, 255) != Color(0, 0, 0, 255)) - - class TupleSubclass(tuple): - pass - - self.assertTrue(Color(255, 0, 0, 0) == TupleSubclass((255, 0, 0, 0))) - self.assertTrue(TupleSubclass((255, 0, 0, 0)) == Color(255, 0, 0, 0)) - self.assertFalse(Color(255, 0, 0, 0) != TupleSubclass((255, 0, 0, 0))) - self.assertFalse(TupleSubclass((255, 0, 0, 0)) != Color(255, 0, 0, 0)) - - # These are not supported so will be unequal. - self.assertFalse(Color(255, 0, 0, 0) == "#ff000000") - self.assertTrue(Color(255, 0, 0, 0) != "#ff000000") - - self.assertFalse("#ff000000" == Color(255, 0, 0, 0)) - self.assertTrue("#ff000000" != Color(255, 0, 0, 0)) - - self.assertFalse(Color(255, 0, 0, 0) == 0xFF000000) - self.assertTrue(Color(255, 0, 0, 0) != 0xFF000000) - - self.assertFalse(0xFF000000 == Color(255, 0, 0, 0)) - self.assertTrue(0xFF000000 != Color(255, 0, 0, 0)) - - self.assertFalse(Color(255, 0, 0, 0) == [255, 0, 0, 0]) - self.assertTrue(Color(255, 0, 0, 0) != [255, 0, 0, 0]) - - self.assertFalse([255, 0, 0, 0] == Color(255, 0, 0, 0)) - self.assertTrue([255, 0, 0, 0] != Color(255, 0, 0, 0)) - - # Comparison is not implemented for invalid color values. 
- class Test(object): - def __eq__(self, other): - return -1 - - def __ne__(self, other): - return -2 - - class TestTuple(tuple): - def __eq__(self, other): - return -1 - - def __ne__(self, other): - return -2 - - t = Test() - t_tuple = TestTuple(("a", 0, 0, 0)) - black = Color("black") - self.assertEqual(black == t, -1) - self.assertEqual(t == black, -1) - self.assertEqual(black != t, -2) - self.assertEqual(t != black, -2) - self.assertEqual(black == t_tuple, -1) - self.assertEqual(black != t_tuple, -2) - self.assertEqual(t_tuple == black, -1) - self.assertEqual(t_tuple != black, -2) - - def test_ignore_whitespace(self): - self.assertEqual(pygame.color.Color("red"), pygame.color.Color(" r e d ")) - - def test_slice(self): - # """|tags: python3_ignore|""" - - # slicing a color gives you back a tuple. - # do all sorts of slice combinations. - c = pygame.Color(1, 2, 3, 4) - - self.assertEqual((1, 2, 3, 4), c[:]) - self.assertEqual((1, 2, 3), c[:-1]) - - self.assertEqual((), c[:-5]) - - self.assertEqual((1, 2, 3, 4), c[:4]) - self.assertEqual((1, 2, 3, 4), c[:5]) - self.assertEqual((1, 2), c[:2]) - self.assertEqual((1,), c[:1]) - self.assertEqual((), c[:0]) - - self.assertEqual((2,), c[1:-2]) - self.assertEqual((3, 4), c[-2:]) - self.assertEqual((4,), c[-1:]) - - # NOTE: assigning to a slice is currently unsupported. - - def test_unpack(self): - # should be able to unpack to r,g,b,a and r,g,b - c = pygame.Color(1, 2, 3, 4) - r, g, b, a = c - self.assertEqual((1, 2, 3, 4), (r, g, b, a)) - self.assertEqual(c, (r, g, b, a)) - - c.set_length(3) - r, g, b = c - self.assertEqual((1, 2, 3), (r, g, b)) - - def test_length(self): - # should be able to unpack to r,g,b,a and r,g,b - c = pygame.Color(1, 2, 3, 4) - self.assertEqual(len(c), 4) - - c.set_length(3) - self.assertEqual(len(c), 3) - - # it keeps the old alpha anyway... 
- self.assertEqual(c.a, 4) - - # however you can't get the alpha in this way: - self.assertRaises(IndexError, lambda x: c[x], 4) - - c.set_length(4) - self.assertEqual(len(c), 4) - self.assertEqual(len(c), 4) - - self.assertRaises(ValueError, c.set_length, 5) - self.assertRaises(ValueError, c.set_length, -1) - self.assertRaises(ValueError, c.set_length, 0) - self.assertRaises(ValueError, c.set_length, pow(2, 33)) - - def test_case_insensitivity_of_string_args(self): - self.assertEqual(pygame.color.Color("red"), pygame.color.Color("Red")) - - def test_color(self): - """Ensures Color objects can be created.""" - color = pygame.Color(0, 0, 0, 0) - - self.assertIsInstance(color, pygame.Color) - - def test_color__rgba_int_args(self): - """Ensures Color objects can be created using ints.""" - color = pygame.Color(10, 20, 30, 40) - - self.assertEqual(color.r, 10) - self.assertEqual(color.g, 20) - self.assertEqual(color.b, 30) - self.assertEqual(color.a, 40) - - def test_color__rgba_int_args_without_alpha(self): - """Ensures Color objects can be created without providing alpha.""" - color = pygame.Color(10, 20, 30) - - self.assertEqual(color.r, 10) - self.assertEqual(color.g, 20) - self.assertEqual(color.b, 30) - self.assertEqual(color.a, 255) - - def test_color__rgba_int_args_invalid_value(self): - """Ensures invalid values are detected when creating Color objects.""" - self.assertRaises(ValueError, pygame.Color, 257, 10, 105, 44) - self.assertRaises(ValueError, pygame.Color, 10, 257, 105, 44) - self.assertRaises(ValueError, pygame.Color, 10, 105, 257, 44) - self.assertRaises(ValueError, pygame.Color, 10, 105, 44, 257) - - def test_color__rgba_int_args_invalid_value_without_alpha(self): - """Ensures invalid values are detected when creating Color objects - without providing an alpha. - """ - self.assertRaises(ValueError, pygame.Color, 256, 10, 105) - self.assertRaises(ValueError, pygame.Color, 10, 256, 105) - self.assertRaises(ValueError, pygame.Color, 10, 105, 256) - - def test_color__color_object_arg(self): - """Ensures Color objects can be created using Color objects.""" - color_args = (10, 20, 30, 40) - color_obj = pygame.Color(*color_args) - - new_color_obj = pygame.Color(color_obj) - - self.assertIsInstance(new_color_obj, pygame.Color) - self.assertEqual(new_color_obj, color_obj) - self.assertEqual(new_color_obj.r, color_args[0]) - self.assertEqual(new_color_obj.g, color_args[1]) - self.assertEqual(new_color_obj.b, color_args[2]) - self.assertEqual(new_color_obj.a, color_args[3]) - - def test_color__name_str_arg(self): - """Ensures Color objects can be created using str names.""" - for name in ("aquamarine3", "AQUAMARINE3", "AqUAmArIne3"): - color = pygame.Color(name) - - self.assertEqual(color.r, 102) - self.assertEqual(color.g, 205) - self.assertEqual(color.b, 170) - self.assertEqual(color.a, 255) - - def test_color__name_str_arg_from_colordict(self): - """Ensures Color objects can be created using str names - from the THECOLORS dict.""" - for name, values in THECOLORS.items(): - color = pygame.Color(name) - - self.assertEqual(color.r, values[0]) - self.assertEqual(color.g, values[1]) - self.assertEqual(color.b, values[2]) - self.assertEqual(color.a, values[3]) - - def test_color__html_str_arg(self): - """Ensures Color objects can be created using html strings.""" - # See test_webstyle() for related tests. 
- color = pygame.Color("#a1B2c3D4") - - self.assertEqual(color.r, 0xA1) - self.assertEqual(color.g, 0xB2) - self.assertEqual(color.b, 0xC3) - self.assertEqual(color.a, 0xD4) - - def test_color__hex_str_arg(self): - """Ensures Color objects can be created using hex strings.""" - # See test_webstyle() for related tests. - color = pygame.Color("0x1a2B3c4D") - - self.assertEqual(color.r, 0x1A) - self.assertEqual(color.g, 0x2B) - self.assertEqual(color.b, 0x3C) - self.assertEqual(color.a, 0x4D) - - def test_color__int_arg(self): - """Ensures Color objects can be created using one int value.""" - for value in (0x0, 0xFFFFFFFF, 0xAABBCCDD): - color = pygame.Color(value) - - self.assertEqual(color.r, (value >> 24) & 0xFF) - self.assertEqual(color.g, (value >> 16) & 0xFF) - self.assertEqual(color.b, (value >> 8) & 0xFF) - self.assertEqual(color.a, value & 0xFF) - - def test_color__int_arg_invalid(self): - """Ensures invalid int values are detected when creating Color objects.""" - with self.assertRaises(ValueError): - color = pygame.Color(0x1FFFFFFFF) - - def test_color__sequence_arg(self): - """Ensures Color objects can be created using tuples/lists.""" - color_values = (33, 44, 55, 66) - for seq_type in (tuple, list): - color = pygame.Color(seq_type(color_values)) - - self.assertEqual(color.r, color_values[0]) - self.assertEqual(color.g, color_values[1]) - self.assertEqual(color.b, color_values[2]) - self.assertEqual(color.a, color_values[3]) - - def test_color__sequence_arg_without_alpha(self): - """Ensures Color objects can be created using tuples/lists - without providing an alpha value. - """ - color_values = (33, 44, 55) - for seq_type in (tuple, list): - color = pygame.Color(seq_type(color_values)) - - self.assertEqual(color.r, color_values[0]) - self.assertEqual(color.g, color_values[1]) - self.assertEqual(color.b, color_values[2]) - self.assertEqual(color.a, 255) - - def test_color__sequence_arg_invalid_value(self): - """Ensures invalid sequences are detected when creating Color objects.""" - cls = pygame.Color - for seq_type in (tuple, list): - self.assertRaises(ValueError, cls, seq_type((256, 90, 80, 70))) - self.assertRaises(ValueError, cls, seq_type((100, 256, 80, 70))) - self.assertRaises(ValueError, cls, seq_type((100, 90, 256, 70))) - self.assertRaises(ValueError, cls, seq_type((100, 90, 80, 256))) - - def test_color__sequence_arg_invalid_value_without_alpha(self): - """Ensures invalid sequences are detected when creating Color objects - without providing an alpha. - """ - cls = pygame.Color - for seq_type in (tuple, list): - self.assertRaises(ValueError, cls, seq_type((256, 90, 80))) - self.assertRaises(ValueError, cls, seq_type((100, 256, 80))) - self.assertRaises(ValueError, cls, seq_type((100, 90, 256))) - - def test_color__sequence_arg_invalid_format(self): - """Ensures invalid sequences are detected when creating Color objects - with the wrong number of values. 
- """ - cls = pygame.Color - for seq_type in (tuple, list): - self.assertRaises(ValueError, cls, seq_type((100,))) - self.assertRaises(ValueError, cls, seq_type((100, 90))) - self.assertRaises(ValueError, cls, seq_type((100, 90, 80, 70, 60))) - - def test_rgba(self): - c = pygame.Color(0) - self.assertEqual(c.r, 0) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 0) - self.assertEqual(c.a, 0) - - # Test simple assignments - c.r = 123 - self.assertEqual(c.r, 123) - self.assertRaises(ValueError, _assignr, c, 537) - self.assertEqual(c.r, 123) - self.assertRaises(ValueError, _assignr, c, -3) - self.assertEqual(c.r, 123) - - c.g = 55 - self.assertEqual(c.g, 55) - self.assertRaises(ValueError, _assigng, c, 348) - self.assertEqual(c.g, 55) - self.assertRaises(ValueError, _assigng, c, -44) - self.assertEqual(c.g, 55) - - c.b = 77 - self.assertEqual(c.b, 77) - self.assertRaises(ValueError, _assignb, c, 256) - self.assertEqual(c.b, 77) - self.assertRaises(ValueError, _assignb, c, -12) - self.assertEqual(c.b, 77) - - c.a = 255 - self.assertEqual(c.a, 255) - self.assertRaises(ValueError, _assigna, c, 312) - self.assertEqual(c.a, 255) - self.assertRaises(ValueError, _assigna, c, -10) - self.assertEqual(c.a, 255) - - def test_repr(self): - c = pygame.Color(68, 38, 26, 69) - t = "(68, 38, 26, 69)" - self.assertEqual(repr(c), t) - - def test_add(self): - c1 = pygame.Color(0) - self.assertEqual(c1.r, 0) - self.assertEqual(c1.g, 0) - self.assertEqual(c1.b, 0) - self.assertEqual(c1.a, 0) - - c2 = pygame.Color(20, 33, 82, 193) - self.assertEqual(c2.r, 20) - self.assertEqual(c2.g, 33) - self.assertEqual(c2.b, 82) - self.assertEqual(c2.a, 193) - - c3 = c1 + c2 - self.assertEqual(c3.r, 20) - self.assertEqual(c3.g, 33) - self.assertEqual(c3.b, 82) - self.assertEqual(c3.a, 193) - - c3 = c3 + c2 - self.assertEqual(c3.r, 40) - self.assertEqual(c3.g, 66) - self.assertEqual(c3.b, 164) - self.assertEqual(c3.a, 255) - - # Issue #286: Is type checking done for Python 3.x? - self.assertRaises(TypeError, operator.add, c1, None) - self.assertRaises(TypeError, operator.add, None, c1) - - def test_sub(self): - c1 = pygame.Color(0xFFFFFFFF) - self.assertEqual(c1.r, 255) - self.assertEqual(c1.g, 255) - self.assertEqual(c1.b, 255) - self.assertEqual(c1.a, 255) - - c2 = pygame.Color(20, 33, 82, 193) - self.assertEqual(c2.r, 20) - self.assertEqual(c2.g, 33) - self.assertEqual(c2.b, 82) - self.assertEqual(c2.a, 193) - - c3 = c1 - c2 - self.assertEqual(c3.r, 235) - self.assertEqual(c3.g, 222) - self.assertEqual(c3.b, 173) - self.assertEqual(c3.a, 62) - - c3 = c3 - c2 - self.assertEqual(c3.r, 215) - self.assertEqual(c3.g, 189) - self.assertEqual(c3.b, 91) - self.assertEqual(c3.a, 0) - - # Issue #286: Is type checking done for Python 3.x? - self.assertRaises(TypeError, operator.sub, c1, None) - self.assertRaises(TypeError, operator.sub, None, c1) - - def test_mul(self): - c1 = pygame.Color(0x01010101) - self.assertEqual(c1.r, 1) - self.assertEqual(c1.g, 1) - self.assertEqual(c1.b, 1) - self.assertEqual(c1.a, 1) - - c2 = pygame.Color(2, 5, 3, 22) - self.assertEqual(c2.r, 2) - self.assertEqual(c2.g, 5) - self.assertEqual(c2.b, 3) - self.assertEqual(c2.a, 22) - - c3 = c1 * c2 - self.assertEqual(c3.r, 2) - self.assertEqual(c3.g, 5) - self.assertEqual(c3.b, 3) - self.assertEqual(c3.a, 22) - - c3 = c3 * c2 - self.assertEqual(c3.r, 4) - self.assertEqual(c3.g, 25) - self.assertEqual(c3.b, 9) - self.assertEqual(c3.a, 255) - - # Issue #286: Is type checking done for Python 3.x? 
- self.assertRaises(TypeError, operator.mul, c1, None) - self.assertRaises(TypeError, operator.mul, None, c1) - - def test_div(self): - c1 = pygame.Color(0x80808080) - self.assertEqual(c1.r, 128) - self.assertEqual(c1.g, 128) - self.assertEqual(c1.b, 128) - self.assertEqual(c1.a, 128) - - c2 = pygame.Color(2, 4, 8, 16) - self.assertEqual(c2.r, 2) - self.assertEqual(c2.g, 4) - self.assertEqual(c2.b, 8) - self.assertEqual(c2.a, 16) - - c3 = c1 // c2 - self.assertEqual(c3.r, 64) - self.assertEqual(c3.g, 32) - self.assertEqual(c3.b, 16) - self.assertEqual(c3.a, 8) - - c3 = c3 // c2 - self.assertEqual(c3.r, 32) - self.assertEqual(c3.g, 8) - self.assertEqual(c3.b, 2) - self.assertEqual(c3.a, 0) - - # Issue #286: Is type checking done for Python 3.x? - self.assertRaises(TypeError, operator.floordiv, c1, None) - self.assertRaises(TypeError, operator.floordiv, None, c1) - - # Division by zero check - dividend = pygame.Color(255, 255, 255, 255) - for i in range(4): - divisor = pygame.Color(64, 64, 64, 64) - divisor[i] = 0 - quotient = pygame.Color(3, 3, 3, 3) - quotient[i] = 0 - self.assertEqual(dividend // divisor, quotient) - - def test_mod(self): - c1 = pygame.Color(0xFFFFFFFF) - self.assertEqual(c1.r, 255) - self.assertEqual(c1.g, 255) - self.assertEqual(c1.b, 255) - self.assertEqual(c1.a, 255) - - c2 = pygame.Color(2, 4, 8, 16) - self.assertEqual(c2.r, 2) - self.assertEqual(c2.g, 4) - self.assertEqual(c2.b, 8) - self.assertEqual(c2.a, 16) - - c3 = c1 % c2 - self.assertEqual(c3.r, 1) - self.assertEqual(c3.g, 3) - self.assertEqual(c3.b, 7) - self.assertEqual(c3.a, 15) - - # Issue #286: Is type checking done for Python 3.x? - self.assertRaises(TypeError, operator.mod, c1, None) - self.assertRaises(TypeError, operator.mod, None, c1) - - # Division by zero check - dividend = pygame.Color(255, 255, 255, 255) - for i in range(4): - divisor = pygame.Color(64, 64, 64, 64) - divisor[i] = 0 - quotient = pygame.Color(63, 63, 63, 63) - quotient[i] = 0 - self.assertEqual(dividend % divisor, quotient) - - def test_float(self): - c = pygame.Color(0xCC00CC00) - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 0) - self.assertEqual(float(c), float(0xCC00CC00)) - - c = pygame.Color(0x33727592) - self.assertEqual(c.r, 51) - self.assertEqual(c.g, 114) - self.assertEqual(c.b, 117) - self.assertEqual(c.a, 146) - self.assertEqual(float(c), float(0x33727592)) - - def test_oct(self): - c = pygame.Color(0xCC00CC00) - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 0) - self.assertEqual(oct(c), oct(0xCC00CC00)) - - c = pygame.Color(0x33727592) - self.assertEqual(c.r, 51) - self.assertEqual(c.g, 114) - self.assertEqual(c.b, 117) - self.assertEqual(c.a, 146) - self.assertEqual(oct(c), oct(0x33727592)) - - def test_hex(self): - c = pygame.Color(0xCC00CC00) - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 0) - self.assertEqual(hex(c), hex(0xCC00CC00)) - - c = pygame.Color(0x33727592) - self.assertEqual(c.r, 51) - self.assertEqual(c.g, 114) - self.assertEqual(c.b, 117) - self.assertEqual(c.a, 146) - self.assertEqual(hex(c), hex(0x33727592)) - - def test_webstyle(self): - c = pygame.Color("#CC00CC11") - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 17) - self.assertEqual(hex(c), hex(0xCC00CC11)) - - c = pygame.Color("#CC00CC") - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - 
self.assertEqual(c.b, 204) - self.assertEqual(c.a, 255) - self.assertEqual(hex(c), hex(0xCC00CCFF)) - - c = pygame.Color("0xCC00CC11") - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 17) - self.assertEqual(hex(c), hex(0xCC00CC11)) - - c = pygame.Color("0xCC00CC") - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 255) - self.assertEqual(hex(c), hex(0xCC00CCFF)) - - self.assertRaises(ValueError, pygame.Color, "#cc00qq") - self.assertRaises(ValueError, pygame.Color, "0xcc00qq") - self.assertRaises(ValueError, pygame.Color, "09abcdef") - self.assertRaises(ValueError, pygame.Color, "09abcde") - self.assertRaises(ValueError, pygame.Color, "quarky") - - def test_int(self): - # This will be a long - c = pygame.Color(0xCC00CC00) - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 0) - self.assertEqual(int(c), int(0xCC00CC00)) - - # This will be an int - c = pygame.Color(0x33727592) - self.assertEqual(c.r, 51) - self.assertEqual(c.g, 114) - self.assertEqual(c.b, 117) - self.assertEqual(c.a, 146) - self.assertEqual(int(c), int(0x33727592)) - - def test_long(self): - # This will be a long - c = pygame.Color(0xCC00CC00) - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 204) - self.assertEqual(c.a, 0) - self.assertEqual(int(c), int(0xCC00CC00)) - - # This will be an int - c = pygame.Color(0x33727592) - self.assertEqual(c.r, 51) - self.assertEqual(c.g, 114) - self.assertEqual(c.b, 117) - self.assertEqual(c.a, 146) - self.assertEqual(int(c), int(0x33727592)) - - def test_normalize(self): - c = pygame.Color(204, 38, 194, 55) - self.assertEqual(c.r, 204) - self.assertEqual(c.g, 38) - self.assertEqual(c.b, 194) - self.assertEqual(c.a, 55) - - t = c.normalize() - - self.assertAlmostEqual(t[0], 0.800000, 5) - self.assertAlmostEqual(t[1], 0.149016, 5) - self.assertAlmostEqual(t[2], 0.760784, 5) - self.assertAlmostEqual(t[3], 0.215686, 5) - - def test_len(self): - c = pygame.Color(204, 38, 194, 55) - self.assertEqual(len(c), 4) - - def test_get_item(self): - c = pygame.Color(204, 38, 194, 55) - self.assertEqual(c[0], 204) - self.assertEqual(c[1], 38) - self.assertEqual(c[2], 194) - self.assertEqual(c[3], 55) - - def test_set_item(self): - c = pygame.Color(204, 38, 194, 55) - self.assertEqual(c[0], 204) - self.assertEqual(c[1], 38) - self.assertEqual(c[2], 194) - self.assertEqual(c[3], 55) - - c[0] = 33 - self.assertEqual(c[0], 33) - c[1] = 48 - self.assertEqual(c[1], 48) - c[2] = 173 - self.assertEqual(c[2], 173) - c[3] = 213 - self.assertEqual(c[3], 213) - - # Now try some 'invalid' ones - self.assertRaises(TypeError, _assign_item, c, 0, 95.485) - self.assertEqual(c[0], 33) - self.assertRaises(ValueError, _assign_item, c, 1, -83) - self.assertEqual(c[1], 48) - self.assertRaises(TypeError, _assign_item, c, 2, "Hello") - self.assertEqual(c[2], 173) - - def test_Color_type_works_for_Surface_get_and_set_colorkey(self): - s = pygame.Surface((32, 32)) - - c = pygame.Color(33, 22, 11, 255) - s.set_colorkey(c) - - get_r, get_g, get_b, get_a = s.get_colorkey() - - self.assertTrue(get_r == c.r) - self.assertTrue(get_g == c.g) - self.assertTrue(get_b == c.b) - self.assertTrue(get_a == c.a) - - ########## HSLA, HSVA, CMY, I1I2I3 ALL ELEMENTS WITHIN SPECIFIED RANGE ######### - - def test_hsla__all_elements_within_limits(self): - for c in rgba_combos_Color_generator(): - h, s, l, a = c.hsla - self.assertTrue(0 <= h <= 
360) - self.assertTrue(0 <= s <= 100) - self.assertTrue(0 <= l <= 100) - self.assertTrue(0 <= a <= 100) - - def test_hsva__all_elements_within_limits(self): - for c in rgba_combos_Color_generator(): - h, s, v, a = c.hsva - self.assertTrue(0 <= h <= 360) - self.assertTrue(0 <= s <= 100) - self.assertTrue(0 <= v <= 100) - self.assertTrue(0 <= a <= 100) - - def test_cmy__all_elements_within_limits(self): - for c in rgba_combos_Color_generator(): - c, m, y = c.cmy - self.assertTrue(0 <= c <= 1) - self.assertTrue(0 <= m <= 1) - self.assertTrue(0 <= y <= 1) - - def test_i1i2i3__all_elements_within_limits(self): - for c in rgba_combos_Color_generator(): - i1, i2, i3 = c.i1i2i3 - self.assertTrue(0 <= i1 <= 1) - self.assertTrue(-0.5 <= i2 <= 0.5) - self.assertTrue(-0.5 <= i3 <= 0.5) - - def test_issue_269(self): - """PyColor OverflowError on HSVA with hue value of 360 - - >>> c = pygame.Color(0) - >>> c.hsva = (360,0,0,0) - Traceback (most recent call last): - File "", line 1, in - OverflowError: this is not allowed to happen ever - >>> pygame.ver - '1.9.1release' - >>> - - """ - - c = pygame.Color(0) - c.hsva = 360, 0, 0, 0 - self.assertEqual(c.hsva, (0, 0, 0, 0)) - c.hsva = 360, 100, 100, 100 - self.assertEqual(c.hsva, (0, 100, 100, 100)) - self.assertEqual(c, (255, 0, 0, 255)) - - ####################### COLORSPACE PROPERTY SANITY TESTS ####################### - - def colorspaces_converted_should_not_raise(self, prop): - fails = 0 - - x = 0 - for c in rgba_combos_Color_generator(): - x += 1 - - other = pygame.Color(0) - - try: - setattr(other, prop, getattr(c, prop)) - # eg other.hsla = c.hsla - - except ValueError: - fails += 1 - - self.assertTrue(x > 0, "x is combination counter, 0 means no tests!") - self.assertTrue((fails, x) == (0, x)) - - def test_hsla__sanity_testing_converted_should_not_raise(self): - self.colorspaces_converted_should_not_raise("hsla") - - def test_hsva__sanity_testing_converted_should_not_raise(self): - self.colorspaces_converted_should_not_raise("hsva") - - def test_cmy__sanity_testing_converted_should_not_raise(self): - self.colorspaces_converted_should_not_raise("cmy") - - def test_i1i2i3__sanity_testing_converted_should_not_raise(self): - self.colorspaces_converted_should_not_raise("i1i2i3") - - ################################################################################ - - def colorspaces_converted_should_equate_bar_rounding(self, prop): - for c in rgba_combos_Color_generator(): - other = pygame.Color(0) - - try: - setattr(other, prop, getattr(c, prop)) - # eg other.hsla = c.hsla - - self.assertTrue(abs(other.r - c.r) <= 1) - self.assertTrue(abs(other.b - c.b) <= 1) - self.assertTrue(abs(other.g - c.g) <= 1) - # CMY and I1I2I3 do not care about the alpha - if not prop in ("cmy", "i1i2i3"): - self.assertTrue(abs(other.a - c.a) <= 1) - - except ValueError: - pass # other tests will notify, this tests equation - - def test_hsla__sanity_testing_converted_should_equate_bar_rounding(self): - self.colorspaces_converted_should_equate_bar_rounding("hsla") - - def test_hsva__sanity_testing_converted_should_equate_bar_rounding(self): - self.colorspaces_converted_should_equate_bar_rounding("hsva") - - def test_cmy__sanity_testing_converted_should_equate_bar_rounding(self): - self.colorspaces_converted_should_equate_bar_rounding("cmy") - - def test_i1i2i3__sanity_testing_converted_should_equate_bar_rounding(self): - self.colorspaces_converted_should_equate_bar_rounding("i1i2i3") - - ################################################################################ - - def 
test_correct_gamma__verified_against_python_implementation(self): - "|tags:slow|" - # gamma_correct defined at top of page - - gammas = [i / 10.0 for i in range(1, 31)] # [0.1 ... 3.0] - gammas_len = len(gammas) - - for i, c in enumerate(rgba_combos_Color_generator()): - gamma = gammas[i % gammas_len] - - corrected = pygame.Color(*[gamma_correct(x, gamma) for x in tuple(c)]) - lib_corrected = c.correct_gamma(gamma) - - self.assertTrue(corrected.r == lib_corrected.r) - self.assertTrue(corrected.g == lib_corrected.g) - self.assertTrue(corrected.b == lib_corrected.b) - self.assertTrue(corrected.a == lib_corrected.a) - - # TODO: test against statically defined verified _correct_ values - # assert corrected.r == 125 etc. - - def test_pickle(self): - import pickle - - c1 = pygame.Color(1, 2, 3, 4) - # c2 = pygame.Color(255,254,253,252) - pickle_string = pickle.dumps(c1) - c1_frompickle = pickle.loads(pickle_string) - self.assertEqual(c1, c1_frompickle) - - ################################################################################ - # only available if ctypes module is also available - - @unittest.skipIf(IS_PYPY, "PyPy has no ctypes") - def test_arraystruct(self): - - import pygame.tests.test_utils.arrinter as ai - import ctypes as ct - - c_byte_p = ct.POINTER(ct.c_byte) - c = pygame.Color(5, 7, 13, 23) - flags = ai.PAI_CONTIGUOUS | ai.PAI_FORTRAN | ai.PAI_ALIGNED | ai.PAI_NOTSWAPPED - for i in range(1, 5): - c.set_length(i) - inter = ai.ArrayInterface(c) - self.assertEqual(inter.two, 2) - self.assertEqual(inter.nd, 1) - self.assertEqual(inter.typekind, "u") - self.assertEqual(inter.itemsize, 1) - self.assertEqual(inter.flags, flags) - self.assertEqual(inter.shape[0], i) - self.assertEqual(inter.strides[0], 1) - data = ct.cast(inter.data, c_byte_p) - for j in range(i): - self.assertEqual(data[j], c[j]) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf(self): - from pygame.tests.test_utils import buftools - from ctypes import cast, POINTER, c_uint8 - - class ColorImporter(buftools.Importer): - def __init__(self, color, flags): - super(ColorImporter, self).__init__(color, flags) - self.items = cast(self.buf, POINTER(c_uint8)) - - def __getitem__(self, index): - if 0 <= index < 4: - return self.items[index] - raise IndexError( - "valid index values are between 0 and 3: " "got {}".format(index) - ) - - def __setitem__(self, index, value): - if 0 <= index < 4: - self.items[index] = value - else: - raise IndexError( - "valid index values are between 0 and 3: " - "got {}".format(index) - ) - - c = pygame.Color(50, 100, 150, 200) - imp = ColorImporter(c, buftools.PyBUF_SIMPLE) - self.assertTrue(imp.obj is c) - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.itemsize, 1) - self.assertEqual(imp.len, 4) - self.assertTrue(imp.readonly) - self.assertTrue(imp.format is None) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - for i in range(4): - self.assertEqual(c[i], imp[i]) - imp[0] = 60 - self.assertEqual(c.r, 60) - imp[1] = 110 - self.assertEqual(c.g, 110) - imp[2] = 160 - self.assertEqual(c.b, 160) - imp[3] = 210 - self.assertEqual(c.a, 210) - imp = ColorImporter(c, buftools.PyBUF_FORMAT) - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.itemsize, 1) - self.assertEqual(imp.len, 4) - self.assertEqual(imp.format, "B") - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.itemsize, 1) - self.assertEqual(imp.len, 4) - imp = ColorImporter(c, buftools.PyBUF_ND) - 
self.assertEqual(imp.ndim, 1) - self.assertEqual(imp.itemsize, 1) - self.assertEqual(imp.len, 4) - self.assertTrue(imp.format is None) - self.assertEqual(imp.shape, (4,)) - self.assertEqual(imp.strides, None) - imp = ColorImporter(c, buftools.PyBUF_STRIDES) - self.assertEqual(imp.ndim, 1) - self.assertTrue(imp.format is None) - self.assertEqual(imp.shape, (4,)) - self.assertEqual(imp.strides, (1,)) - imp = ColorImporter(c, buftools.PyBUF_C_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - imp = ColorImporter(c, buftools.PyBUF_F_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - imp = ColorImporter(c, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - for i in range(1, 5): - c.set_length(i) - imp = ColorImporter(c, buftools.PyBUF_ND) - self.assertEqual(imp.ndim, 1) - self.assertEqual(imp.len, i) - self.assertEqual(imp.shape, (i,)) - self.assertRaises(BufferError, ColorImporter, c, buftools.PyBUF_WRITABLE) - - def test_lerp(self): - # setup - Color = pygame.color.Color - - color0 = Color(0, 0, 0, 0) - color128 = Color(128, 128, 128, 128) - color255 = Color(255, 255, 255, 255) - color100 = Color(100, 100, 100, 100) - - # type checking - self.assertTrue(isinstance(color0.lerp(color128, 0.5), Color)) - - # common value testing - self.assertEqual(color0.lerp(color128, 0.5), Color(64, 64, 64, 64)) - self.assertEqual(color0.lerp(color128, 0.5), Color(64, 64, 64, 64)) - self.assertEqual(color128.lerp(color255, 0.5), Color(192, 192, 192, 192)) - self.assertEqual(color0.lerp(color255, 0.5), Color(128, 128, 128, 128)) - - # testing extremes - self.assertEqual(color0.lerp(color100, 0), color0) - self.assertEqual(color0.lerp(color100, 0.01), Color(1, 1, 1, 1)) - self.assertEqual(color0.lerp(color100, 0.99), Color(99, 99, 99, 99)) - self.assertEqual(color0.lerp(color100, 1), color100) - - # kwarg testing - self.assertEqual(color0.lerp(color=color100, amount=0.5), Color(50, 50, 50, 50)) - self.assertEqual(color0.lerp(amount=0.5, color=color100), Color(50, 50, 50, 50)) - - # invalid input testing - self.assertRaises(ValueError, lambda: color0.lerp(color128, 2.5)) - self.assertRaises(ValueError, lambda: color0.lerp(color128, -0.5)) - self.assertRaises(ValueError, lambda: color0.lerp((256, 0, 0, 0), 0.5)) - self.assertRaises(ValueError, lambda: color0.lerp((0, 256, 0, 0), 0.5)) - self.assertRaises(ValueError, lambda: color0.lerp((0, 0, 256, 0), 0.5)) - self.assertRaises(ValueError, lambda: color0.lerp((0, 0, 0, 256), 0.5)) - self.assertRaises(TypeError, lambda: color0.lerp(0.2, 0.5)) - - def test_premul_alpha(self): - # setup - Color = pygame.color.Color - - color0 = Color(0, 0, 0, 0) - alpha0 = Color(255, 255, 255, 0) - alpha49 = Color(255, 0, 0, 49) - alpha67 = Color(0, 255, 0, 67) - alpha73 = Color(0, 0, 255, 73) - alpha128 = Color(255, 255, 255, 128) - alpha199 = Color(255, 255, 255, 199) - alpha255 = Color(128, 128, 128, 255) - - # type checking - self.assertTrue(isinstance(color0.premul_alpha(), Color)) - - # hand crafted value testing - self.assertEqual(alpha0.premul_alpha(), Color(0, 0, 0, 0)) - self.assertEqual(alpha49.premul_alpha(), Color(49, 0, 0, 49)) - self.assertEqual(alpha67.premul_alpha(), Color(0, 67, 0, 67)) - self.assertEqual(alpha73.premul_alpha(), Color(0, 0, 73, 73)) - self.assertEqual(alpha128.premul_alpha(), Color(128, 128, 128, 128)) - self.assertEqual(alpha199.premul_alpha(), Color(199, 199, 199, 199)) - self.assertEqual(alpha255.premul_alpha(), Color(128, 128, 128, 255)) - - # full range of alpha auto sub-testing - test_colors = [ - (200, 30, 74), - (76, 83, 24), - (184, 21, 
6), - (74, 4, 74), - (76, 83, 24), - (184, 21, 234), - (160, 30, 74), - (96, 147, 204), - (198, 201, 60), - (132, 89, 74), - (245, 9, 224), - (184, 112, 6), - ] - - for r, g, b in test_colors: - for a in range(255): - with self.subTest(r=r, g=g, b=b, a=a): - alpha = a / 255.0 - self.assertEqual( - Color(r, g, b, a).premul_alpha(), - Color( - ((r + 1) * a) >> 8, - ((g + 1) * a) >> 8, - ((b + 1) * a) >> 8, - a, - ), - ) - - def test_update(self): - c = pygame.color.Color(0, 0, 0) - c.update(1, 2, 3, 4) - - self.assertEqual(c.r, 1) - self.assertEqual(c.g, 2) - self.assertEqual(c.b, 3) - self.assertEqual(c.a, 4) - - c = pygame.color.Color(0, 0, 0) - c.update([1, 2, 3, 4]) - - self.assertEqual(c.r, 1) - self.assertEqual(c.g, 2) - self.assertEqual(c.b, 3) - self.assertEqual(c.a, 4) - - c = pygame.color.Color(0, 0, 0) - c2 = pygame.color.Color(1, 2, 3, 4) - c.update(c2) - - self.assertEqual(c.r, 1) - self.assertEqual(c.g, 2) - self.assertEqual(c.b, 3) - self.assertEqual(c.a, 4) - - c = pygame.color.Color(1, 1, 1) - c.update("black") - - self.assertEqual(c.r, 0) - self.assertEqual(c.g, 0) - self.assertEqual(c.b, 0) - self.assertEqual(c.a, 255) - - c = pygame.color.Color(0, 0, 0, 120) - c.set_length(3) - c.update(1, 2, 3) - self.assertEqual(len(c), 3) - c.set_length(4) - self.assertEqual(c[3], 120) - - c.set_length(3) - c.update(1, 2, 3, 4) - self.assertEqual(len(c), 4) - - -class SubclassTest(unittest.TestCase): - class MyColor(pygame.Color): - def __init__(self, *args, **kwds): - super(SubclassTest.MyColor, self).__init__(*args, **kwds) - self.an_attribute = True - - def test_add(self): - mc1 = self.MyColor(128, 128, 128, 255) - self.assertTrue(mc1.an_attribute) - c2 = pygame.Color(64, 64, 64, 255) - mc2 = mc1 + c2 - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - c3 = c2 + mc1 - self.assertTrue(type(c3) is pygame.Color) - - def test_sub(self): - mc1 = self.MyColor(128, 128, 128, 255) - self.assertTrue(mc1.an_attribute) - c2 = pygame.Color(64, 64, 64, 255) - mc2 = mc1 - c2 - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - c3 = c2 - mc1 - self.assertTrue(type(c3) is pygame.Color) - - def test_mul(self): - mc1 = self.MyColor(128, 128, 128, 255) - self.assertTrue(mc1.an_attribute) - c2 = pygame.Color(64, 64, 64, 255) - mc2 = mc1 * c2 - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - c3 = c2 * mc1 - self.assertTrue(type(c3) is pygame.Color) - - def test_div(self): - mc1 = self.MyColor(128, 128, 128, 255) - self.assertTrue(mc1.an_attribute) - c2 = pygame.Color(64, 64, 64, 255) - mc2 = mc1 // c2 - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - c3 = c2 // mc1 - self.assertTrue(type(c3) is pygame.Color) - - def test_mod(self): - mc1 = self.MyColor(128, 128, 128, 255) - self.assertTrue(mc1.an_attribute) - c2 = pygame.Color(64, 64, 64, 255) - mc2 = mc1 % c2 - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - c3 = c2 % mc1 - self.assertTrue(type(c3) is pygame.Color) - - def test_inv(self): - mc1 = self.MyColor(64, 64, 64, 64) - self.assertTrue(mc1.an_attribute) - mc2 = ~mc1 - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - - def test_correct_gamma(self): - mc1 = self.MyColor(64, 70, 75, 255) - 
self.assertTrue(mc1.an_attribute) - mc2 = mc1.correct_gamma(0.03) - self.assertTrue(isinstance(mc2, self.MyColor)) - self.assertRaises(AttributeError, getattr, mc2, "an_attribute") - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/constants_test.py b/venv/Lib/site-packages/pygame/tests/constants_test.py deleted file mode 100644 index 452a8fe..0000000 --- a/venv/Lib/site-packages/pygame/tests/constants_test.py +++ /dev/null @@ -1,437 +0,0 @@ -import unittest -import pygame.constants - - -# K_* and KSCAN_* common names. -K_AND_KSCAN_COMMON_NAMES = ( - "UNKNOWN", - "BACKSPACE", - "TAB", - "CLEAR", - "RETURN", - "PAUSE", - "ESCAPE", - "SPACE", - "COMMA", - "MINUS", - "PERIOD", - "SLASH", - "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9", - "SEMICOLON", - "EQUALS", - "LEFTBRACKET", - "BACKSLASH", - "RIGHTBRACKET", - "DELETE", - "KP0", - "KP1", - "KP2", - "KP3", - "KP4", - "KP5", - "KP6", - "KP7", - "KP8", - "KP9", - "KP_PERIOD", - "KP_DIVIDE", - "KP_MULTIPLY", - "KP_MINUS", - "KP_PLUS", - "KP_ENTER", - "KP_EQUALS", - "UP", - "DOWN", - "RIGHT", - "LEFT", - "INSERT", - "HOME", - "END", - "PAGEUP", - "PAGEDOWN", - "F1", - "F2", - "F3", - "F4", - "F5", - "F6", - "F7", - "F8", - "F9", - "F10", - "F11", - "F12", - "F13", - "F14", - "F15", - "NUMLOCK", - "CAPSLOCK", - "SCROLLOCK", - "RSHIFT", - "LSHIFT", - "RCTRL", - "LCTRL", - "RALT", - "LALT", - "RMETA", - "LMETA", - "LSUPER", - "RSUPER", - "MODE", - "HELP", - "PRINT", - "SYSREQ", - "BREAK", - "MENU", - "POWER", - "EURO", - "KP_0", - "KP_1", - "KP_2", - "KP_3", - "KP_4", - "KP_5", - "KP_6", - "KP_7", - "KP_8", - "KP_9", - "NUMLOCKCLEAR", - "SCROLLLOCK", - "RGUI", - "LGUI", - "PRINTSCREEN", - "CURRENCYUNIT", - "CURRENCYSUBUNIT", -) - -# Constants that have the same value. -K_AND_KSCAN_COMMON_OVERLAPS = ( - ("KP0", "KP_0"), - ("KP1", "KP_1"), - ("KP2", "KP_2"), - ("KP3", "KP_3"), - ("KP4", "KP_4"), - ("KP5", "KP_5"), - ("KP6", "KP_6"), - ("KP7", "KP_7"), - ("KP8", "KP_8"), - ("KP9", "KP_9"), - ("NUMLOCK", "NUMLOCKCLEAR"), - ("SCROLLOCK", "SCROLLLOCK"), - ("LSUPER", "LMETA", "LGUI"), - ("RSUPER", "RMETA", "RGUI"), - ("PRINT", "PRINTSCREEN"), - ("BREAK", "PAUSE"), - ("EURO", "CURRENCYUNIT"), -) - - -def create_overlap_set(constant_names): - """Helper function to find overlapping constant values/names. - - Returns a set of fronzensets: - set(frozenset(names of overlapping constants), ...) - """ - # Create an overlap dict. - overlap_dict = {} - - for name in constant_names: - value = getattr(pygame.constants, name) - overlap_dict.setdefault(value, set()).add(name) - - # Get all entries with more than 1 value. - overlaps = set() - - for overlap_names in overlap_dict.values(): - if len(overlap_names) > 1: - overlaps.add(frozenset(overlap_names)) - - return overlaps - - -class KConstantsTests(unittest.TestCase): - """Test K_* (key) constants.""" - - # K_* specific names. - K_SPECIFIC_NAMES = ( - "a", - "b", - "c", - "d", - "e", - "f", - "g", - "h", - "i", - "j", - "k", - "l", - "m", - "n", - "o", - "p", - "q", - "r", - "s", - "t", - "u", - "v", - "w", - "x", - "y", - "z", - "QUOTE", - "BACKQUOTE", - "EXCLAIM", - "QUOTEDBL", - "HASH", - "DOLLAR", - "AMPERSAND", - "LEFTPAREN", - "RIGHTPAREN", - "ASTERISK", - "PLUS", - "COLON", - "LESS", - "GREATER", - "QUESTION", - "AT", - "CARET", - "UNDERSCORE", - "PERCENT", - ) - - # Create a sequence of all the K_* constant names. 
- K_NAMES = tuple("K_" + n for n in K_AND_KSCAN_COMMON_NAMES + K_SPECIFIC_NAMES) - - def test_k__existence(self): - """Ensures K constants exist.""" - for name in self.K_NAMES: - self.assertTrue( - hasattr(pygame.constants, name), "missing constant {}".format(name) - ) - - def test_k__type(self): - """Ensures K constants are the correct type.""" - for name in self.K_NAMES: - value = getattr(pygame.constants, name) - - self.assertIs(type(value), int) - - def test_k__value_overlap(self): - """Ensures no unexpected K constant values overlap.""" - EXPECTED_OVERLAPS = set( - [ - frozenset(["K_" + n for n in item]) - for item in K_AND_KSCAN_COMMON_OVERLAPS - ] - ) - - overlaps = create_overlap_set(self.K_NAMES) - - self.assertSetEqual(overlaps, EXPECTED_OVERLAPS) - - -class KscanConstantsTests(unittest.TestCase): - """Test KSCAN_* (scancode) constants.""" - - # KSCAN_* specific names. - KSCAN_SPECIFIC_NAMES = ( - "A", - "B", - "C", - "D", - "E", - "F", - "G", - "H", - "I", - "J", - "K", - "L", - "M", - "N", - "O", - "P", - "Q", - "R", - "S", - "T", - "U", - "V", - "W", - "X", - "Y", - "Z", - "APOSTROPHE", - "GRAVE", - "INTERNATIONAL1", - "INTERNATIONAL2", - "INTERNATIONAL3", - "INTERNATIONAL4", - "INTERNATIONAL5", - "INTERNATIONAL6", - "INTERNATIONAL7", - "INTERNATIONAL8", - "INTERNATIONAL9", - "LANG1", - "LANG2", - "LANG3", - "LANG4", - "LANG5", - "LANG6", - "LANG7", - "LANG8", - "LANG9", - "NONUSBACKSLASH", - "NONUSHASH", - ) - - # Create a sequence of all the KSCAN_* constant names. - KSCAN_NAMES = tuple( - "KSCAN_" + n for n in K_AND_KSCAN_COMMON_NAMES + KSCAN_SPECIFIC_NAMES - ) - - def test_kscan__existence(self): - """Ensures KSCAN constants exist.""" - for name in self.KSCAN_NAMES: - self.assertTrue( - hasattr(pygame.constants, name), "missing constant {}".format(name) - ) - - def test_kscan__type(self): - """Ensures KSCAN constants are the correct type.""" - for name in self.KSCAN_NAMES: - value = getattr(pygame.constants, name) - - self.assertIs(type(value), int) - - def test_kscan__value_overlap(self): - """Ensures no unexpected KSCAN constant values overlap.""" - EXPECTED_OVERLAPS = set( - [ - frozenset(["KSCAN_" + n for n in item]) - for item in K_AND_KSCAN_COMMON_OVERLAPS - ] - ) - - overlaps = create_overlap_set(self.KSCAN_NAMES) - - self.assertSetEqual(overlaps, EXPECTED_OVERLAPS) - - -class KmodConstantsTests(unittest.TestCase): - """Test KMOD_* (key modifier) constants.""" - - # KMOD_* constant names. - KMOD_CONSTANTS = ( - "KMOD_NONE", - "KMOD_LSHIFT", - "KMOD_RSHIFT", - "KMOD_SHIFT", - "KMOD_LCTRL", - "KMOD_RCTRL", - "KMOD_CTRL", - "KMOD_LALT", - "KMOD_RALT", - "KMOD_ALT", - "KMOD_LMETA", - "KMOD_RMETA", - "KMOD_META", - "KMOD_NUM", - "KMOD_CAPS", - "KMOD_MODE", - "KMOD_LGUI", - "KMOD_RGUI", - "KMOD_GUI", - ) - - def test_kmod__existence(self): - """Ensures KMOD constants exist.""" - for name in self.KMOD_CONSTANTS: - self.assertTrue( - hasattr(pygame.constants, name), "missing constant {}".format(name) - ) - - def test_kmod__type(self): - """Ensures KMOD constants are the correct type.""" - for name in self.KMOD_CONSTANTS: - value = getattr(pygame.constants, name) - - self.assertIs(type(value), int) - - def test_kmod__value_overlap(self): - """Ensures no unexpected KMOD constant values overlap.""" - # KMODs that have the same values. 
- EXPECTED_OVERLAPS = { - frozenset(["KMOD_LGUI", "KMOD_LMETA"]), - frozenset(["KMOD_RGUI", "KMOD_RMETA"]), - frozenset(["KMOD_GUI", "KMOD_META"]), - } - - overlaps = create_overlap_set(self.KMOD_CONSTANTS) - - self.assertSetEqual(overlaps, EXPECTED_OVERLAPS) - - def test_kmod__no_bitwise_overlap(self): - """Ensures certain KMOD constants have no overlapping bits.""" - NO_BITWISE_OVERLAP = ( - "KMOD_NONE", - "KMOD_LSHIFT", - "KMOD_RSHIFT", - "KMOD_LCTRL", - "KMOD_RCTRL", - "KMOD_LALT", - "KMOD_RALT", - "KMOD_LMETA", - "KMOD_RMETA", - "KMOD_NUM", - "KMOD_CAPS", - "KMOD_MODE", - ) - - kmods = 0 - - for name in NO_BITWISE_OVERLAP: - value = getattr(pygame.constants, name) - - self.assertFalse(kmods & value) - - kmods |= value - - def test_kmod__bitwise_overlap(self): - """Ensures certain KMOD constants have overlapping bits.""" - # KMODS that are comprised of other KMODs. - KMOD_COMPRISED_DICT = { - "KMOD_SHIFT": ("KMOD_LSHIFT", "KMOD_RSHIFT"), - "KMOD_CTRL": ("KMOD_LCTRL", "KMOD_RCTRL"), - "KMOD_ALT": ("KMOD_LALT", "KMOD_RALT"), - "KMOD_META": ("KMOD_LMETA", "KMOD_RMETA"), - "KMOD_GUI": ("KMOD_LGUI", "KMOD_RGUI"), - } - - for base_name, seq_names in KMOD_COMPRISED_DICT.items(): - expected_value = 0 # Reset. - - for name in seq_names: - expected_value |= getattr(pygame.constants, name) - - value = getattr(pygame.constants, base_name) - - self.assertEqual(value, expected_value) - - -################################################################################ - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/controller_test.py b/venv/Lib/site-packages/pygame/tests/controller_test.py deleted file mode 100644 index f05c00c..0000000 --- a/venv/Lib/site-packages/pygame/tests/controller_test.py +++ /dev/null @@ -1,357 +0,0 @@ -import unittest -import pygame -import pygame._sdl2.controller as controller -from pygame.tests.test_utils import prompt, question - - -class ControllerModuleTest(unittest.TestCase): - def setUp(self): - controller.init() - - def tearDown(self): - controller.quit() - - def test_init(self): - controller.quit() - controller.init() - self.assertTrue(controller.get_init()) - - def test_init__multiple(self): - controller.init() - controller.init() - self.assertTrue(controller.get_init()) - - def test_quit(self): - controller.quit() - self.assertFalse(controller.get_init()) - - def test_quit__multiple(self): - controller.quit() - controller.quit() - self.assertFalse(controller.get_init()) - - def test_get_init(self): - self.assertTrue(controller.get_init()) - - def test_get_eventstate(self): - controller.set_eventstate(True) - self.assertTrue(controller.get_eventstate()) - - controller.set_eventstate(False) - self.assertFalse(controller.get_eventstate()) - - controller.set_eventstate(True) - - def test_get_count(self): - self.assertGreaterEqual(controller.get_count(), 0) - - def test_is_controller(self): - for i in range(controller.get_count()): - if controller.is_controller(i): - c = controller.Controller(i) - self.assertIsInstance(c, controller.Controller) - c.quit() - else: - with self.assertRaises(pygame._sdl2.sdl2.error): - c = controller.Controller(i) - - with self.assertRaises(TypeError): - controller.is_controller("Test") - - def test_name_forindex(self): - self.assertIsNone(controller.name_forindex(-1)) - - -class ControllerTypeTest(unittest.TestCase): - def setUp(self): - controller.init() - - def tearDown(self): - controller.quit() - - def _get_first_controller(self): - for i in range(controller.get_count()): - if 
controller.is_controller(i): - return controller.Controller(i) - - def test_construction(self): - c = self._get_first_controller() - if c: - self.assertIsInstance(c, controller.Controller) - else: - self.skipTest("No controller connected") - - def test__auto_init(self): - c = self._get_first_controller() - if c: - self.assertTrue(c.get_init()) - else: - self.skipTest("No controller connected") - - def test_get_init(self): - c = self._get_first_controller() - if c: - self.assertTrue(c.get_init()) - c.quit() - self.assertFalse(c.get_init()) - else: - self.skipTest("No controller connected") - - def test_from_joystick(self): - for i in range(controller.get_count()): - if controller.is_controller(i): - joy = pygame.joystick.Joystick(i) - break - else: - self.skipTest("No controller connected") - - c = controller.Controller.from_joystick(joy) - self.assertIsInstance(c, controller.Controller) - - def test_as_joystick(self): - c = self._get_first_controller() - if c: - joy = c.as_joystick() - self.assertIsInstance(joy, type(pygame.joystick.Joystick(0))) - else: - self.skipTest("No controller connected") - - def test_get_mapping(self): - c = self._get_first_controller() - if c: - mapping = c.get_mapping() - self.assertIsInstance(mapping, dict) - self.assertIsNotNone(mapping["a"]) - else: - self.skipTest("No controller connected") - - def test_set_mapping(self): - c = self._get_first_controller() - if c: - mapping = c.get_mapping() - mapping["a"] = "b3" - mapping["y"] = "b0" - c.set_mapping(mapping) - new_mapping = c.get_mapping() - - self.assertEqual(len(mapping), len(new_mapping)) - for i in mapping: - if mapping[i] not in ("a", "y"): - self.assertEqual(mapping[i], new_mapping[i]) - else: - if i == "a": - self.assertEqual(new_mapping[i], mapping["y"]) - else: - self.assertEqual(new_mapping[i], mapping["a"]) - else: - self.skipTest("No controller connected") - - -class ControllerInteractiveTest(unittest.TestCase): - __tags__ = ["interactive"] - - def _get_first_controller(self): - for i in range(controller.get_count()): - if controller.is_controller(i): - return controller.Controller(i) - - def setUp(self): - controller.init() - - def tearDown(self): - controller.quit() - - def test__get_count_interactive(self): - prompt( - "Please connect at least one controller " - "before the test for controller.get_count() starts" - ) - - # Reset the number of joysticks counted - controller.quit() - controller.init() - - joystick_num = controller.get_count() - ans = question( - "get_count() thinks there are {} joysticks " - "connected. 
Is that correct?".format(joystick_num) - ) - - self.assertTrue(ans) - - def test_set_eventstate_on_interactive(self): - c = self._get_first_controller() - if not c: - self.skipTest("No controller connected") - - pygame.display.init() - pygame.font.init() - - screen = pygame.display.set_mode((400, 400)) - font = pygame.font.Font(None, 20) - running = True - - screen.fill((255, 255, 255)) - screen.blit( - font.render("Press button 'x' (on ps4) or 'a' (on xbox).", True, (0, 0, 0)), - (0, 0), - ) - pygame.display.update() - - controller.set_eventstate(True) - - while running: - for event in pygame.event.get(): - if event.type == pygame.QUIT: - running = False - - if event.type == pygame.CONTROLLERBUTTONDOWN: - running = False - - pygame.display.quit() - pygame.font.quit() - - def test_set_eventstate_off_interactive(self): - c = self._get_first_controller() - if not c: - self.skipTest("No controller connected") - - pygame.display.init() - pygame.font.init() - - screen = pygame.display.set_mode((400, 400)) - font = pygame.font.Font(None, 20) - running = True - - screen.fill((255, 255, 255)) - screen.blit( - font.render("Press button 'x' (on ps4) or 'a' (on xbox).", True, (0, 0, 0)), - (0, 0), - ) - pygame.display.update() - - controller.set_eventstate(False) - - while running: - for event in pygame.event.get(pygame.QUIT): - if event: - running = False - - if c.get_button(pygame.CONTROLLER_BUTTON_A): - if pygame.event.peek(pygame.CONTROLLERBUTTONDOWN): - pygame.display.quit() - pygame.font.quit() - self.fail() - else: - running = False - - pygame.display.quit() - pygame.font.quit() - - def test_get_button_interactive(self): - c = self._get_first_controller() - if not c: - self.skipTest("No controller connected") - - pygame.display.init() - pygame.font.init() - - screen = pygame.display.set_mode((400, 400)) - font = pygame.font.Font(None, 20) - running = True - - label1 = font.render( - "Press button 'x' (on ps4) or 'a' (on xbox).", True, (0, 0, 0) - ) - - label2 = font.render( - 'The two values should match up. Press "y" or "n" to confirm.', - True, - (0, 0, 0), - ) - - is_pressed = [False, False] # event, get_button() - while running: - for event in pygame.event.get(): - if event.type == pygame.QUIT: - running = False - if event.type == pygame.CONTROLLERBUTTONDOWN and event.button == 0: - is_pressed[0] = True - if event.type == pygame.CONTROLLERBUTTONUP and event.button == 0: - is_pressed[0] = False - - if event.type == pygame.KEYDOWN: - if event.key == pygame.K_y: - running = False - if event.key == pygame.K_n: - running = False - pygame.display.quit() - pygame.font.quit() - self.fail() - - is_pressed[1] = c.get_button(pygame.CONTROLLER_BUTTON_A) - - screen.fill((255, 255, 255)) - screen.blit(label1, (0, 0)) - screen.blit(label2, (0, 20)) - screen.blit(font.render(str(is_pressed), True, (0, 0, 0)), (0, 40)) - pygame.display.update() - - pygame.display.quit() - pygame.font.quit() - - def test_get_axis_interactive(self): - c = self._get_first_controller() - if not c: - self.skipTest("No controller connected") - - pygame.display.init() - pygame.font.init() - - screen = pygame.display.set_mode((400, 400)) - font = pygame.font.Font(None, 20) - running = True - - label1 = font.render( - "Press down the right trigger. The value on-screen should", True, (0, 0, 0) - ) - - label2 = font.render( - "indicate how far the trigger is pressed down. This value should", - True, - (0, 0, 0), - ) - - label3 = font.render( - 'be in the range of 0-32767. 
Press "y" or "n" to confirm.', True, (0, 0, 0) - ) - - while running: - for event in pygame.event.get(): - if event.type == pygame.QUIT: - running = False - - if event.type == pygame.KEYDOWN: - if event.key == pygame.K_y: - running = False - if event.key == pygame.K_n: - running = False - pygame.display.quit() - pygame.font.quit() - self.fail() - - right_trigger = c.get_axis(pygame.CONTROLLER_AXIS_TRIGGERRIGHT) - - screen.fill((255, 255, 255)) - screen.blit(label1, (0, 0)) - screen.blit(label2, (0, 20)) - screen.blit(label3, (0, 40)) - screen.blit(font.render(str(right_trigger), True, (0, 0, 0)), (0, 60)) - pygame.display.update() - - pygame.display.quit() - pygame.font.quit() - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/cursors_test.py b/venv/Lib/site-packages/pygame/tests/cursors_test.py deleted file mode 100644 index bb8f766..0000000 --- a/venv/Lib/site-packages/pygame/tests/cursors_test.py +++ /dev/null @@ -1,291 +0,0 @@ -import unittest -from pygame.tests.test_utils import fixture_path -import pygame - - -class CursorsModuleTest(unittest.TestCase): - def test_compile(self): - - # __doc__ (as of 2008-06-25) for pygame.cursors.compile: - - # pygame.cursors.compile(strings, black, white,xor) -> data, mask - # compile cursor strings into cursor data - # - # This takes a set of strings with equal length and computes - # the binary data for that cursor. The string widths must be - # divisible by 8. - # - # The black and white arguments are single letter strings that - # tells which characters will represent black pixels, and which - # characters represent white pixels. All other characters are - # considered clear. - # - # This returns a tuple containing the cursor data and cursor mask - # data. Both these arguments are used when setting a cursor with - # pygame.mouse.set_cursor(). - - # Various types of input strings - test_cursor1 = ("X.X.XXXX", "XXXXXX..", " XXXX ") - - test_cursor2 = ( - "X.X.XXXX", - "XXXXXX..", - "XXXXXX ", - "XXXXXX..", - "XXXXXX..", - "XXXXXX", - "XXXXXX..", - "XXXXXX..", - ) - test_cursor3 = (".XX.", " ", ".. ", "X.. 
X") - - # Test such that total number of strings is not divisible by 8 - with self.assertRaises(ValueError): - pygame.cursors.compile(test_cursor1) - - # Test such that size of individual string is not divisible by 8 - with self.assertRaises(ValueError): - pygame.cursors.compile(test_cursor2) - - # Test such that neither size of individual string nor total number of strings is divisible by 8 - with self.assertRaises(ValueError): - pygame.cursors.compile(test_cursor3) - - # Test that checks whether the byte data from compile funtion is equal to actual byte data - actual_byte_data = ( - 192, - 0, - 0, - 224, - 0, - 0, - 240, - 0, - 0, - 216, - 0, - 0, - 204, - 0, - 0, - 198, - 0, - 0, - 195, - 0, - 0, - 193, - 128, - 0, - 192, - 192, - 0, - 192, - 96, - 0, - 192, - 48, - 0, - 192, - 56, - 0, - 192, - 248, - 0, - 220, - 192, - 0, - 246, - 96, - 0, - 198, - 96, - 0, - 6, - 96, - 0, - 3, - 48, - 0, - 3, - 48, - 0, - 1, - 224, - 0, - 1, - 128, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - ), ( - 192, - 0, - 0, - 224, - 0, - 0, - 240, - 0, - 0, - 248, - 0, - 0, - 252, - 0, - 0, - 254, - 0, - 0, - 255, - 0, - 0, - 255, - 128, - 0, - 255, - 192, - 0, - 255, - 224, - 0, - 255, - 240, - 0, - 255, - 248, - 0, - 255, - 248, - 0, - 255, - 192, - 0, - 247, - 224, - 0, - 199, - 224, - 0, - 7, - 224, - 0, - 3, - 240, - 0, - 3, - 240, - 0, - 1, - 224, - 0, - 1, - 128, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - ) - - cursor = pygame.cursors.compile(pygame.cursors.thickarrow_strings) - self.assertEqual(cursor, actual_byte_data) - - # Test such that cursor byte data obtained from compile function is valid in pygame.mouse.set_cursor() - pygame.display.init() - try: - pygame.mouse.set_cursor((24, 24), (0, 0), *cursor) - except pygame.error as e: - if "not currently supported" in str(e): - unittest.skip("skipping test as set_cursor() is not supported") - finally: - pygame.display.quit() - - ################################################################################ - - def test_load_xbm(self): - # __doc__ (as of 2008-06-25) for pygame.cursors.load_xbm: - - # pygame.cursors.load_xbm(cursorfile, maskfile) -> cursor_args - # reads a pair of XBM files into set_cursor arguments - # - # Arguments can either be filenames or filelike objects - # with the readlines method. Not largely tested, but - # should work with typical XBM files. - - # Test that load_xbm will take filenames as arguments - cursorfile = fixture_path(r"xbm_cursors/white_sizing.xbm") - maskfile = fixture_path(r"xbm_cursors/white_sizing_mask.xbm") - cursor = pygame.cursors.load_xbm(cursorfile, maskfile) - - # Test that load_xbm will take file objects as arguments - with open(cursorfile) as cursor_f, open(maskfile) as mask_f: - cursor = pygame.cursors.load_xbm(cursor_f, mask_f) - - # Can it load using pathlib.Path? - import pathlib - - cursor = pygame.cursors.load_xbm( - pathlib.Path(cursorfile), pathlib.Path(maskfile) - ) - - # Is it in a format that mouse.set_cursor won't blow up on? 
- pygame.display.init() - try: - pygame.mouse.set_cursor(*cursor) - except pygame.error as e: - if "not currently supported" in str(e): - unittest.skip("skipping test as set_cursor() is not supported") - finally: - pygame.display.quit() - - def test_Cursor(self): - """Ensure that the cursor object parses information properly""" - - c1 = pygame.cursors.Cursor(pygame.SYSTEM_CURSOR_CROSSHAIR) - - self.assertEqual(c1.data, (pygame.SYSTEM_CURSOR_CROSSHAIR,)) - self.assertEqual(c1.type, "system") - - c2 = pygame.cursors.Cursor(c1) - - self.assertEqual(c1, c2) - - with self.assertRaises(TypeError): - pygame.cursors.Cursor(-34002) - with self.assertRaises(TypeError): - pygame.cursors.Cursor("a", "b", "c", "d") - with self.assertRaises(TypeError): - pygame.cursors.Cursor((2,)) - - c3 = pygame.cursors.Cursor((0, 0), pygame.Surface((20, 20))) - - self.assertEqual(c3.data[0], (0, 0)) - self.assertEqual(c3.data[1].get_size(), (20, 20)) - self.assertEqual(c3.type, "color") - - xormask, andmask = pygame.cursors.compile(pygame.cursors.thickarrow_strings) - c4 = pygame.cursors.Cursor((24, 24), (0, 0), xormask, andmask) - - self.assertEqual(c4.data, ((24, 24), (0, 0), xormask, andmask)) - self.assertEqual(c4.type, "bitmap") - - -################################################################################ - -if __name__ == "__main__": - unittest.main() - -################################################################################ diff --git a/venv/Lib/site-packages/pygame/tests/display_test.py b/venv/Lib/site-packages/pygame/tests/display_test.py deleted file mode 100644 index a44ff15..0000000 --- a/venv/Lib/site-packages/pygame/tests/display_test.py +++ /dev/null @@ -1,811 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -import os -import time - -import pygame, pygame.transform - -from pygame.tests.test_utils import question - -from pygame import display - - -class DisplayModuleTest(unittest.TestCase): - default_caption = "pygame window" - - def setUp(self): - display.init() - - def tearDown(self): - display.quit() - - def test_Info(self): - inf = pygame.display.Info() - self.assertNotEqual(inf.current_h, -1) - self.assertNotEqual(inf.current_w, -1) - # probably have an older SDL than 1.2.10 if -1. 
- - screen = pygame.display.set_mode((128, 128)) - inf = pygame.display.Info() - self.assertEqual(inf.current_h, 128) - self.assertEqual(inf.current_w, 128) - - def test_flip(self): - screen = pygame.display.set_mode((100, 100)) - - # test without a change - self.assertIsNone(pygame.display.flip()) - - # test with a change - pygame.Surface.fill(screen, (66, 66, 53)) - self.assertIsNone(pygame.display.flip()) - - # test without display init - pygame.display.quit() - with self.assertRaises(pygame.error): - (pygame.display.flip()) - - # test without window - del screen - with self.assertRaises(pygame.error): - (pygame.display.flip()) - - def test_get_active(self): - """Test the get_active function""" - - # Initially, the display is not active - pygame.display.quit() - self.assertEqual(pygame.display.get_active(), False) - - # get_active defaults to true after a set_mode - pygame.display.init() - pygame.display.set_mode((640, 480)) - self.assertEqual(pygame.display.get_active(), True) - - # get_active after init/quit should be False - # since no display is visible - pygame.display.quit() - pygame.display.init() - self.assertEqual(pygame.display.get_active(), False) - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", - "requires the SDL_VIDEODRIVER to be a non dummy value", - ) - def test_get_active_iconify(self): - """Test the get_active function after an iconify""" - - # According to the docs, get_active should return - # false if the display is iconified - pygame.display.set_mode((640, 480)) - - pygame.event.clear() - pygame.display.iconify() - - for _ in range(100): - time.sleep(0.01) - pygame.event.pump() - - self.assertEqual(pygame.display.get_active(), False) - - def test_get_caption(self): - screen = display.set_mode((100, 100)) - - self.assertEqual(display.get_caption()[0], self.default_caption) - - def test_set_caption(self): - TEST_CAPTION = "test" - screen = display.set_mode((100, 100)) - - self.assertIsNone(display.set_caption(TEST_CAPTION)) - self.assertEqual(display.get_caption()[0], TEST_CAPTION) - self.assertEqual(display.get_caption()[1], TEST_CAPTION) - - def test_caption_unicode(self): - TEST_CAPTION = "å°" - display.set_caption(TEST_CAPTION) - self.assertEqual(display.get_caption()[0], TEST_CAPTION) - - def test_get_driver(self): - drivers = [ - "aalib", - "android", - "arm", - "cocoa", - "dga", - "directx", - "directfb", - "dummy", - "emscripten", - "fbcon", - "ggi", - "haiku", - "khronos", - "kmsdrm", - "nacl", - "offscreen", - "pandora", - "psp", - "qnx", - "raspberry", - "svgalib", - "uikit", - "vgl", - "vivante", - "wayland", - "windows", - "windib", - "winrt", - "x11", - ] - driver = display.get_driver() - self.assertIn(driver, drivers) - - display.quit() - with self.assertRaises(pygame.error): - driver = display.get_driver() - - def test_get_init(self): - """Ensures the module's initialization state can be retrieved.""" - # display.init() already called in setUp() - self.assertTrue(display.get_init()) - - # This test can be uncommented when issues #991 and #993 are resolved. 
- @unittest.skipIf(True, "SDL2 issues") - def test_get_surface(self): - """Ensures get_surface gets the current display surface.""" - lengths = (1, 5, 100) - - for expected_size in ((w, h) for w in lengths for h in lengths): - for expected_depth in (8, 16, 24, 32): - expected_surface = display.set_mode(expected_size, 0, expected_depth) - - surface = pygame.display.get_surface() - - self.assertEqual(surface, expected_surface) - self.assertIsInstance(surface, pygame.Surface) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_bitsize(), expected_depth) - - def test_get_surface__mode_not_set(self): - """Ensures get_surface handles the display mode not being set.""" - surface = pygame.display.get_surface() - - self.assertIsNone(surface) - - def test_get_wm_info(self): - wm_info = display.get_wm_info() - # Assert function returns a dictionary type - self.assertIsInstance(wm_info, dict) - - wm_info_potential_keys = { - "colorbuffer", - "connection", - "data", - "dfb", - "display", - "framebuffer", - "fswindow", - "hdc", - "hglrc", - "hinstance", - "lock_func", - "resolveFramebuffer", - "shell_surface", - "surface", - "taskHandle", - "unlock_func", - "wimpVersion", - "window", - "wmwindow", - } - - # If any unexpected dict keys are present, they - # will be stored in set wm_info_remaining_keys - wm_info_remaining_keys = set(wm_info.keys()).difference(wm_info_potential_keys) - - # Assert set is empty (& therefore does not - # contain unexpected dict keys) - self.assertFalse(wm_info_remaining_keys) - - @unittest.skipIf( - ( - "skipping for all because some failures on rasppi and maybe other platforms" - or os.environ.get("SDL_VIDEODRIVER") == "dummy" - ), - 'OpenGL requires a non-"dummy" SDL_VIDEODRIVER', - ) - def test_gl_get_attribute(self): - - screen = display.set_mode((0, 0), pygame.OPENGL) - - # We create a list where we store the original values of the - # flags before setting them with a different value. 
- original_values = [] - - original_values.append(pygame.display.gl_get_attribute(pygame.GL_ALPHA_SIZE)) - original_values.append(pygame.display.gl_get_attribute(pygame.GL_DEPTH_SIZE)) - original_values.append(pygame.display.gl_get_attribute(pygame.GL_STENCIL_SIZE)) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_ACCUM_RED_SIZE) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_ACCUM_GREEN_SIZE) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_ACCUM_BLUE_SIZE) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_ACCUM_ALPHA_SIZE) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLEBUFFERS) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLESAMPLES) - ) - original_values.append(pygame.display.gl_get_attribute(pygame.GL_STEREO)) - - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_ACCELERATED_VISUAL) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_CONTEXT_MAJOR_VERSION) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_CONTEXT_MINOR_VERSION) - ) - original_values.append(pygame.display.gl_get_attribute(pygame.GL_CONTEXT_FLAGS)) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_CONTEXT_PROFILE_MASK) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_SHARE_WITH_CURRENT_CONTEXT) - ) - original_values.append( - pygame.display.gl_get_attribute(pygame.GL_FRAMEBUFFER_SRGB_CAPABLE) - ) - - # Setting the flags with values supposedly different from the original values - - # assign SDL1-supported values with gl_set_attribute - pygame.display.gl_set_attribute(pygame.GL_ALPHA_SIZE, 8) - pygame.display.gl_set_attribute(pygame.GL_DEPTH_SIZE, 24) - pygame.display.gl_set_attribute(pygame.GL_STENCIL_SIZE, 8) - pygame.display.gl_set_attribute(pygame.GL_ACCUM_RED_SIZE, 16) - pygame.display.gl_set_attribute(pygame.GL_ACCUM_GREEN_SIZE, 16) - pygame.display.gl_set_attribute(pygame.GL_ACCUM_BLUE_SIZE, 16) - pygame.display.gl_set_attribute(pygame.GL_ACCUM_ALPHA_SIZE, 16) - pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLEBUFFERS, 1) - pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLESAMPLES, 1) - pygame.display.gl_set_attribute(pygame.GL_STEREO, 0) - pygame.display.gl_set_attribute(pygame.GL_ACCELERATED_VISUAL, 0) - pygame.display.gl_set_attribute(pygame.GL_CONTEXT_MAJOR_VERSION, 1) - pygame.display.gl_set_attribute(pygame.GL_CONTEXT_MINOR_VERSION, 1) - pygame.display.gl_set_attribute(pygame.GL_CONTEXT_FLAGS, 0) - pygame.display.gl_set_attribute(pygame.GL_CONTEXT_PROFILE_MASK, 0) - pygame.display.gl_set_attribute(pygame.GL_SHARE_WITH_CURRENT_CONTEXT, 0) - pygame.display.gl_set_attribute(pygame.GL_FRAMEBUFFER_SRGB_CAPABLE, 0) - - # We create a list where we store the values that we set each flag to - set_values = [8, 24, 8, 16, 16, 16, 16, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0] - - # We create a list where we store the values after getting them - get_values = [] - - get_values.append(pygame.display.gl_get_attribute(pygame.GL_ALPHA_SIZE)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_DEPTH_SIZE)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_STENCIL_SIZE)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_ACCUM_RED_SIZE)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_ACCUM_GREEN_SIZE)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_ACCUM_BLUE_SIZE)) - 
get_values.append(pygame.display.gl_get_attribute(pygame.GL_ACCUM_ALPHA_SIZE)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLEBUFFERS)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLESAMPLES)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_STEREO)) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_ACCELERATED_VISUAL)) - get_values.append( - pygame.display.gl_get_attribute(pygame.GL_CONTEXT_MAJOR_VERSION) - ) - get_values.append( - pygame.display.gl_get_attribute(pygame.GL_CONTEXT_MINOR_VERSION) - ) - get_values.append(pygame.display.gl_get_attribute(pygame.GL_CONTEXT_FLAGS)) - get_values.append( - pygame.display.gl_get_attribute(pygame.GL_CONTEXT_PROFILE_MASK) - ) - get_values.append( - pygame.display.gl_get_attribute(pygame.GL_SHARE_WITH_CURRENT_CONTEXT) - ) - get_values.append( - pygame.display.gl_get_attribute(pygame.GL_FRAMEBUFFER_SRGB_CAPABLE) - ) - - # We check to see if the values that we get correspond to the values that we set - # them to or to the original values. - for i in range(len(original_values)): - self.assertTrue( - (get_values[i] == original_values[i]) - or (get_values[i] == set_values[i]) - ) - - # test using non-flag argument - with self.assertRaises(TypeError): - pygame.display.gl_get_attribute("DUMMY") - - def todo_test_gl_set_attribute(self): - - # __doc__ (as of 2008-08-02) for pygame.display.gl_set_attribute: - - # pygame.display.gl_set_attribute(flag, value): return None - # request an opengl display attribute for the display mode - # - # When calling pygame.display.set_mode() with the pygame.OPENGL flag, - # Pygame automatically handles setting the OpenGL attributes like - # color and doublebuffering. OpenGL offers several other attributes - # you may want control over. Pass one of these attributes as the flag, - # and its appropriate value. This must be called before - # pygame.display.set_mode() - # - # The OPENGL flags are; - # GL_ALPHA_SIZE, GL_DEPTH_SIZE, GL_STENCIL_SIZE, GL_ACCUM_RED_SIZE, - # GL_ACCUM_GREEN_SIZE, GL_ACCUM_BLUE_SIZE, GL_ACCUM_ALPHA_SIZE, - # GL_MULTISAMPLEBUFFERS, GL_MULTISAMPLESAMPLES, GL_STEREO - - self.fail() - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") in ["dummy", "android"], - "iconify is only supported on some video drivers/platforms", - ) - def test_iconify(self): - pygame.display.set_mode((640, 480)) - - self.assertEqual(pygame.display.get_active(), True) - - success = pygame.display.iconify() - - if success: - active_event = window_minimized_event = False - # make sure we cycle the event loop enough to get the display - # hidden. 
Test that both ACTIVEEVENT and WINDOWMINIMISED event appears - for _ in range(50): - time.sleep(0.01) - for event in pygame.event.get(): - if event.type == pygame.ACTIVEEVENT: - if not event.gain and event.state == pygame.APPACTIVE: - active_event = True - if event.type == pygame.WINDOWMINIMIZED: - window_minimized_event = True - - self.assertTrue(window_minimized_event) - self.assertTrue(active_event) - self.assertFalse(pygame.display.get_active()) - - else: - self.fail("Iconify not supported on this platform, please skip") - - def test_init(self): - """Ensures the module is initialized after init called.""" - # display.init() already called in setUp(), so quit and re-init - display.quit() - display.init() - - self.assertTrue(display.get_init()) - - def test_init__multiple(self): - """Ensures the module is initialized after multiple init calls.""" - display.init() - display.init() - - self.assertTrue(display.get_init()) - - def test_list_modes(self): - modes = pygame.display.list_modes(depth=0, flags=pygame.FULLSCREEN, display=0) - # modes == -1 means any mode is supported. - if modes != -1: - self.assertEqual(len(modes[0]), 2) - self.assertEqual(type(modes[0][0]), int) - - modes = pygame.display.list_modes() - if modes != -1: - self.assertEqual(len(modes[0]), 2) - self.assertEqual(type(modes[0][0]), int) - self.assertEqual(len(modes), len(set(modes))) - - modes = pygame.display.list_modes(depth=0, flags=0, display=0) - if modes != -1: - self.assertEqual(len(modes[0]), 2) - self.assertEqual(type(modes[0][0]), int) - - def test_mode_ok(self): - pygame.display.mode_ok((128, 128)) - modes = pygame.display.list_modes() - if modes != -1: - size = modes[0] - self.assertNotEqual(pygame.display.mode_ok(size), 0) - - pygame.display.mode_ok((128, 128), 0, 32) - pygame.display.mode_ok((128, 128), flags=0, depth=32, display=0) - - def test_mode_ok_fullscreen(self): - modes = pygame.display.list_modes() - if modes != -1: - size = modes[0] - self.assertNotEqual( - pygame.display.mode_ok(size, flags=pygame.FULLSCREEN), 0 - ) - - def test_mode_ok_scaled(self): - modes = pygame.display.list_modes() - if modes != -1: - size = modes[0] - self.assertNotEqual(pygame.display.mode_ok(size, flags=pygame.SCALED), 0) - - def test_get_num_displays(self): - self.assertGreater(pygame.display.get_num_displays(), 0) - - def test_quit(self): - """Ensures the module is not initialized after quit called.""" - display.quit() - - self.assertFalse(display.get_init()) - - def test_quit__multiple(self): - """Ensures the module is not initialized after multiple quit calls.""" - display.quit() - display.quit() - - self.assertFalse(display.get_init()) - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", "Needs a not dummy videodriver" - ) - def test_set_gamma(self): - pygame.display.set_mode((1, 1)) - - gammas = [0.25, 0.5, 0.88, 1.0] - for gamma in gammas: - with self.subTest(gamma=gamma): - self.assertEqual(pygame.display.set_gamma(gamma), True) - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", "Needs a not dummy videodriver" - ) - def test_set_gamma__tuple(self): - pygame.display.set_mode((1, 1)) - - gammas = [(0.5, 0.5, 0.5), (1.0, 1.0, 1.0), (0.25, 0.33, 0.44)] - for r, g, b in gammas: - with self.subTest(r=r, g=g, b=b): - self.assertEqual(pygame.display.set_gamma(r, g, b), True) - - @unittest.skipIf( - not hasattr(pygame.display, "set_gamma_ramp"), - "Not all systems and hardware support gamma ramps", - ) - def test_set_gamma_ramp(self): - - # __doc__ (as of 2008-08-02) for 
pygame.display.set_gamma_ramp: - - # change the hardware gamma ramps with a custom lookup - # pygame.display.set_gamma_ramp(red, green, blue): return bool - # set_gamma_ramp(red, green, blue): return bool - # - # Set the red, green, and blue gamma ramps with an explicit lookup - # table. Each argument should be sequence of 256 integers. The - # integers should range between 0 and 0xffff. Not all systems and - # hardware support gamma ramps, if the function succeeds it will - # return True. - # - pygame.display.set_mode((5, 5)) - r = list(range(256)) - g = [number + 256 for number in r] - b = [number + 256 for number in g] - isSupported = pygame.display.set_gamma_ramp(r, g, b) - if isSupported: - self.assertTrue(pygame.display.set_gamma_ramp(r, g, b)) - else: - self.assertFalse(pygame.display.set_gamma_ramp(r, g, b)) - - def test_set_mode_kwargs(self): - - pygame.display.set_mode(size=(1, 1), flags=0, depth=0, display=0) - - def test_set_mode_scaled(self): - surf = pygame.display.set_mode( - size=(1, 1), flags=pygame.SCALED, depth=0, display=0 - ) - winsize = pygame.display.get_window_size() - self.assertEqual( - winsize[0] % surf.get_size()[0], - 0, - "window width should be a multiple of the surface width", - ) - self.assertEqual( - winsize[1] % surf.get_size()[1], - 0, - "window height should be a multiple of the surface height", - ) - self.assertEqual( - winsize[0] / surf.get_size()[0], winsize[1] / surf.get_size()[1] - ) - - def test_set_mode_vector2(self): - pygame.display.set_mode(pygame.Vector2(1, 1)) - - def test_set_mode_unscaled(self): - """Ensures a window created with SCALED can become smaller.""" - # see https://github.com/pygame/pygame/issues/2327 - - screen = pygame.display.set_mode((300, 300), pygame.SCALED) - self.assertEqual(screen.get_size(), (300, 300)) - - screen = pygame.display.set_mode((200, 200)) - self.assertEqual(screen.get_size(), (200, 200)) - - def test_screensaver_support(self): - pygame.display.set_allow_screensaver(True) - self.assertTrue(pygame.display.get_allow_screensaver()) - pygame.display.set_allow_screensaver(False) - self.assertFalse(pygame.display.get_allow_screensaver()) - pygame.display.set_allow_screensaver() - self.assertTrue(pygame.display.get_allow_screensaver()) - - # the following test fails always with SDL2 - @unittest.skipIf(True, "set_palette() not supported in SDL2") - def test_set_palette(self): - with self.assertRaises(pygame.error): - palette = [1, 2, 3] - pygame.display.set_palette(palette) - pygame.display.set_mode((1024, 768), 0, 8) - palette = [] - self.assertIsNone(pygame.display.set_palette(palette)) - - with self.assertRaises(ValueError): - palette = 12 - pygame.display.set_palette(palette) - with self.assertRaises(TypeError): - palette = [[1, 2], [1, 2]] - pygame.display.set_palette(palette) - with self.assertRaises(TypeError): - palette = [[0, 0, 0, 0, 0]] + [[x, x, x, x, x] for x in range(1, 255)] - pygame.display.set_palette(palette) - with self.assertRaises(TypeError): - palette = "qwerty" - pygame.display.set_palette(palette) - with self.assertRaises(TypeError): - palette = [[123, 123, 123] * 10000] - pygame.display.set_palette(palette) - with self.assertRaises(TypeError): - palette = [1, 2, 3] - pygame.display.set_palette(palette) - - skip_list = ["dummy", "android"] - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") in skip_list, - "requires the SDL_VIDEODRIVER to be non dummy", - ) - def test_toggle_fullscreen(self): - """Test for toggle fullscreen""" - - # try to toggle fullscreen with no active display - # 
this should result in an error - pygame.display.quit() - with self.assertRaises(pygame.error): - pygame.display.toggle_fullscreen() - - pygame.display.init() - width_height = (640, 480) - test_surf = pygame.display.set_mode(width_height) - - # try to toggle fullscreen - try: - pygame.display.toggle_fullscreen() - - except pygame.error: - self.fail() - - else: - # if toggle success, the width/height should be a - # value found in list_modes - if pygame.display.toggle_fullscreen() == 1: - boolean = ( - test_surf.get_width(), - test_surf.get_height(), - ) in pygame.display.list_modes( - depth=0, flags=pygame.FULLSCREEN, display=0 - ) - - self.assertEqual(boolean, True) - - # if not original width/height should be preserved - else: - self.assertEqual( - (test_surf.get_width(), test_surf.get_height()), width_height - ) - - -class DisplayUpdateTest(unittest.TestCase): - def question(self, qstr): - """this is used in the interactive subclass.""" - - def setUp(self): - display.init() - self.screen = pygame.display.set_mode((500, 500)) - self.screen.fill("black") - pygame.display.flip() - pygame.event.pump() # so mac updates - - def tearDown(self): - display.quit() - - def test_update_negative(self): - """takes rects with negative values.""" - self.screen.fill("green") - - r1 = pygame.Rect(0, 0, 100, 100) - pygame.display.update(r1) - - r2 = pygame.Rect(-10, 0, 100, 100) - pygame.display.update(r2) - - r3 = pygame.Rect(-10, 0, -100, -100) - pygame.display.update(r3) - - self.question("Is the screen green in (0, 0, 100, 100)?") - - def test_update_sequence(self): - """only updates the part of the display given by the rects.""" - self.screen.fill("green") - rects = [ - pygame.Rect(0, 0, 100, 100), - pygame.Rect(100, 0, 100, 100), - pygame.Rect(200, 0, 100, 100), - pygame.Rect(300, 300, 100, 100), - ] - pygame.display.update(rects) - pygame.event.pump() # so mac updates - - self.question(f"Is the screen green in {rects}?") - - def test_update_none_skipped(self): - """None is skipped inside sequences.""" - self.screen.fill("green") - rects = ( - None, - pygame.Rect(100, 0, 100, 100), - None, - pygame.Rect(200, 0, 100, 100), - pygame.Rect(300, 300, 100, 100), - ) - pygame.display.update(rects) - pygame.event.pump() # so mac updates - - self.question(f"Is the screen green in {rects}?") - - def test_update_none(self): - """does NOT update the display.""" - self.screen.fill("green") - pygame.display.update(None) - pygame.event.pump() # so mac updates - self.question(f"Is the screen black and NOT green?") - - def test_update_no_args(self): - """does NOT update the display.""" - self.screen.fill("green") - pygame.display.update() - pygame.event.pump() # so mac updates - self.question(f"Is the WHOLE screen green?") - - def test_update_args(self): - """updates the display using the args as a rect.""" - self.screen.fill("green") - pygame.display.update(100, 100, 100, 100) - pygame.event.pump() # so mac updates - self.question("Is the screen green in (100, 100, 100, 100)?") - - def test_update_incorrect_args(self): - """raises a ValueError when inputs are wrong.""" - - with self.assertRaises(ValueError): - pygame.display.update(100, "asdf", 100, 100) - - with self.assertRaises(ValueError): - pygame.display.update([100, "asdf", 100, 100]) - - def test_update_no_init(self): - """raises a pygame.error.""" - - pygame.display.quit() - with self.assertRaises(pygame.error): - pygame.display.update() - - -class DisplayUpdateInteractiveTest(DisplayUpdateTest): - """Because we want these tests to run as interactive and 
not interactive.""" - - __tags__ = ["interactive"] - - def question(self, qstr): - """since this is the interactive sublcass we ask a question.""" - question(qstr) - - -class DisplayInteractiveTest(unittest.TestCase): - - __tags__ = ["interactive"] - - def test_set_icon_interactive(self): - - os.environ["SDL_VIDEO_WINDOW_POS"] = "100,250" - pygame.display.quit() - pygame.display.init() - - test_icon = pygame.Surface((32, 32)) - test_icon.fill((255, 0, 0)) - - pygame.display.set_icon(test_icon) - screen = pygame.display.set_mode((400, 100)) - pygame.display.set_caption("Is the window icon a red square?") - - response = question("Is the display icon red square?") - - self.assertTrue(response) - pygame.display.quit() - - def test_set_gamma_ramp(self): - - os.environ["SDL_VIDEO_WINDOW_POS"] = "100,250" - pygame.display.quit() - pygame.display.init() - - screen = pygame.display.set_mode((400, 100)) - screen.fill((100, 100, 100)) - - blue_ramp = [x * 256 for x in range(0, 256)] - blue_ramp[100] = 150 * 256 # Can't tint too far or gamma ramps fail - normal_ramp = [x * 256 for x in range(0, 256)] - # test to see if this platform supports gamma ramps - gamma_success = False - if pygame.display.set_gamma_ramp(normal_ramp, normal_ramp, blue_ramp): - pygame.display.update() - gamma_success = True - - if gamma_success: - response = question("Is the window background tinted blue?") - self.assertTrue(response) - # restore normal ramp - pygame.display.set_gamma_ramp(normal_ramp, normal_ramp, normal_ramp) - - pygame.display.quit() - - -@unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", - 'OpenGL requires a non-"dummy" SDL_VIDEODRIVER', -) -class DisplayOpenGLTest(unittest.TestCase): - def test_screen_size_opengl(self): - """returns a surface with the same size requested. 
- |tags:display,slow,opengl| - """ - pygame.display.init() - screen = pygame.display.set_mode((640, 480), pygame.OPENGL) - self.assertEqual((640, 480), screen.get_size()) - - -class X11CrashTest(unittest.TestCase): - def test_x11_set_mode_crash_gh1654(self): - # Test for https://github.com/pygame/pygame/issues/1654 - # If unfixed, this will trip a segmentation fault - pygame.display.init() - pygame.display.quit() - screen = pygame.display.set_mode((640, 480), 0) - self.assertEqual((640, 480), screen.get_size()) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/docs_test.py b/venv/Lib/site-packages/pygame/tests/docs_test.py deleted file mode 100644 index de021a8..0000000 --- a/venv/Lib/site-packages/pygame/tests/docs_test.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import subprocess -import sys -import unittest - - -class DocsIncludedTest(unittest.TestCase): - def test_doc_import_works(self): - from pygame.docs.__main__ import has_local_docs, open_docs - - @unittest.skipIf("CI" not in os.environ, "Docs not required for local builds") - def test_docs_included(self): - from pygame.docs.__main__ import has_local_docs - - self.assertTrue(has_local_docs()) - - @unittest.skipIf("CI" not in os.environ, "Docs not required for local builds") - def test_docs_command(self): - try: - subprocess.run( - [sys.executable, "-m", "pygame.docs"], - timeout=5, - # check ensures an exception is raised when the process fails - check=True, - # pipe stdout/stderr so that they don't clutter main stdout - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - except subprocess.TimeoutExpired: - # timeout errors are not an issue - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/draw_test.py b/venv/Lib/site-packages/pygame/tests/draw_test.py deleted file mode 100644 index d876060..0000000 --- a/venv/Lib/site-packages/pygame/tests/draw_test.py +++ /dev/null @@ -1,6563 +0,0 @@ -import math -import unittest -import sys -import warnings - -import pygame -from pygame import draw -from pygame import draw_py -from pygame.locals import SRCALPHA -from pygame.tests import test_utils -from pygame.math import Vector2 - - -RED = BG_RED = pygame.Color("red") -GREEN = FG_GREEN = pygame.Color("green") - -# Clockwise from the top left corner and ending with the center point. -RECT_POSITION_ATTRIBUTES = ( - "topleft", - "midtop", - "topright", - "midright", - "bottomright", - "midbottom", - "bottomleft", - "midleft", - "center", -) - - -def get_border_values(surface, width, height): - """Returns a list containing lists with the values of the surface's - borders. - """ - border_top = [surface.get_at((x, 0)) for x in range(width)] - border_left = [surface.get_at((0, y)) for y in range(height)] - border_right = [surface.get_at((width - 1, y)) for y in range(height)] - border_bottom = [surface.get_at((x, height - 1)) for x in range(width)] - - return [border_top, border_left, border_right, border_bottom] - - -def corners(surface): - """Returns a tuple with the corner positions of the given surface. - - Clockwise from the top left corner. - """ - width, height = surface.get_size() - return ((0, 0), (width - 1, 0), (width - 1, height - 1), (0, height - 1)) - - -def rect_corners_mids_and_center(rect): - """Returns a tuple with each corner, mid, and the center for a given rect. - - Clockwise from the top left corner and ending with the center point. 
- """ - return ( - rect.topleft, - rect.midtop, - rect.topright, - rect.midright, - rect.bottomright, - rect.midbottom, - rect.bottomleft, - rect.midleft, - rect.center, - ) - - -def border_pos_and_color(surface): - """Yields each border position and its color for a given surface. - - Clockwise from the top left corner. - """ - width, height = surface.get_size() - right, bottom = width - 1, height - 1 - - # Top edge. - for x in range(width): - pos = (x, 0) - yield pos, surface.get_at(pos) - - # Right edge. - # Top right done in top edge loop. - for y in range(1, height): - pos = (right, y) - yield pos, surface.get_at(pos) - - # Bottom edge. - # Bottom right done in right edge loop. - for x in range(right - 1, -1, -1): - pos = (x, bottom) - yield pos, surface.get_at(pos) - - # Left edge. - # Bottom left done in bottom edge loop. Top left done in top edge loop. - for y in range(bottom - 1, 0, -1): - pos = (0, y) - yield pos, surface.get_at(pos) - - -def get_color_points(surface, color, bounds_rect=None, match_color=True): - """Get all the points of a given color on the surface within the given - bounds. - - If bounds_rect is None the full surface is checked. - If match_color is True, all points matching the color are returned, - otherwise all points not matching the color are returned. - """ - get_at = surface.get_at # For possible speed up. - - if bounds_rect is None: - x_range = range(surface.get_width()) - y_range = range(surface.get_height()) - else: - x_range = range(bounds_rect.left, bounds_rect.right) - y_range = range(bounds_rect.top, bounds_rect.bottom) - - surface.lock() # For possible speed up. - - if match_color: - pts = [(x, y) for x in x_range for y in y_range if get_at((x, y)) == color] - else: - pts = [(x, y) for x in x_range for y in y_range if get_at((x, y)) != color] - - surface.unlock() - return pts - - -def create_bounding_rect(surface, surf_color, default_pos): - """Create a rect to bound all the pixels that don't match surf_color. - - The default_pos parameter is used to position the bounding rect for the - case where all pixels match the surf_color. - """ - width, height = surface.get_clip().size - xmin, ymin = width, height - xmax, ymax = -1, -1 - get_at = surface.get_at # For possible speed up. - - surface.lock() # For possible speed up. - - for y in range(height): - for x in range(width): - if get_at((x, y)) != surf_color: - xmin = min(x, xmin) - xmax = max(x, xmax) - ymin = min(y, ymin) - ymax = max(y, ymax) - - surface.unlock() - - if -1 == xmax: - # No points means a 0 sized rect positioned at default_pos. - return pygame.Rect(default_pos, (0, 0)) - return pygame.Rect((xmin, ymin), (xmax - xmin + 1, ymax - ymin + 1)) - - -class InvalidBool(object): - """To help test invalid bool values.""" - - __nonzero__ = None - __bool__ = None - - -class DrawTestCase(unittest.TestCase): - """Base class to test draw module functions.""" - - draw_rect = staticmethod(draw.rect) - draw_polygon = staticmethod(draw.polygon) - draw_circle = staticmethod(draw.circle) - draw_ellipse = staticmethod(draw.ellipse) - draw_arc = staticmethod(draw.arc) - draw_line = staticmethod(draw.line) - draw_lines = staticmethod(draw.lines) - draw_aaline = staticmethod(draw.aaline) - draw_aalines = staticmethod(draw.aalines) - - -class PythonDrawTestCase(unittest.TestCase): - """Base class to test draw_py module functions.""" - - # draw_py is currently missing some functions. 
- # draw_rect = staticmethod(draw_py.draw_rect) - draw_polygon = staticmethod(draw_py.draw_polygon) - # draw_circle = staticmethod(draw_py.draw_circle) - # draw_ellipse = staticmethod(draw_py.draw_ellipse) - # draw_arc = staticmethod(draw_py.draw_arc) - draw_line = staticmethod(draw_py.draw_line) - draw_lines = staticmethod(draw_py.draw_lines) - draw_aaline = staticmethod(draw_py.draw_aaline) - draw_aalines = staticmethod(draw_py.draw_aalines) - - -### Ellipse Testing ########################################################### - - -class DrawEllipseMixin(object): - """Mixin tests for drawing ellipses. - - This class contains all the general ellipse drawing tests. - """ - - def test_ellipse__args(self): - """Ensures draw ellipse accepts the correct args.""" - bounds_rect = self.draw_ellipse( - pygame.Surface((3, 3)), (0, 10, 0, 50), pygame.Rect((0, 0), (3, 2)), 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__args_without_width(self): - """Ensures draw ellipse accepts the args without a width.""" - bounds_rect = self.draw_ellipse( - pygame.Surface((2, 2)), (1, 1, 1, 99), pygame.Rect((1, 1), (1, 1)) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__args_with_negative_width(self): - """Ensures draw ellipse accepts the args with negative width.""" - bounds_rect = self.draw_ellipse( - pygame.Surface((3, 3)), (0, 10, 0, 50), pygame.Rect((2, 3), (3, 2)), -1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - self.assertEqual(bounds_rect, pygame.Rect(2, 3, 0, 0)) - - def test_ellipse__args_with_width_gt_radius(self): - """Ensures draw ellipse accepts the args with - width > rect.w // 2 and width > rect.h // 2. - """ - rect = pygame.Rect((0, 0), (4, 4)) - bounds_rect = self.draw_ellipse( - pygame.Surface((3, 3)), (0, 10, 0, 50), rect, rect.w // 2 + 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - bounds_rect = self.draw_ellipse( - pygame.Surface((3, 3)), (0, 10, 0, 50), rect, rect.h // 2 + 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__kwargs(self): - """Ensures draw ellipse accepts the correct kwargs - with and without a width arg. 
- """ - kwargs_list = [ - { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("yellow"), - "rect": pygame.Rect((0, 0), (3, 2)), - "width": 1, - }, - { - "surface": pygame.Surface((2, 1)), - "color": (0, 10, 20), - "rect": (0, 0, 1, 1), - }, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_ellipse(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__kwargs_order_independent(self): - """Ensures draw ellipse's kwargs are not order dependent.""" - bounds_rect = self.draw_ellipse( - color=(1, 2, 3), - surface=pygame.Surface((3, 2)), - width=0, - rect=pygame.Rect((1, 0), (1, 1)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__args_missing(self): - """Ensures draw ellipse detects any missing required args.""" - surface = pygame.Surface((1, 1)) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse(surface, pygame.Color("red")) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse() - - def test_ellipse__kwargs_missing(self): - """Ensures draw ellipse detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((1, 2)), - "color": pygame.Color("red"), - "rect": pygame.Rect((1, 0), (2, 2)), - "width": 2, - } - - for name in ("rect", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse(**invalid_kwargs) - - def test_ellipse__arg_invalid_types(self): - """Ensures draw ellipse detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - rect = pygame.Rect((1, 1), (1, 1)) - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_ellipse(surface, color, rect, "1") - - with self.assertRaises(TypeError): - # Invalid rect. - bounds_rect = self.draw_ellipse(surface, color, (1, 2, 3, 4, 5), 1) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_ellipse(surface, 2.3, rect, 0) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_ellipse(rect, color, rect, 2) - - def test_ellipse__kwarg_invalid_types(self): - """Ensures draw ellipse detects invalid kwarg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("green") - rect = pygame.Rect((0, 1), (1, 1)) - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "rect": rect, - "width": 1, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "rect": rect, - "width": 1, - }, - { - "surface": surface, - "color": color, - "rect": (0, 0, 0), # Invalid rect. - "width": 1, - }, - {"surface": surface, "color": color, "rect": rect, "width": 1.1}, - ] # Invalid width. 
- - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse(**kwargs) - - def test_ellipse__kwarg_invalid_name(self): - """Ensures draw ellipse detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - rect = pygame.Rect((0, 1), (2, 2)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "invalid": 1, - }, - {"surface": surface, "color": color, "rect": rect, "invalid": 1}, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse(**kwargs) - - def test_ellipse__args_and_kwargs(self): - """Ensures draw ellipse accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 1)) - color = (255, 255, 0, 0) - rect = pygame.Rect((1, 0), (2, 1)) - width = 0 - kwargs = {"surface": surface, "color": color, "rect": rect, "width": width} - - for name in ("surface", "color", "rect", "width"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_ellipse(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_ellipse(surface, color, **kwargs) - elif "rect" == name: - bounds_rect = self.draw_ellipse(surface, color, rect, **kwargs) - else: - bounds_rect = self.draw_ellipse(surface, color, rect, width, **kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__valid_width_values(self): - """Ensures draw ellipse accepts different width values.""" - pos = (1, 1) - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - color = (10, 20, 30, 255) - kwargs = { - "surface": surface, - "color": color, - "rect": pygame.Rect(pos, (3, 2)), - "width": None, - } - - for width in (-1000, -10, -1, 0, 1, 10, 1000): - surface.fill(surface_color) # Clear for each test. - kwargs["width"] = width - expected_color = color if width >= 0 else surface_color - - bounds_rect = self.draw_ellipse(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__valid_rect_formats(self): - """Ensures draw ellipse accepts different rect formats.""" - pos = (1, 1) - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((4, 4)) - kwargs = {"surface": surface, "color": expected_color, "rect": None, "width": 0} - rects = (pygame.Rect(pos, (1, 3)), (pos, (2, 1)), (pos[0], pos[1], 1, 1)) - - for rect in rects: - surface.fill(surface_color) # Clear for each test. - kwargs["rect"] = rect - - bounds_rect = self.draw_ellipse(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__valid_color_formats(self): - """Ensures draw ellipse accepts different color formats.""" - pos = (1, 1) - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": None, - "rect": pygame.Rect(pos, (1, 2)), - "width": 0, - } - reds = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in reds: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_ellipse(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_ellipse__invalid_color_formats(self): - """Ensures draw ellipse handles invalid color formats correctly.""" - pos = (1, 1) - surface = pygame.Surface((4, 3)) - kwargs = { - "surface": surface, - "color": None, - "rect": pygame.Rect(pos, (2, 2)), - "width": 1, - } - - for expected_color in (2.3, surface): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_ellipse(**kwargs) - - def test_ellipse(self): - """Tests ellipses of differing sizes on surfaces of differing sizes. - - Checks if the number of sides touching the border of the surface is - correct. - """ - left_top = [(0, 0), (1, 0), (0, 1), (1, 1)] - sizes = [(4, 4), (5, 4), (4, 5), (5, 5)] - color = (1, 13, 24, 255) - - def same_size(width, height, border_width): - """Test for ellipses with the same size as the surface.""" - surface = pygame.Surface((width, height)) - - self.draw_ellipse(surface, color, (0, 0, width, height), border_width) - - # For each of the four borders check if it contains the color - borders = get_border_values(surface, width, height) - for border in borders: - self.assertTrue(color in border) - - def not_same_size(width, height, border_width, left, top): - """Test for ellipses that aren't the same size as the surface.""" - surface = pygame.Surface((width, height)) - - self.draw_ellipse( - surface, color, (left, top, width - 1, height - 1), border_width - ) - - borders = get_border_values(surface, width, height) - - # Check if two sides of the ellipse are touching the border - sides_touching = [color in border for border in borders].count(True) - self.assertEqual(sides_touching, 2) - - for width, height in sizes: - for border_width in (0, 1): - same_size(width, height, border_width) - for left, top in left_top: - not_same_size(width, height, border_width, left, top) - - def test_ellipse__big_ellipse(self): - """Test for big ellipse that could overflow in algorithm""" - width = 1025 - height = 1025 - border = 1 - x_value_test = int(0.4 * height) - y_value_test = int(0.4 * height) - surface = pygame.Surface((width, height)) - - self.draw_ellipse(surface, (255, 0, 0), (0, 0, width, height), border) - colored_pixels = 0 - for y in range(height): - if surface.get_at((x_value_test, y)) == (255, 0, 0): - colored_pixels += 1 - for x in range(width): - if surface.get_at((x, y_value_test)) == (255, 0, 0): - colored_pixels += 1 - self.assertEqual(colored_pixels, border * 4) - - def test_ellipse__thick_line(self): - """Ensures a thick lined ellipse is drawn correctly.""" - ellipse_color = pygame.Color("yellow") - surface_color = pygame.Color("black") - surface = pygame.Surface((40, 40)) - rect = pygame.Rect((0, 0), (31, 23)) - rect.center = surface.get_rect().center - - # As the lines get thicker the internals of the ellipse are not - # cleanly defined. So only test up to a few thicknesses before the - # maximum thickness. - for thickness in range(1, min(*rect.size) // 2 - 2): - surface.fill(surface_color) # Clear for each test. - - self.draw_ellipse(surface, ellipse_color, rect, thickness) - - surface.lock() # For possible speed up. - - # Check vertical thickness on the ellipse's top. 
- x = rect.centerx - y_start = rect.top - y_end = rect.top + thickness - 1 - - for y in range(y_start, y_end + 1): - self.assertEqual(surface.get_at((x, y)), ellipse_color, thickness) - - # Check pixels above and below this line. - self.assertEqual(surface.get_at((x, y_start - 1)), surface_color, thickness) - self.assertEqual(surface.get_at((x, y_end + 1)), surface_color, thickness) - - # Check vertical thickness on the ellipse's bottom. - x = rect.centerx - y_start = rect.bottom - thickness - y_end = rect.bottom - 1 - - for y in range(y_start, y_end + 1): - self.assertEqual(surface.get_at((x, y)), ellipse_color, thickness) - - # Check pixels above and below this line. - self.assertEqual(surface.get_at((x, y_start - 1)), surface_color, thickness) - self.assertEqual(surface.get_at((x, y_end + 1)), surface_color, thickness) - - # Check horizontal thickness on the ellipse's left. - x_start = rect.left - x_end = rect.left + thickness - 1 - y = rect.centery - - for x in range(x_start, x_end + 1): - self.assertEqual(surface.get_at((x, y)), ellipse_color, thickness) - - # Check pixels to the left and right of this line. - self.assertEqual(surface.get_at((x_start - 1, y)), surface_color, thickness) - self.assertEqual(surface.get_at((x_end + 1, y)), surface_color, thickness) - - # Check horizontal thickness on the ellipse's right. - x_start = rect.right - thickness - x_end = rect.right - 1 - y = rect.centery - - for x in range(x_start, x_end + 1): - self.assertEqual(surface.get_at((x, y)), ellipse_color, thickness) - - # Check pixels to the left and right of this line. - self.assertEqual(surface.get_at((x_start - 1, y)), surface_color, thickness) - self.assertEqual(surface.get_at((x_end + 1, y)), surface_color, thickness) - - surface.unlock() - - def test_ellipse__no_holes(self): - width = 80 - height = 70 - surface = pygame.Surface((width + 1, height)) - rect = pygame.Rect(0, 0, width, height) - for thickness in range(1, 37, 5): - surface.fill("BLACK") - self.draw_ellipse(surface, "RED", rect, thickness) - for y in range(height): - number_of_changes = 0 - drawn_pixel = False - for x in range(width + 1): - if ( - not drawn_pixel - and surface.get_at((x, y)) == pygame.Color("RED") - or drawn_pixel - and surface.get_at((x, y)) == pygame.Color("BLACK") - ): - drawn_pixel = not drawn_pixel - number_of_changes += 1 - if y < thickness or y > height - thickness - 1: - self.assertEqual(number_of_changes, 2) - else: - self.assertEqual(number_of_changes, 4) - - def test_ellipse__max_width(self): - """Ensures an ellipse with max width (and greater) is drawn correctly.""" - ellipse_color = pygame.Color("yellow") - surface_color = pygame.Color("black") - surface = pygame.Surface((40, 40)) - rect = pygame.Rect((0, 0), (31, 21)) - rect.center = surface.get_rect().center - max_thickness = (min(*rect.size) + 1) // 2 - - for thickness in range(max_thickness, max_thickness + 3): - surface.fill(surface_color) # Clear for each test. - - self.draw_ellipse(surface, ellipse_color, rect, thickness) - - surface.lock() # For possible speed up. - - # Check vertical thickness. - for y in range(rect.top, rect.bottom): - self.assertEqual(surface.get_at((rect.centerx, y)), ellipse_color) - - # Check horizontal thickness. - for x in range(rect.left, rect.right): - self.assertEqual(surface.get_at((x, rect.centery)), ellipse_color) - - # Check pixels above and below ellipse. 
- self.assertEqual( - surface.get_at((rect.centerx, rect.top - 1)), surface_color - ) - self.assertEqual( - surface.get_at((rect.centerx, rect.bottom + 1)), surface_color - ) - - # Check pixels to the left and right of the ellipse. - self.assertEqual( - surface.get_at((rect.left - 1, rect.centery)), surface_color - ) - self.assertEqual( - surface.get_at((rect.right + 1, rect.centery)), surface_color - ) - - surface.unlock() - - def _check_1_pixel_sized_ellipse( - self, surface, collide_rect, surface_color, ellipse_color - ): - # Helper method to check the surface for 1 pixel wide and/or high - # ellipses. - surf_w, surf_h = surface.get_size() - - surface.lock() # For possible speed up. - - for pos in ((x, y) for y in range(surf_h) for x in range(surf_w)): - # Since the ellipse is just a line we can use a rect to help find - # where it is expected to be drawn. - if collide_rect.collidepoint(pos): - expected_color = ellipse_color - else: - expected_color = surface_color - - self.assertEqual( - surface.get_at(pos), - expected_color, - "collide_rect={}, pos={}".format(collide_rect, pos), - ) - - surface.unlock() - - def test_ellipse__1_pixel_width(self): - """Ensures an ellipse with a width of 1 is drawn correctly. - - An ellipse with a width of 1 pixel is a vertical line. - """ - ellipse_color = pygame.Color("red") - surface_color = pygame.Color("black") - surf_w, surf_h = 10, 20 - - surface = pygame.Surface((surf_w, surf_h)) - rect = pygame.Rect((0, 0), (1, 0)) - collide_rect = rect.copy() - - # Calculate some positions. - off_left = -1 - off_right = surf_w - off_bottom = surf_h - center_x = surf_w // 2 - center_y = surf_h // 2 - - # Test some even and odd heights. - for ellipse_h in range(6, 10): - collide_rect.h = ellipse_h - rect.h = ellipse_h - - # Calculate some variable positions. - off_top = -(ellipse_h + 1) - half_off_top = -(ellipse_h // 2) - half_off_bottom = surf_h - (ellipse_h // 2) - - # Draw the ellipse in different positions: fully on-surface, - # partially off-surface, and fully off-surface. - positions = ( - (off_left, off_top), - (off_left, half_off_top), - (off_left, center_y), - (off_left, half_off_bottom), - (off_left, off_bottom), - (center_x, off_top), - (center_x, half_off_top), - (center_x, center_y), - (center_x, half_off_bottom), - (center_x, off_bottom), - (off_right, off_top), - (off_right, half_off_top), - (off_right, center_y), - (off_right, half_off_bottom), - (off_right, off_bottom), - ) - - for rect_pos in positions: - surface.fill(surface_color) # Clear before each draw. - rect.topleft = rect_pos - collide_rect.topleft = rect_pos - - self.draw_ellipse(surface, ellipse_color, rect) - - self._check_1_pixel_sized_ellipse( - surface, collide_rect, surface_color, ellipse_color - ) - - def test_ellipse__1_pixel_width_spanning_surface(self): - """Ensures an ellipse with a width of 1 is drawn correctly - when spanning the height of the surface. - - An ellipse with a width of 1 pixel is a vertical line. - """ - ellipse_color = pygame.Color("red") - surface_color = pygame.Color("black") - surf_w, surf_h = 10, 20 - - surface = pygame.Surface((surf_w, surf_h)) - rect = pygame.Rect((0, 0), (1, surf_h + 2)) # Longer than the surface. - - # Draw the ellipse in different positions: on-surface and off-surface. 
- positions = ( - (-1, -1), # (off_left, off_top) - (0, -1), # (left_edge, off_top) - (surf_w // 2, -1), # (center_x, off_top) - (surf_w - 1, -1), # (right_edge, off_top) - (surf_w, -1), - ) # (off_right, off_top) - - for rect_pos in positions: - surface.fill(surface_color) # Clear before each draw. - rect.topleft = rect_pos - - self.draw_ellipse(surface, ellipse_color, rect) - - self._check_1_pixel_sized_ellipse( - surface, rect, surface_color, ellipse_color - ) - - def test_ellipse__1_pixel_height(self): - """Ensures an ellipse with a height of 1 is drawn correctly. - - An ellipse with a height of 1 pixel is a horizontal line. - """ - ellipse_color = pygame.Color("red") - surface_color = pygame.Color("black") - surf_w, surf_h = 20, 10 - - surface = pygame.Surface((surf_w, surf_h)) - rect = pygame.Rect((0, 0), (0, 1)) - collide_rect = rect.copy() - - # Calculate some positions. - off_right = surf_w - off_top = -1 - off_bottom = surf_h - center_x = surf_w // 2 - center_y = surf_h // 2 - - # Test some even and odd widths. - for ellipse_w in range(6, 10): - collide_rect.w = ellipse_w - rect.w = ellipse_w - - # Calculate some variable positions. - off_left = -(ellipse_w + 1) - half_off_left = -(ellipse_w // 2) - half_off_right = surf_w - (ellipse_w // 2) - - # Draw the ellipse in different positions: fully on-surface, - # partially off-surface, and fully off-surface. - positions = ( - (off_left, off_top), - (half_off_left, off_top), - (center_x, off_top), - (half_off_right, off_top), - (off_right, off_top), - (off_left, center_y), - (half_off_left, center_y), - (center_x, center_y), - (half_off_right, center_y), - (off_right, center_y), - (off_left, off_bottom), - (half_off_left, off_bottom), - (center_x, off_bottom), - (half_off_right, off_bottom), - (off_right, off_bottom), - ) - - for rect_pos in positions: - surface.fill(surface_color) # Clear before each draw. - rect.topleft = rect_pos - collide_rect.topleft = rect_pos - - self.draw_ellipse(surface, ellipse_color, rect) - - self._check_1_pixel_sized_ellipse( - surface, collide_rect, surface_color, ellipse_color - ) - - def test_ellipse__1_pixel_height_spanning_surface(self): - """Ensures an ellipse with a height of 1 is drawn correctly - when spanning the width of the surface. - - An ellipse with a height of 1 pixel is a horizontal line. - """ - ellipse_color = pygame.Color("red") - surface_color = pygame.Color("black") - surf_w, surf_h = 20, 10 - - surface = pygame.Surface((surf_w, surf_h)) - rect = pygame.Rect((0, 0), (surf_w + 2, 1)) # Wider than the surface. - - # Draw the ellipse in different positions: on-surface and off-surface. - positions = ( - (-1, -1), # (off_left, off_top) - (-1, 0), # (off_left, top_edge) - (-1, surf_h // 2), # (off_left, center_y) - (-1, surf_h - 1), # (off_left, bottom_edge) - (-1, surf_h), - ) # (off_left, off_bottom) - - for rect_pos in positions: - surface.fill(surface_color) # Clear before each draw. - rect.topleft = rect_pos - - self.draw_ellipse(surface, ellipse_color, rect) - - self._check_1_pixel_sized_ellipse( - surface, rect, surface_color, ellipse_color - ) - - def test_ellipse__1_pixel_width_and_height(self): - """Ensures an ellipse with a width and height of 1 is drawn correctly. - - An ellipse with a width and height of 1 pixel is a single pixel. - """ - ellipse_color = pygame.Color("red") - surface_color = pygame.Color("black") - surf_w, surf_h = 10, 10 - - surface = pygame.Surface((surf_w, surf_h)) - rect = pygame.Rect((0, 0), (1, 1)) - - # Calculate some positions. 
- off_left = -1 - off_right = surf_w - off_top = -1 - off_bottom = surf_h - left_edge = 0 - right_edge = surf_w - 1 - top_edge = 0 - bottom_edge = surf_h - 1 - center_x = surf_w // 2 - center_y = surf_h // 2 - - # Draw the ellipse in different positions: center surface, - # top/bottom/left/right edges, and off-surface. - positions = ( - (off_left, off_top), - (off_left, top_edge), - (off_left, center_y), - (off_left, bottom_edge), - (off_left, off_bottom), - (left_edge, off_top), - (left_edge, top_edge), - (left_edge, center_y), - (left_edge, bottom_edge), - (left_edge, off_bottom), - (center_x, off_top), - (center_x, top_edge), - (center_x, center_y), - (center_x, bottom_edge), - (center_x, off_bottom), - (right_edge, off_top), - (right_edge, top_edge), - (right_edge, center_y), - (right_edge, bottom_edge), - (right_edge, off_bottom), - (off_right, off_top), - (off_right, top_edge), - (off_right, center_y), - (off_right, bottom_edge), - (off_right, off_bottom), - ) - - for rect_pos in positions: - surface.fill(surface_color) # Clear before each draw. - rect.topleft = rect_pos - - self.draw_ellipse(surface, ellipse_color, rect) - - self._check_1_pixel_sized_ellipse( - surface, rect, surface_color, ellipse_color - ) - - def test_ellipse__bounding_rect(self): - """Ensures draw ellipse returns the correct bounding rect. - - Tests ellipses on and off the surface and a range of width/thickness - values. - """ - ellipse_color = pygame.Color("red") - surf_color = pygame.Color("black") - min_width = min_height = 5 - max_width = max_height = 7 - sizes = ((min_width, min_height), (max_width, max_height)) - surface = pygame.Surface((20, 20), 0, 32) - surf_rect = surface.get_rect() - # Make a rect that is bigger than the surface to help test drawing - # ellipses off and partially off the surface. - big_rect = surf_rect.inflate(min_width * 2 + 1, min_height * 2 + 1) - - for pos in rect_corners_mids_and_center( - surf_rect - ) + rect_corners_mids_and_center(big_rect): - # Each of the ellipse's rect position attributes will be set to - # the pos value. - for attr in RECT_POSITION_ATTRIBUTES: - # Test using different rect sizes and thickness values. - for width, height in sizes: - ellipse_rect = pygame.Rect((0, 0), (width, height)) - setattr(ellipse_rect, attr, pos) - - for thickness in (0, 1, 2, 3, min(width, height)): - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_ellipse( - surface, ellipse_color, ellipse_rect, thickness - ) - - # Calculating the expected_rect after the ellipse - # is drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect( - surface, surf_color, ellipse_rect.topleft - ) - - self.assertEqual(bounding_rect, expected_rect) - - def test_ellipse__surface_clip(self): - """Ensures draw ellipse respects a surface's clip area. - - Tests drawing the ellipse filled and unfilled. - """ - surfw = surfh = 30 - ellipse_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (11, 11)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the ellipse's pos. - - for width in (0, 1): # Filled and unfilled. - # Test centering the ellipse along the clip rect's edge. - for center in rect_corners_mids_and_center(clip_rect): - # Get the expected points by drawing the ellipse without the - # clip area set. 
- pos_rect.center = center - surface.set_clip(None) - surface.fill(surface_color) - self.draw_ellipse(surface, ellipse_color, pos_rect, width) - expected_pts = get_color_points(surface, ellipse_color, clip_rect) - - # Clear the surface and set the clip area. Redraw the ellipse - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_ellipse(surface, ellipse_color, pos_rect, width) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the expected_pts - # are the ellipse_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = ellipse_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - -class DrawEllipseTest(DrawEllipseMixin, DrawTestCase): - """Test draw module function ellipse. - - This class inherits the general tests from DrawEllipseMixin. It is also - the class to add any draw.ellipse specific tests to. - """ - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever properly supports drawing ellipses. -# @unittest.skip('draw_py.draw_ellipse not supported yet') -# class PythonDrawEllipseTest(DrawEllipseMixin, PythonDrawTestCase): -# """Test draw_py module function draw_ellipse. -# -# This class inherits the general tests from DrawEllipseMixin. It is also -# the class to add any draw_py.draw_ellipse specific tests to. -# """ - - -### Line/Lines/AALine/AALines Testing ######################################### - - -class BaseLineMixin(object): - """Mixin base for drawing various lines. - - This class contains general helper methods and setup for testing the - different types of lines. - """ - - COLORS = ( - (0, 0, 0), - (255, 0, 0), - (0, 255, 0), - (0, 0, 255), - (255, 255, 0), - (255, 0, 255), - (0, 255, 255), - (255, 255, 255), - ) - - @staticmethod - def _create_surfaces(): - # Create some surfaces with different sizes, depths, and flags. - surfaces = [] - for size in ((49, 49), (50, 50)): - for depth in (8, 16, 24, 32): - for flags in (0, SRCALPHA): - surface = pygame.display.set_mode(size, flags, depth) - surfaces.append(surface) - surfaces.append(surface.convert_alpha()) - return surfaces - - @staticmethod - def _rect_lines(rect): - # Yields pairs of end points and their reverse (to test symmetry). - # Uses a rect with the points radiating from its midleft. - for pt in rect_corners_mids_and_center(rect): - if pt in [rect.midleft, rect.center]: - # Don't bother with these points. - continue - yield (rect.midleft, pt) - yield (pt, rect.midleft) - - -### Line Testing ############################################################## - - -class LineMixin(BaseLineMixin): - """Mixin test for drawing a single line. - - This class contains all the general single line drawing tests. - """ - - def test_line__args(self): - """Ensures draw line accepts the correct args.""" - bounds_rect = self.draw_line( - pygame.Surface((3, 3)), (0, 10, 0, 50), (0, 0), (1, 1), 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__args_without_width(self): - """Ensures draw line accepts the args without a width.""" - bounds_rect = self.draw_line( - pygame.Surface((2, 2)), (0, 0, 0, 50), (0, 0), (2, 2) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__kwargs(self): - """Ensures draw line accepts the correct kwargs - with and without a width arg. 
- """ - surface = pygame.Surface((4, 4)) - color = pygame.Color("yellow") - start_pos = (1, 1) - end_pos = (2, 2) - kwargs_list = [ - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "width": 1, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - }, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_line(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__kwargs_order_independent(self): - """Ensures draw line's kwargs are not order dependent.""" - bounds_rect = self.draw_line( - start_pos=(1, 2), - end_pos=(2, 1), - width=2, - color=(10, 20, 30), - surface=pygame.Surface((3, 2)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__args_missing(self): - """Ensures draw line detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("blue") - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(surface, color, (0, 0)) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line() - - def test_line__kwargs_missing(self): - """Ensures draw line detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((3, 2)), - "color": pygame.Color("red"), - "start_pos": (2, 1), - "end_pos": (2, 2), - "width": 1, - } - - for name in ("end_pos", "start_pos", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(**invalid_kwargs) - - def test_line__arg_invalid_types(self): - """Ensures draw line detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - start_pos = (0, 1) - end_pos = (1, 2) - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_line(surface, color, start_pos, end_pos, "1") - - with self.assertRaises(TypeError): - # Invalid end_pos. - bounds_rect = self.draw_line(surface, color, start_pos, (1, 2, 3)) - - with self.assertRaises(TypeError): - # Invalid start_pos. - bounds_rect = self.draw_line(surface, color, (1,), end_pos) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_line(surface, 2.3, start_pos, end_pos) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_line((1, 2, 3, 4), color, start_pos, end_pos) - - def test_line__kwarg_invalid_types(self): - """Ensures draw line detects invalid kwarg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("green") - start_pos = (1, 0) - end_pos = (2, 0) - width = 1 - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "width": width, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "start_pos": start_pos, - "end_pos": end_pos, - "width": width, - }, - { - "surface": surface, - "color": color, - "start_pos": (0, 0, 0), # Invalid start_pos. - "end_pos": end_pos, - "width": width, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": (0,), # Invalid end_pos. - "width": width, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "width": 1.2, - }, - ] # Invalid width. 
- - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(**kwargs) - - def test_line__kwarg_invalid_name(self): - """Ensures draw line detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - start_pos = (1, 1) - end_pos = (2, 0) - kwargs_list = [ - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "width": 1, - "invalid": 1, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "invalid": 1, - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(**kwargs) - - def test_line__args_and_kwargs(self): - """Ensures draw line accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 2)) - color = (255, 255, 0, 0) - start_pos = (0, 1) - end_pos = (1, 2) - width = 0 - kwargs = { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "width": width, - } - - for name in ("surface", "color", "start_pos", "end_pos", "width"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_line(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_line(surface, color, **kwargs) - elif "start_pos" == name: - bounds_rect = self.draw_line(surface, color, start_pos, **kwargs) - elif "end_pos" == name: - bounds_rect = self.draw_line( - surface, color, start_pos, end_pos, **kwargs - ) - else: - bounds_rect = self.draw_line( - surface, color, start_pos, end_pos, width, **kwargs - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__valid_width_values(self): - """Ensures draw line accepts different width values.""" - line_color = pygame.Color("yellow") - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - pos = (2, 1) - kwargs = { - "surface": surface, - "color": line_color, - "start_pos": pos, - "end_pos": (2, 2), - "width": None, - } - - for width in (-100, -10, -1, 0, 1, 10, 100): - surface.fill(surface_color) # Clear for each test. - kwargs["width"] = width - expected_color = line_color if width > 0 else surface_color - - bounds_rect = self.draw_line(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__valid_start_pos_formats(self): - """Ensures draw line accepts different start_pos formats.""" - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((4, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "start_pos": None, - "end_pos": (2, 2), - "width": 2, - } - x, y = 2, 1 # start position - - # The point values can be ints or floats. - for start_pos in ((x, y), (x + 0.1, y), (x, y + 0.1), (x + 0.1, y + 0.1)): - # The point type can be a tuple/list/Vector2. - for seq_type in (tuple, list, Vector2): - surface.fill(surface_color) # Clear for each test. 
- kwargs["start_pos"] = seq_type(start_pos) - - bounds_rect = self.draw_line(**kwargs) - - self.assertEqual(surface.get_at((x, y)), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__valid_end_pos_formats(self): - """Ensures draw line accepts different end_pos formats.""" - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((4, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "start_pos": (2, 1), - "end_pos": None, - "width": 2, - } - x, y = 2, 2 # end position - - # The point values can be ints or floats. - for end_pos in ((x, y), (x + 0.2, y), (x, y + 0.2), (x + 0.2, y + 0.2)): - # The point type can be a tuple/list/Vector2. - for seq_type in (tuple, list, Vector2): - surface.fill(surface_color) # Clear for each test. - kwargs["end_pos"] = seq_type(end_pos) - - bounds_rect = self.draw_line(**kwargs) - - self.assertEqual(surface.get_at((x, y)), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__invalid_start_pos_formats(self): - """Ensures draw line handles invalid start_pos formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "start_pos": None, - "end_pos": (2, 2), - "width": 1, - } - - start_pos_fmts = ( - (2,), # Too few coords. - (2, 1, 0), # Too many coords. - (2, "1"), # Wrong type. - set([2, 1]), # Wrong type. - dict(((2, 1),)), - ) # Wrong type. - - for start_pos in start_pos_fmts: - kwargs["start_pos"] = start_pos - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(**kwargs) - - def test_line__invalid_end_pos_formats(self): - """Ensures draw line handles invalid end_pos formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "start_pos": (2, 2), - "end_pos": None, - "width": 1, - } - - end_pos_fmts = ( - (2,), # Too few coords. - (2, 1, 0), # Too many coords. - (2, "1"), # Wrong type. - set([2, 1]), # Wrong type. - dict(((2, 1),)), - ) # Wrong type. - - for end_pos in end_pos_fmts: - kwargs["end_pos"] = end_pos - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(**kwargs) - - def test_line__valid_color_formats(self): - """Ensures draw line accepts different color formats.""" - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - pos = (1, 1) - kwargs = { - "surface": surface, - "color": None, - "start_pos": pos, - "end_pos": (2, 1), - "width": 3, - } - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_line(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_line__invalid_color_formats(self): - """Ensures draw line handles invalid color formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 3)), - "color": None, - "start_pos": (1, 1), - "end_pos": (2, 1), - "width": 1, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_line(**kwargs) - - def test_line__color(self): - """Tests if the line drawn is the correct color.""" - pos = (0, 0) - for surface in self._create_surfaces(): - for expected_color in self.COLORS: - self.draw_line(surface, expected_color, pos, (1, 0)) - - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_line__color_with_thickness(self): - """Ensures a thick line is drawn using the correct color.""" - from_x = 5 - to_x = 10 - y = 5 - for surface in self._create_surfaces(): - for expected_color in self.COLORS: - self.draw_line(surface, expected_color, (from_x, y), (to_x, y), 5) - for pos in ((x, y + i) for i in (-2, 0, 2) for x in (from_x, to_x)): - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_line__gaps(self): - """Tests if the line drawn contains any gaps.""" - expected_color = (255, 255, 255) - for surface in self._create_surfaces(): - width = surface.get_width() - self.draw_line(surface, expected_color, (0, 0), (width - 1, 0)) - - for x in range(width): - pos = (x, 0) - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_line__gaps_with_thickness(self): - """Ensures a thick line is drawn without any gaps.""" - expected_color = (255, 255, 255) - thickness = 5 - for surface in self._create_surfaces(): - width = surface.get_width() - 1 - h = width // 5 - w = h * 5 - self.draw_line(surface, expected_color, (0, 5), (w, 5 + h), thickness) - - for x in range(w + 1): - for y in range(3, 8): - pos = (x, y + ((x + 2) // 5)) - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_line__bounding_rect(self): - """Ensures draw line returns the correct bounding rect. - - Tests lines with endpoints on and off the surface and a range of - width/thickness values. - """ - if isinstance(self, PythonDrawTestCase): - self.skipTest("bounding rects not supported in draw_py.draw_line") - - line_color = pygame.Color("red") - surf_color = pygame.Color("black") - width = height = 30 - # Using a rect to help manage where the lines are drawn. - helper_rect = pygame.Rect((0, 0), (width, height)) - - # Testing surfaces of different sizes. One larger than the helper_rect - # and one smaller (to test lines that span the surface). - for size in ((width + 5, height + 5), (width - 5, height - 5)): - surface = pygame.Surface(size, 0, 32) - surf_rect = surface.get_rect() - - # Move the helper rect to different positions to test line - # endpoints on and off the surface. - for pos in rect_corners_mids_and_center(surf_rect): - helper_rect.center = pos - - # Draw using different thicknesses. - for thickness in range(-1, 5): - for start, end in self._rect_lines(helper_rect): - surface.fill(surf_color) # Clear for each test. 
- - bounding_rect = self.draw_line( - surface, line_color, start, end, thickness - ) - - if 0 < thickness: - # Calculating the expected_rect after the line is - # drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect( - surface, surf_color, start - ) - else: - # Nothing drawn. - expected_rect = pygame.Rect(start, (0, 0)) - - self.assertEqual( - bounding_rect, - expected_rect, - "start={}, end={}, size={}, thickness={}".format( - start, end, size, thickness - ), - ) - - def test_line__surface_clip(self): - """Ensures draw line respects a surface's clip area.""" - surfw = surfh = 30 - line_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (11, 11)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the line's pos. - - for thickness in (1, 3): # Test different line widths. - # Test centering the line along the clip rect's edge. - for center in rect_corners_mids_and_center(clip_rect): - # Get the expected points by drawing the line without the - # clip area set. - pos_rect.center = center - surface.set_clip(None) - surface.fill(surface_color) - self.draw_line( - surface, line_color, pos_rect.midtop, pos_rect.midbottom, thickness - ) - expected_pts = get_color_points(surface, line_color, clip_rect) - - # Clear the surface and set the clip area. Redraw the line - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_line( - surface, line_color, pos_rect.midtop, pos_rect.midbottom, thickness - ) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the expected_pts - # are the line_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = line_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever fully supports drawing single lines. -# @unittest.skip('draw_py.draw_line not fully supported yet') -# class PythonDrawLineTest(LineMixin, PythonDrawTestCase): -# """Test draw_py module function line. -# -# This class inherits the general tests from LineMixin. It is also the class -# to add any draw_py.draw_line specific tests to. -# """ - - -class DrawLineTest(LineMixin, DrawTestCase): - """Test draw module function line. - - This class inherits the general tests from LineMixin. It is also the class - to add any draw.line specific tests to. 
- """ - - def test_line_endianness(self): - """test color component order""" - for depth in (24, 32): - surface = pygame.Surface((5, 3), 0, depth) - surface.fill(pygame.Color(0, 0, 0)) - self.draw_line(surface, pygame.Color(255, 0, 0), (0, 1), (2, 1), 1) - - self.assertGreater(surface.get_at((1, 1)).r, 0, "there should be red here") - - surface.fill(pygame.Color(0, 0, 0)) - self.draw_line(surface, pygame.Color(0, 0, 255), (0, 1), (2, 1), 1) - - self.assertGreater(surface.get_at((1, 1)).b, 0, "there should be blue here") - - def test_line(self): - # (l, t), (l, t) - self.surf_size = (320, 200) - self.surf = pygame.Surface(self.surf_size, pygame.SRCALPHA) - self.color = (1, 13, 24, 205) - - drawn = draw.line(self.surf, self.color, (1, 0), (200, 0)) - self.assertEqual( - drawn.right, 201, "end point arg should be (or at least was) inclusive" - ) - - # Should be colored where it's supposed to be - for pt in test_utils.rect_area_pts(drawn): - self.assertEqual(self.surf.get_at(pt), self.color) - - # And not where it shouldn't - for pt in test_utils.rect_outer_bounds(drawn): - self.assertNotEqual(self.surf.get_at(pt), self.color) - - # Line width greater that 1 - line_width = 2 - offset = 5 - a = (offset, offset) - b = (self.surf_size[0] - offset, a[1]) - c = (a[0], self.surf_size[1] - offset) - d = (b[0], c[1]) - e = (a[0] + offset, c[1]) - f = (b[0], c[0] + 5) - lines = [ - (a, d), - (b, c), - (c, b), - (d, a), - (a, b), - (b, a), - (a, c), - (c, a), - (a, e), - (e, a), - (a, f), - (f, a), - (a, a), - ] - - for p1, p2 in lines: - msg = "%s - %s" % (p1, p2) - if p1[0] <= p2[0]: - plow = p1 - phigh = p2 - else: - plow = p2 - phigh = p1 - - self.surf.fill((0, 0, 0)) - rec = draw.line(self.surf, (255, 255, 255), p1, p2, line_width) - xinc = yinc = 0 - - if abs(p1[0] - p2[0]) > abs(p1[1] - p2[1]): - yinc = 1 - else: - xinc = 1 - - for i in range(line_width): - p = (p1[0] + xinc * i, p1[1] + yinc * i) - self.assertEqual(self.surf.get_at(p), (255, 255, 255), msg) - - p = (p2[0] + xinc * i, p2[1] + yinc * i) - self.assertEqual(self.surf.get_at(p), (255, 255, 255), msg) - - p = (plow[0] - 1, plow[1]) - self.assertEqual(self.surf.get_at(p), (0, 0, 0), msg) - - p = (plow[0] + xinc * line_width, plow[1] + yinc * line_width) - self.assertEqual(self.surf.get_at(p), (0, 0, 0), msg) - - p = (phigh[0] + xinc * line_width, phigh[1] + yinc * line_width) - self.assertEqual(self.surf.get_at(p), (0, 0, 0), msg) - - if p1[0] < p2[0]: - rx = p1[0] - else: - rx = p2[0] - - if p1[1] < p2[1]: - ry = p1[1] - else: - ry = p2[1] - - w = abs(p2[0] - p1[0]) + 1 + xinc * (line_width - 1) - h = abs(p2[1] - p1[1]) + 1 + yinc * (line_width - 1) - msg += ", %s" % (rec,) - - self.assertEqual(rec, (rx, ry, w, h), msg) - - def test_line_for_gaps(self): - # This checks bug Thick Line Bug #448 - - width = 200 - height = 200 - surf = pygame.Surface((width, height), pygame.SRCALPHA) - - def white_surrounded_pixels(x, y): - offsets = [(1, 0), (0, 1), (-1, 0), (0, -1)] - WHITE = (255, 255, 255, 255) - return len( - [1 for dx, dy in offsets if surf.get_at((x + dx, y + dy)) == WHITE] - ) - - def check_white_line(start, end): - surf.fill((0, 0, 0)) - pygame.draw.line(surf, (255, 255, 255), start, end, 30) - - BLACK = (0, 0, 0, 255) - for x in range(1, width - 1): - for y in range(1, height - 1): - if surf.get_at((x, y)) == BLACK: - self.assertTrue(white_surrounded_pixels(x, y) < 3) - - check_white_line((50, 50), (140, 0)) - check_white_line((50, 50), (0, 120)) - check_white_line((50, 50), (199, 198)) - - -### Lines Testing 
############################################################# - - -class LinesMixin(BaseLineMixin): - """Mixin test for drawing lines. - - This class contains all the general lines drawing tests. - """ - - def test_lines__args(self): - """Ensures draw lines accepts the correct args.""" - bounds_rect = self.draw_lines( - pygame.Surface((3, 3)), (0, 10, 0, 50), False, ((0, 0), (1, 1)), 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__args_without_width(self): - """Ensures draw lines accepts the args without a width.""" - bounds_rect = self.draw_lines( - pygame.Surface((2, 2)), (0, 0, 0, 50), False, ((0, 0), (1, 1)) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__kwargs(self): - """Ensures draw lines accepts the correct kwargs - with and without a width arg. - """ - surface = pygame.Surface((4, 4)) - color = pygame.Color("yellow") - points = ((0, 0), (1, 1), (2, 2)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "closed": False, - "points": points, - "width": 1, - }, - {"surface": surface, "color": color, "closed": False, "points": points}, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_lines(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__kwargs_order_independent(self): - """Ensures draw lines's kwargs are not order dependent.""" - bounds_rect = self.draw_lines( - closed=1, - points=((0, 0), (1, 1), (2, 2)), - width=2, - color=(10, 20, 30), - surface=pygame.Surface((3, 2)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__args_missing(self): - """Ensures draw lines detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("blue") - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(surface, color, 0) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines() - - def test_lines__kwargs_missing(self): - """Ensures draw lines detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((3, 2)), - "color": pygame.Color("red"), - "closed": 1, - "points": ((2, 2), (1, 1)), - "width": 1, - } - - for name in ("points", "closed", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(**invalid_kwargs) - - def test_lines__arg_invalid_types(self): - """Ensures draw lines detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - closed = 0 - points = ((1, 2), (2, 1)) - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_lines(surface, color, closed, points, "1") - - with self.assertRaises(TypeError): - # Invalid points. - bounds_rect = self.draw_lines(surface, color, closed, (1, 2, 3)) - - with self.assertRaises(TypeError): - # Invalid closed. - bounds_rect = self.draw_lines(surface, color, InvalidBool(), points) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_lines(surface, 2.3, closed, points) - - with self.assertRaises(TypeError): - # Invalid surface. 
- bounds_rect = self.draw_lines((1, 2, 3, 4), color, closed, points) - - def test_lines__kwarg_invalid_types(self): - """Ensures draw lines detects invalid kwarg types.""" - valid_kwargs = { - "surface": pygame.Surface((3, 3)), - "color": pygame.Color("green"), - "closed": False, - "points": ((1, 2), (2, 1)), - "width": 1, - } - - invalid_kwargs = { - "surface": pygame.Surface, - "color": 2.3, - "closed": InvalidBool(), - "points": (0, 0, 0), - "width": 1.2, - } - - for kwarg in ("surface", "color", "closed", "points", "width"): - kwargs = dict(valid_kwargs) - kwargs[kwarg] = invalid_kwargs[kwarg] - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(**kwargs) - - def test_lines__kwarg_invalid_name(self): - """Ensures draw lines detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - closed = 1 - points = ((1, 2), (2, 1)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "closed": closed, - "points": points, - "width": 1, - "invalid": 1, - }, - { - "surface": surface, - "color": color, - "closed": closed, - "points": points, - "invalid": 1, - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(**kwargs) - - def test_lines__args_and_kwargs(self): - """Ensures draw lines accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 2)) - color = (255, 255, 0, 0) - closed = 0 - points = ((1, 2), (2, 1)) - width = 1 - kwargs = { - "surface": surface, - "color": color, - "closed": closed, - "points": points, - "width": width, - } - - for name in ("surface", "color", "closed", "points", "width"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_lines(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_lines(surface, color, **kwargs) - elif "closed" == name: - bounds_rect = self.draw_lines(surface, color, closed, **kwargs) - elif "points" == name: - bounds_rect = self.draw_lines(surface, color, closed, points, **kwargs) - else: - bounds_rect = self.draw_lines( - surface, color, closed, points, width, **kwargs - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__valid_width_values(self): - """Ensures draw lines accepts different width values.""" - line_color = pygame.Color("yellow") - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - pos = (1, 1) - kwargs = { - "surface": surface, - "color": line_color, - "closed": False, - "points": (pos, (2, 1)), - "width": None, - } - - for width in (-100, -10, -1, 0, 1, 10, 100): - surface.fill(surface_color) # Clear for each test. - kwargs["width"] = width - expected_color = line_color if width > 0 else surface_color - - bounds_rect = self.draw_lines(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__valid_points_format(self): - """Ensures draw lines accepts different points formats.""" - expected_color = (10, 20, 30, 255) - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "closed": False, - "points": None, - "width": 1, - } - - # The point type can be a tuple/list/Vector2. - point_types = ( - (tuple, tuple, tuple, tuple), # all tuples - (list, list, list, list), # all lists - (Vector2, Vector2, Vector2, Vector2), # all Vector2s - (list, Vector2, tuple, Vector2), - ) # mix - - # The point values can be ints or floats. 
- point_values = ( - ((1, 1), (2, 1), (2, 2), (1, 2)), - ((1, 1), (2.2, 1), (2.1, 2.2), (1, 2.1)), - ) - - # Each sequence of points can be a tuple or a list. - seq_types = (tuple, list) - - for point_type in point_types: - for values in point_values: - check_pos = values[0] - points = [point_type[i](pt) for i, pt in enumerate(values)] - - for seq_type in seq_types: - surface.fill(surface_color) # Clear for each test. - kwargs["points"] = seq_type(points) - - bounds_rect = self.draw_lines(**kwargs) - - self.assertEqual(surface.get_at(check_pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__invalid_points_formats(self): - """Ensures draw lines handles invalid points formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "closed": False, - "points": None, - "width": 1, - } - - points_fmts = ( - ((1, 1), (2,)), # Too few coords. - ((1, 1), (2, 2, 2)), # Too many coords. - ((1, 1), (2, "2")), # Wrong type. - ((1, 1), set([2, 3])), # Wrong type. - ((1, 1), dict(((2, 2), (3, 3)))), # Wrong type. - set(((1, 1), (1, 2))), # Wrong type. - dict(((1, 1), (4, 4))), - ) # Wrong type. - - for points in points_fmts: - kwargs["points"] = points - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(**kwargs) - - def test_lines__invalid_points_values(self): - """Ensures draw lines handles invalid points values correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "closed": False, - "points": None, - "width": 1, - } - - for points in ([], ((1, 1),)): # Too few points. - for seq_type in (tuple, list): # Test as tuples and lists. - kwargs["points"] = seq_type(points) - - with self.assertRaises(ValueError): - bounds_rect = self.draw_lines(**kwargs) - - def test_lines__valid_closed_values(self): - """Ensures draw lines accepts different closed values.""" - line_color = pygame.Color("blue") - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - pos = (1, 2) - kwargs = { - "surface": surface, - "color": line_color, - "closed": None, - "points": ((1, 1), (3, 1), (3, 3), (1, 3)), - "width": 1, - } - - true_values = (-7, 1, 10, "2", 3.1, (4,), [5], True) - false_values = (None, "", 0, (), [], False) - - for closed in true_values + false_values: - surface.fill(surface_color) # Clear for each test. - kwargs["closed"] = closed - expected_color = line_color if closed else surface_color - - bounds_rect = self.draw_lines(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__valid_color_formats(self): - """Ensures draw lines accepts different color formats.""" - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - pos = (1, 1) - kwargs = { - "surface": surface, - "color": None, - "closed": False, - "points": (pos, (2, 1)), - "width": 3, - } - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_lines(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_lines__invalid_color_formats(self): - """Ensures draw lines handles invalid color formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 3)), - "color": None, - "closed": False, - "points": ((1, 1), (1, 2)), - "width": 1, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_lines(**kwargs) - - def test_lines__color(self): - """Tests if the lines drawn are the correct color. - - Draws lines around the border of the given surface and checks if all - borders of the surface only contain the given color. - """ - for surface in self._create_surfaces(): - for expected_color in self.COLORS: - self.draw_lines(surface, expected_color, True, corners(surface)) - - for pos, color in border_pos_and_color(surface): - self.assertEqual(color, expected_color, "pos={}".format(pos)) - - def test_lines__color_with_thickness(self): - """Ensures thick lines are drawn using the correct color.""" - x_left = y_top = 5 - for surface in self._create_surfaces(): - x_right = surface.get_width() - 5 - y_bottom = surface.get_height() - 5 - endpoints = ( - (x_left, y_top), - (x_right, y_top), - (x_right, y_bottom), - (x_left, y_bottom), - ) - for expected_color in self.COLORS: - self.draw_lines(surface, expected_color, True, endpoints, 3) - - for t in (-1, 0, 1): - for x in range(x_left, x_right + 1): - for y in (y_top, y_bottom): - pos = (x, y + t) - self.assertEqual( - surface.get_at(pos), - expected_color, - "pos={}".format(pos), - ) - for y in range(y_top, y_bottom + 1): - for x in (x_left, x_right): - pos = (x + t, y) - self.assertEqual( - surface.get_at(pos), - expected_color, - "pos={}".format(pos), - ) - - def test_lines__gaps(self): - """Tests if the lines drawn contain any gaps. - - Draws lines around the border of the given surface and checks if - all borders of the surface contain any gaps. - """ - expected_color = (255, 255, 255) - for surface in self._create_surfaces(): - self.draw_lines(surface, expected_color, True, corners(surface)) - - for pos, color in border_pos_and_color(surface): - self.assertEqual(color, expected_color, "pos={}".format(pos)) - - def test_lines__gaps_with_thickness(self): - """Ensures thick lines are drawn without any gaps.""" - expected_color = (255, 255, 255) - x_left = y_top = 5 - for surface in self._create_surfaces(): - h = (surface.get_width() - 11) // 5 - w = h * 5 - x_right = x_left + w - y_bottom = y_top + h - endpoints = ((x_left, y_top), (x_right, y_top), (x_right, y_bottom)) - self.draw_lines(surface, expected_color, True, endpoints, 3) - - for x in range(x_left, x_right + 1): - for t in (-1, 0, 1): - pos = (x, y_top + t) - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - pos = (x, y_top + t + ((x - 3) // 5)) - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - for y in range(y_top, y_bottom + 1): - for t in (-1, 0, 1): - pos = (x_right + t, y) - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_lines__bounding_rect(self): - """Ensures draw lines returns the correct bounding rect. 
- - Tests lines with endpoints on and off the surface and a range of - width/thickness values. - """ - line_color = pygame.Color("red") - surf_color = pygame.Color("black") - width = height = 30 - # Using a rect to help manage where the lines are drawn. - pos_rect = pygame.Rect((0, 0), (width, height)) - - # Testing surfaces of different sizes. One larger than the pos_rect - # and one smaller (to test lines that span the surface). - for size in ((width + 5, height + 5), (width - 5, height - 5)): - surface = pygame.Surface(size, 0, 32) - surf_rect = surface.get_rect() - - # Move pos_rect to different positions to test line endpoints on - # and off the surface. - for pos in rect_corners_mids_and_center(surf_rect): - pos_rect.center = pos - # Shape: Triangle (if closed), ^ caret (if not closed). - pts = (pos_rect.midleft, pos_rect.midtop, pos_rect.midright) - pos = pts[0] # Rect position if nothing drawn. - - # Draw using different thickness and closed values. - for thickness in range(-1, 5): - for closed in (True, False): - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_lines( - surface, line_color, closed, pts, thickness - ) - - if 0 < thickness: - # Calculating the expected_rect after the lines are - # drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect( - surface, surf_color, pos - ) - else: - # Nothing drawn. - expected_rect = pygame.Rect(pos, (0, 0)) - - self.assertEqual(bounding_rect, expected_rect) - - def test_lines__surface_clip(self): - """Ensures draw lines respects a surface's clip area.""" - surfw = surfh = 30 - line_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (11, 11)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the lines's pos. - - # Test centering the pos_rect along the clip rect's edge to allow for - # drawing the lines over the clip_rect's bounds. - for center in rect_corners_mids_and_center(clip_rect): - pos_rect.center = center - pts = (pos_rect.midtop, pos_rect.center, pos_rect.midbottom) - - for closed in (True, False): # Test closed and not closed. - for thickness in (1, 3): # Test different line widths. - # Get the expected points by drawing the lines without the - # clip area set. - surface.set_clip(None) - surface.fill(surface_color) - self.draw_lines(surface, line_color, closed, pts, thickness) - expected_pts = get_color_points(surface, line_color, clip_rect) - - # Clear the surface and set the clip area. Redraw the lines - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_lines(surface, line_color, closed, pts, thickness) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the - # expected_pts are the line_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = line_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever fully supports drawing lines. -# class PythonDrawLinesTest(LinesMixin, PythonDrawTestCase): -# """Test draw_py module function lines. -# -# This class inherits the general tests from LinesMixin. It is also the -# class to add any draw_py.draw_lines specific tests to. 
-# """ - - -class DrawLinesTest(LinesMixin, DrawTestCase): - """Test draw module function lines. - - This class inherits the general tests from LinesMixin. It is also the class - to add any draw.lines specific tests to. - """ - - -### AALine Testing ############################################################ - - -class AALineMixin(BaseLineMixin): - """Mixin test for drawing a single aaline. - - This class contains all the general single aaline drawing tests. - """ - - def test_aaline__args(self): - """Ensures draw aaline accepts the correct args.""" - bounds_rect = self.draw_aaline( - pygame.Surface((3, 3)), (0, 10, 0, 50), (0, 0), (1, 1), 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__args_without_blend(self): - """Ensures draw aaline accepts the args without a blend.""" - bounds_rect = self.draw_aaline( - pygame.Surface((2, 2)), (0, 0, 0, 50), (0, 0), (2, 2) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__blend_warning(self): - """From pygame 2, blend=False should raise DeprecationWarning.""" - with warnings.catch_warnings(record=True) as w: - # Cause all warnings to always be triggered. - warnings.simplefilter("always") - # Trigger DeprecationWarning. - self.draw_aaline( - pygame.Surface((2, 2)), (0, 0, 0, 50), (0, 0), (2, 2), False - ) - # Check if there is only one warning and is a DeprecationWarning. - self.assertEqual(len(w), 1) - self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) - - def test_aaline__kwargs(self): - """Ensures draw aaline accepts the correct kwargs - with and without a blend arg. - """ - surface = pygame.Surface((4, 4)) - color = pygame.Color("yellow") - start_pos = (1, 1) - end_pos = (2, 2) - kwargs_list = [ - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "blend": 1, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - }, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_aaline(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__kwargs_order_independent(self): - """Ensures draw aaline's kwargs are not order dependent.""" - bounds_rect = self.draw_aaline( - start_pos=(1, 2), - end_pos=(2, 1), - blend=1, - color=(10, 20, 30), - surface=pygame.Surface((3, 2)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__args_missing(self): - """Ensures draw aaline detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("blue") - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(surface, color, (0, 0)) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline() - - def test_aaline__kwargs_missing(self): - """Ensures draw aaline detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((3, 2)), - "color": pygame.Color("red"), - "start_pos": (2, 1), - "end_pos": (2, 2), - "blend": 1, - } - - for name in ("end_pos", "start_pos", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. 
- - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(**invalid_kwargs) - - def test_aaline__arg_invalid_types(self): - """Ensures draw aaline detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - start_pos = (0, 1) - end_pos = (1, 2) - - with self.assertRaises(TypeError): - # Invalid blend. - bounds_rect = self.draw_aaline(surface, color, start_pos, end_pos, "1") - - with self.assertRaises(TypeError): - # Invalid end_pos. - bounds_rect = self.draw_aaline(surface, color, start_pos, (1, 2, 3)) - - with self.assertRaises(TypeError): - # Invalid start_pos. - bounds_rect = self.draw_aaline(surface, color, (1,), end_pos) - - with self.assertRaises(ValueError): - # Invalid color. - bounds_rect = self.draw_aaline(surface, "invalid-color", start_pos, end_pos) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_aaline((1, 2, 3, 4), color, start_pos, end_pos) - - def test_aaline__kwarg_invalid_types(self): - """Ensures draw aaline detects invalid kwarg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("green") - start_pos = (1, 0) - end_pos = (2, 0) - blend = 1 - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "blend": blend, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "start_pos": start_pos, - "end_pos": end_pos, - "blend": blend, - }, - { - "surface": surface, - "color": color, - "start_pos": (0, 0, 0), # Invalid start_pos. - "end_pos": end_pos, - "blend": blend, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": (0,), # Invalid end_pos. - "blend": blend, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "blend": 1.2, - }, - ] # Invalid blend. 
- - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(**kwargs) - - def test_aaline__kwarg_invalid_name(self): - """Ensures draw aaline detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - start_pos = (1, 1) - end_pos = (2, 0) - kwargs_list = [ - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "blend": 1, - "invalid": 1, - }, - { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "invalid": 1, - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(**kwargs) - - def test_aaline__args_and_kwargs(self): - """Ensures draw aaline accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 2)) - color = (255, 255, 0, 0) - start_pos = (0, 1) - end_pos = (1, 2) - blend = 0 - kwargs = { - "surface": surface, - "color": color, - "start_pos": start_pos, - "end_pos": end_pos, - "blend": blend, - } - - for name in ("surface", "color", "start_pos", "end_pos", "blend"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_aaline(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_aaline(surface, color, **kwargs) - elif "start_pos" == name: - bounds_rect = self.draw_aaline(surface, color, start_pos, **kwargs) - elif "end_pos" == name: - bounds_rect = self.draw_aaline( - surface, color, start_pos, end_pos, **kwargs - ) - else: - bounds_rect = self.draw_aaline( - surface, color, start_pos, end_pos, blend, **kwargs - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__valid_blend_values(self): - """Ensures draw aaline accepts different blend values.""" - expected_color = pygame.Color("yellow") - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - pos = (2, 1) - kwargs = { - "surface": surface, - "color": expected_color, - "start_pos": pos, - "end_pos": (2, 2), - "blend": None, - } - - for blend in (-10, -2, -1, 0, 1, 2, 10): - surface.fill(surface_color) # Clear for each test. - kwargs["blend"] = blend - - bounds_rect = self.draw_aaline(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__valid_start_pos_formats(self): - """Ensures draw aaline accepts different start_pos formats.""" - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((4, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "start_pos": None, - "end_pos": (2, 2), - "blend": 0, - } - x, y = 2, 1 # start position - positions = ((x, y), (x + 0.01, y), (x, y + 0.01), (x + 0.01, y + 0.01)) - - for start_pos in positions: - for seq_type in (tuple, list, Vector2): - surface.fill(surface_color) # Clear for each test. - kwargs["start_pos"] = seq_type(start_pos) - - bounds_rect = self.draw_aaline(**kwargs) - - color = surface.get_at((x, y)) - for i, sub_color in enumerate(expected_color): - # The color could be slightly off the expected color due to - # any fractional position arguments. 
- self.assertGreaterEqual(color[i] + 6, sub_color, start_pos) - self.assertIsInstance(bounds_rect, pygame.Rect, start_pos) - - def test_aaline__valid_end_pos_formats(self): - """Ensures draw aaline accepts different end_pos formats.""" - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((4, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "start_pos": (2, 1), - "end_pos": None, - "blend": 0, - } - x, y = 2, 2 # end position - positions = ((x, y), (x + 0.02, y), (x, y + 0.02), (x + 0.02, y + 0.02)) - - for end_pos in positions: - for seq_type in (tuple, list, Vector2): - surface.fill(surface_color) # Clear for each test. - kwargs["end_pos"] = seq_type(end_pos) - - bounds_rect = self.draw_aaline(**kwargs) - - color = surface.get_at((x, y)) - for i, sub_color in enumerate(expected_color): - # The color could be slightly off the expected color due to - # any fractional position arguments. - self.assertGreaterEqual(color[i] + 15, sub_color, end_pos) - self.assertIsInstance(bounds_rect, pygame.Rect, end_pos) - - def test_aaline__invalid_start_pos_formats(self): - """Ensures draw aaline handles invalid start_pos formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "start_pos": None, - "end_pos": (2, 2), - "blend": 0, - } - - start_pos_fmts = ( - (2,), # Too few coords. - (2, 1, 0), # Too many coords. - (2, "1"), # Wrong type. - set([2, 1]), # Wrong type. - dict(((2, 1),)), - ) # Wrong type. - - for start_pos in start_pos_fmts: - kwargs["start_pos"] = start_pos - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(**kwargs) - - def test_aaline__invalid_end_pos_formats(self): - """Ensures draw aaline handles invalid end_pos formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "start_pos": (2, 2), - "end_pos": None, - "blend": 0, - } - - end_pos_fmts = ( - (2,), # Too few coords. - (2, 1, 0), # Too many coords. - (2, "1"), # Wrong type. - set([2, 1]), # Wrong type. - dict(((2, 1),)), - ) # Wrong type. - - for end_pos in end_pos_fmts: - kwargs["end_pos"] = end_pos - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(**kwargs) - - def test_aaline__valid_color_formats(self): - """Ensures draw aaline accepts different color formats.""" - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - pos = (1, 1) - kwargs = { - "surface": surface, - "color": None, - "start_pos": pos, - "end_pos": (2, 1), - "blend": 0, - } - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_aaline(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aaline__invalid_color_formats(self): - """Ensures draw aaline handles invalid color formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 3)), - "color": None, - "start_pos": (1, 1), - "end_pos": (2, 1), - "blend": 0, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aaline(**kwargs) - - def test_aaline__color(self): - """Tests if the aaline drawn is the correct color.""" - pos = (0, 0) - for surface in self._create_surfaces(): - for expected_color in self.COLORS: - self.draw_aaline(surface, expected_color, pos, (1, 0)) - - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_aaline__gaps(self): - """Tests if the aaline drawn contains any gaps. - - See: #512 - """ - expected_color = (255, 255, 255) - for surface in self._create_surfaces(): - width = surface.get_width() - self.draw_aaline(surface, expected_color, (0, 0), (width - 1, 0)) - - for x in range(width): - pos = (x, 0) - self.assertEqual( - surface.get_at(pos), expected_color, "pos={}".format(pos) - ) - - def test_aaline__bounding_rect(self): - """Ensures draw aaline returns the correct bounding rect. - - Tests lines with endpoints on and off the surface and blending - enabled and disabled. - """ - line_color = pygame.Color("red") - surf_color = pygame.Color("blue") - width = height = 30 - # Using a rect to help manage where the lines are drawn. - helper_rect = pygame.Rect((0, 0), (width, height)) - - # Testing surfaces of different sizes. One larger than the helper_rect - # and one smaller (to test lines that span the surface). - for size in ((width + 5, height + 5), (width - 5, height - 5)): - surface = pygame.Surface(size, 0, 32) - surf_rect = surface.get_rect() - - # Move the helper rect to different positions to test line - # endpoints on and off the surface. - for pos in rect_corners_mids_and_center(surf_rect): - helper_rect.center = pos - - for blend in (False, True): # Test non-blending and blending. - for start, end in self._rect_lines(helper_rect): - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_aaline( - surface, line_color, start, end, blend - ) - - # Calculating the expected_rect after the line is - # drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect(surface, surf_color, start) - - self.assertEqual(bounding_rect, expected_rect) - - def test_aaline__surface_clip(self): - """Ensures draw aaline respects a surface's clip area.""" - surfw = surfh = 30 - aaline_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (11, 11)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the aaline's pos. - - # Test centering the pos_rect along the clip rect's edge to allow for - # drawing the aaline over the clip_rect's bounds. - for center in rect_corners_mids_and_center(clip_rect): - pos_rect.center = center - - for blend in (0, 1): # Test non-blending and blending. - # Get the expected points by drawing the aaline without the - # clip area set. 
-                surface.set_clip(None)
-                surface.fill(surface_color)
-                self.draw_aaline(
-                    surface, aaline_color, pos_rect.midtop, pos_rect.midbottom, blend
-                )
-
-                # Need to get the points that are NOT surface_color due to the
-                # way blend=0 uses the color black to antialias.
-                expected_pts = get_color_points(
-                    surface, surface_color, clip_rect, False
-                )
-
-                # Clear the surface and set the clip area. Redraw the aaline
-                # and check that only the clip area is modified.
-                surface.fill(surface_color)
-                surface.set_clip(clip_rect)
-
-                self.draw_aaline(
-                    surface, aaline_color, pos_rect.midtop, pos_rect.midbottom, blend
-                )
-
-                surface.lock()  # For possible speed up.
-
-                # Check all the surface points to ensure the expected_pts
-                # are not surface_color.
-                for pt in ((x, y) for x in range(surfw) for y in range(surfh)):
-                    if pt in expected_pts:
-                        self.assertNotEqual(surface.get_at(pt), surface_color, pt)
-                    else:
-                        self.assertEqual(surface.get_at(pt), surface_color, pt)
-
-                surface.unlock()
-
-
-# Commented out to avoid cluttering the test output. Add back in if draw_py
-# ever fully supports drawing single aalines.
-# class PythonDrawAALineTest(AALineMixin, PythonDrawTestCase):
-#     """Test draw_py module function aaline.
-#
-#     This class inherits the general tests from AALineMixin. It is also the
-#     class to add any draw_py.draw_aaline specific tests to.
-#     """
-
-
-class DrawAALineTest(AALineMixin, DrawTestCase):
-    """Test draw module function aaline.
-
-    This class inherits the general tests from AALineMixin. It is also the
-    class to add any draw.aaline specific tests to.
-    """
-
-    def test_aaline_endianness(self):
-        """test color component order"""
-        for depth in (24, 32):
-            surface = pygame.Surface((5, 3), 0, depth)
-            surface.fill(pygame.Color(0, 0, 0))
-            self.draw_aaline(surface, pygame.Color(255, 0, 0), (0, 1), (2, 1), 1)
-
-            self.assertGreater(surface.get_at((1, 1)).r, 0, "there should be red here")
-
-            surface.fill(pygame.Color(0, 0, 0))
-            self.draw_aaline(surface, pygame.Color(0, 0, 255), (0, 1), (2, 1), 1)
-
-            self.assertGreater(surface.get_at((1, 1)).b, 0, "there should be blue here")
-
-    def _check_antialiasing(
-        self, from_point, to_point, should, check_points, set_endpoints=True
-    ):
-        """Draw a line between two points and check colors of check_points."""
-        if set_endpoints:
-            should[from_point] = should[to_point] = FG_GREEN
-
-        def check_one_direction(from_point, to_point, should):
-            self.draw_aaline(self.surface, FG_GREEN, from_point, to_point, True)
-
-            for pt in check_points:
-                color = should.get(pt, BG_RED)
-                with self.subTest(from_pt=from_point, pt=pt, to=to_point):
-                    self.assertEqual(self.surface.get_at(pt), color)
-
-            # reset
-            draw.rect(self.surface, BG_RED, (0, 0, 10, 10), 0)
-
-        # It is important to also test the opposite direction, since the
-        # algorithm is not (or was not, see #512) symmetric.
-        check_one_direction(from_point, to_point, should)
-        if from_point != to_point:
-            check_one_direction(to_point, from_point, should)
-
-    def test_short_non_antialiased_lines(self):
-        """Tests very short non-anti-aliased lines in all directions."""
-
-        # Horizontal, vertical and diagonal lines should not be anti-aliased,
-        # even with draw.aaline ...
-        self.surface = pygame.Surface((10, 10))
-        draw.rect(self.surface, BG_RED, (0, 0, 10, 10), 0)
-
-        check_points = [(i, j) for i in range(3, 8) for j in range(3, 8)]
-
-        def check_both_directions(from_pt, to_pt, other_points):
-            should = {pt: FG_GREEN for pt in other_points}
-            self._check_antialiasing(from_pt, to_pt, should, check_points)
-
-        # 0.
one point - check_both_directions((5, 5), (5, 5), []) - # 1. horizontal - check_both_directions((4, 7), (5, 7), []) - check_both_directions((5, 4), (7, 4), [(6, 4)]) - - # 2. vertical - check_both_directions((5, 5), (5, 6), []) - check_both_directions((6, 4), (6, 6), [(6, 5)]) - # 3. diagonals - check_both_directions((5, 5), (6, 6), []) - check_both_directions((5, 5), (7, 7), [(6, 6)]) - check_both_directions((5, 6), (6, 5), []) - check_both_directions((6, 4), (4, 6), [(5, 5)]) - - def test_short_line_anti_aliasing(self): - - self.surface = pygame.Surface((10, 10)) - draw.rect(self.surface, BG_RED, (0, 0, 10, 10), 0) - - check_points = [(i, j) for i in range(3, 8) for j in range(3, 8)] - - def check_both_directions(from_pt, to_pt, should): - self._check_antialiasing(from_pt, to_pt, should, check_points) - - brown = (127, 127, 0) - reddish = (191, 63, 0) - greenish = (63, 191, 0) - - # lets say dx = abs(x0 - x1) ; dy = abs(y0 - y1) - - # dy / dx = 0.5 - check_both_directions((4, 4), (6, 5), {(5, 4): brown, (5, 5): brown}) - check_both_directions((4, 5), (6, 4), {(5, 4): brown, (5, 5): brown}) - - # dy / dx = 2 - check_both_directions((4, 4), (5, 6), {(4, 5): brown, (5, 5): brown}) - check_both_directions((5, 4), (4, 6), {(4, 5): brown, (5, 5): brown}) - - # some little longer lines; so we need to check more points: - check_points = [(i, j) for i in range(2, 9) for j in range(2, 9)] - # dy / dx = 0.25 - should = { - (4, 3): greenish, - (5, 3): brown, - (6, 3): reddish, - (4, 4): reddish, - (5, 4): brown, - (6, 4): greenish, - } - check_both_directions((3, 3), (7, 4), should) - - should = { - (4, 3): reddish, - (5, 3): brown, - (6, 3): greenish, - (4, 4): greenish, - (5, 4): brown, - (6, 4): reddish, - } - check_both_directions((3, 4), (7, 3), should) - - # dy / dx = 4 - should = { - (4, 4): greenish, - (4, 5): brown, - (4, 6): reddish, - (5, 4): reddish, - (5, 5): brown, - (5, 6): greenish, - } - check_both_directions((4, 3), (5, 7), should) - - should = { - (4, 4): reddish, - (4, 5): brown, - (4, 6): greenish, - (5, 4): greenish, - (5, 5): brown, - (5, 6): reddish, - } - check_both_directions((5, 3), (4, 7), should) - - def test_anti_aliasing_float_coordinates(self): - """Float coordinates should be blended smoothly.""" - - self.surface = pygame.Surface((10, 10)) - draw.rect(self.surface, BG_RED, (0, 0, 10, 10), 0) - - check_points = [(i, j) for i in range(5) for j in range(5)] - brown = (127, 127, 0) - reddish = (191, 63, 0) - greenish = (63, 191, 0) - - # 0. identical point : current implementation does no smoothing... - expected = {(2, 2): FG_GREEN} - self._check_antialiasing( - (1.5, 2), (1.5, 2), expected, check_points, set_endpoints=False - ) - expected = {(2, 3): FG_GREEN} - self._check_antialiasing( - (2.49, 2.7), (2.49, 2.7), expected, check_points, set_endpoints=False - ) - - # 1. 
horizontal lines - # a) blend endpoints - expected = {(1, 2): brown, (2, 2): FG_GREEN} - self._check_antialiasing( - (1.5, 2), (2, 2), expected, check_points, set_endpoints=False - ) - expected = {(1, 2): brown, (2, 2): FG_GREEN, (3, 2): brown} - self._check_antialiasing( - (1.5, 2), (2.5, 2), expected, check_points, set_endpoints=False - ) - expected = {(2, 2): brown, (1, 2): FG_GREEN} - self._check_antialiasing( - (1, 2), (1.5, 2), expected, check_points, set_endpoints=False - ) - expected = {(1, 2): brown, (2, 2): greenish} - self._check_antialiasing( - (1.5, 2), (1.75, 2), expected, check_points, set_endpoints=False - ) - - # b) blend y-coordinate - expected = {(x, y): brown for x in range(2, 5) for y in (1, 2)} - self._check_antialiasing( - (2, 1.5), (4, 1.5), expected, check_points, set_endpoints=False - ) - - # 2. vertical lines - # a) blend endpoints - expected = {(2, 1): brown, (2, 2): FG_GREEN, (2, 3): brown} - self._check_antialiasing( - (2, 1.5), (2, 2.5), expected, check_points, set_endpoints=False - ) - expected = {(2, 1): brown, (2, 2): greenish} - self._check_antialiasing( - (2, 1.5), (2, 1.75), expected, check_points, set_endpoints=False - ) - # b) blend x-coordinate - expected = {(x, y): brown for x in (1, 2) for y in range(2, 5)} - self._check_antialiasing( - (1.5, 2), (1.5, 4), expected, check_points, set_endpoints=False - ) - # 3. diagonal lines - # a) blend endpoints - expected = {(1, 1): brown, (2, 2): FG_GREEN, (3, 3): brown} - self._check_antialiasing( - (1.5, 1.5), (2.5, 2.5), expected, check_points, set_endpoints=False - ) - expected = {(3, 1): brown, (2, 2): FG_GREEN, (1, 3): brown} - self._check_antialiasing( - (2.5, 1.5), (1.5, 2.5), expected, check_points, set_endpoints=False - ) - # b) blend sidewards - expected = {(2, 1): brown, (2, 2): brown, (3, 2): brown, (3, 3): brown} - self._check_antialiasing( - (2, 1.5), (3, 2.5), expected, check_points, set_endpoints=False - ) - - expected = { - (2, 1): greenish, - (2, 2): reddish, - (3, 2): greenish, - (3, 3): reddish, - (4, 3): greenish, - (4, 4): reddish, - } - - self._check_antialiasing( - (2, 1.25), (4, 3.25), expected, check_points, set_endpoints=False - ) - - def test_anti_aliasing_at_and_outside_the_border(self): - """Ensures antialiasing works correct at a surface's borders.""" - - self.surface = pygame.Surface((10, 10)) - draw.rect(self.surface, BG_RED, (0, 0, 10, 10), 0) - - check_points = [(i, j) for i in range(10) for j in range(10)] - - reddish = (191, 63, 0) - brown = (127, 127, 0) - greenish = (63, 191, 0) - from_point, to_point = (3, 3), (7, 4) - should = { - (4, 3): greenish, - (5, 3): brown, - (6, 3): reddish, - (4, 4): reddish, - (5, 4): brown, - (6, 4): greenish, - } - - for dx, dy in ( - (-4, 0), - (4, 0), # moved to left and right borders - (0, -5), - (0, -4), - (0, -3), # upper border - (0, 5), - (0, 6), - (0, 7), # lower border - (-4, -4), - (-4, -3), - (-3, -4), - ): # upper left corner - first = from_point[0] + dx, from_point[1] + dy - second = to_point[0] + dx, to_point[1] + dy - expected = {(x + dx, y + dy): color for (x, y), color in should.items()} - - self._check_antialiasing(first, second, expected, check_points) - - -### AALines Testing ########################################################### - - -class AALinesMixin(BaseLineMixin): - """Mixin test for drawing aalines. - - This class contains all the general aalines drawing tests. 
- """ - - def test_aalines__args(self): - """Ensures draw aalines accepts the correct args.""" - bounds_rect = self.draw_aalines( - pygame.Surface((3, 3)), (0, 10, 0, 50), False, ((0, 0), (1, 1)), 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__args_without_blend(self): - """Ensures draw aalines accepts the args without a blend.""" - bounds_rect = self.draw_aalines( - pygame.Surface((2, 2)), (0, 0, 0, 50), False, ((0, 0), (1, 1)) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__blend_warning(self): - """From pygame 2, blend=False should raise DeprecationWarning.""" - with warnings.catch_warnings(record=True) as w: - # Cause all warnings to always be triggered. - warnings.simplefilter("always") - # Trigger DeprecationWarning. - self.draw_aalines( - pygame.Surface((2, 2)), (0, 0, 0, 50), False, ((0, 0), (1, 1)), False - ) - # Check if there is only one warning and is a DeprecationWarning. - self.assertEqual(len(w), 1) - self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) - - def test_aalines__kwargs(self): - """Ensures draw aalines accepts the correct kwargs - with and without a blend arg. - """ - surface = pygame.Surface((4, 4)) - color = pygame.Color("yellow") - points = ((0, 0), (1, 1), (2, 2)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "closed": False, - "points": points, - "blend": 1, - }, - {"surface": surface, "color": color, "closed": False, "points": points}, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_aalines(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__kwargs_order_independent(self): - """Ensures draw aalines's kwargs are not order dependent.""" - bounds_rect = self.draw_aalines( - closed=1, - points=((0, 0), (1, 1), (2, 2)), - blend=1, - color=(10, 20, 30), - surface=pygame.Surface((3, 2)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__args_missing(self): - """Ensures draw aalines detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("blue") - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(surface, color, 0) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines() - - def test_aalines__kwargs_missing(self): - """Ensures draw aalines detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((3, 2)), - "color": pygame.Color("red"), - "closed": 1, - "points": ((2, 2), (1, 1)), - "blend": 1, - } - - for name in ("points", "closed", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(**invalid_kwargs) - - def test_aalines__arg_invalid_types(self): - """Ensures draw aalines detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - closed = 0 - points = ((1, 2), (2, 1)) - - with self.assertRaises(TypeError): - # Invalid blend. - bounds_rect = self.draw_aalines(surface, color, closed, points, "1") - - with self.assertRaises(TypeError): - # Invalid points. - bounds_rect = self.draw_aalines(surface, color, closed, (1, 2, 3)) - - with self.assertRaises(TypeError): - # Invalid closed. 
- bounds_rect = self.draw_aalines(surface, color, InvalidBool(), points) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_aalines(surface, 2.3, closed, points) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_aalines((1, 2, 3, 4), color, closed, points) - - def test_aalines__kwarg_invalid_types(self): - """Ensures draw aalines detects invalid kwarg types.""" - valid_kwargs = { - "surface": pygame.Surface((3, 3)), - "color": pygame.Color("green"), - "closed": False, - "points": ((1, 2), (2, 1)), - "blend": 1, - } - - invalid_kwargs = { - "surface": pygame.Surface, - "color": 2.3, - "closed": InvalidBool(), - "points": (0, 0, 0), - "blend": 1.2, - } - - for kwarg in ("surface", "color", "closed", "points", "blend"): - kwargs = dict(valid_kwargs) - kwargs[kwarg] = invalid_kwargs[kwarg] - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(**kwargs) - - def test_aalines__kwarg_invalid_name(self): - """Ensures draw aalines detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - closed = 1 - points = ((1, 2), (2, 1)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "closed": closed, - "points": points, - "blend": 1, - "invalid": 1, - }, - { - "surface": surface, - "color": color, - "closed": closed, - "points": points, - "invalid": 1, - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(**kwargs) - - def test_aalines__args_and_kwargs(self): - """Ensures draw aalines accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 2)) - color = (255, 255, 0, 0) - closed = 0 - points = ((1, 2), (2, 1)) - blend = 1 - kwargs = { - "surface": surface, - "color": color, - "closed": closed, - "points": points, - "blend": blend, - } - - for name in ("surface", "color", "closed", "points", "blend"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_aalines(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_aalines(surface, color, **kwargs) - elif "closed" == name: - bounds_rect = self.draw_aalines(surface, color, closed, **kwargs) - elif "points" == name: - bounds_rect = self.draw_aalines( - surface, color, closed, points, **kwargs - ) - else: - bounds_rect = self.draw_aalines( - surface, color, closed, points, blend, **kwargs - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__valid_blend_values(self): - """Ensures draw aalines accepts different blend values.""" - expected_color = pygame.Color("yellow") - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - pos = (1, 1) - kwargs = { - "surface": surface, - "color": expected_color, - "closed": False, - "points": (pos, (1, 3)), - "blend": None, - } - - for blend in (-10, -2, -1, 0, 1, 2, 10): - surface.fill(surface_color) # Clear for each test. - kwargs["blend"] = blend - - bounds_rect = self.draw_aalines(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color, blend) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__valid_points_format(self): - """Ensures draw aalines accepts different points formats.""" - expected_color = (10, 20, 30, 255) - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "closed": False, - "points": None, - "blend": 0, - } - - # The point type can be a tuple/list/Vector2. 
- point_types = ( - (tuple, tuple, tuple, tuple), # all tuples - (list, list, list, list), # all lists - (Vector2, Vector2, Vector2, Vector2), # all Vector2s - (list, Vector2, tuple, Vector2), - ) # mix - - # The point values can be ints or floats. - point_values = ( - ((1, 1), (2, 1), (2, 2), (1, 2)), - ((1, 1), (2.2, 1), (2.1, 2.2), (1, 2.1)), - ) - - # Each sequence of points can be a tuple or a list. - seq_types = (tuple, list) - - for point_type in point_types: - for values in point_values: - check_pos = values[0] - points = [point_type[i](pt) for i, pt in enumerate(values)] - - for seq_type in seq_types: - surface.fill(surface_color) # Clear for each test. - kwargs["points"] = seq_type(points) - - bounds_rect = self.draw_aalines(**kwargs) - - self.assertEqual(surface.get_at(check_pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__invalid_points_formats(self): - """Ensures draw aalines handles invalid points formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "closed": False, - "points": None, - "blend": 1, - } - - points_fmts = ( - ((1, 1), (2,)), # Too few coords. - ((1, 1), (2, 2, 2)), # Too many coords. - ((1, 1), (2, "2")), # Wrong type. - ((1, 1), set([2, 3])), # Wrong type. - ((1, 1), dict(((2, 2), (3, 3)))), # Wrong type. - set(((1, 1), (1, 2))), # Wrong type. - dict(((1, 1), (4, 4))), - ) # Wrong type. - - for points in points_fmts: - kwargs["points"] = points - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(**kwargs) - - def test_aalines__invalid_points_values(self): - """Ensures draw aalines handles invalid points values correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "closed": False, - "points": None, - "blend": 1, - } - - for points in ([], ((1, 1),)): # Too few points. - for seq_type in (tuple, list): # Test as tuples and lists. - kwargs["points"] = seq_type(points) - - with self.assertRaises(ValueError): - bounds_rect = self.draw_aalines(**kwargs) - - def test_aalines__valid_closed_values(self): - """Ensures draw aalines accepts different closed values.""" - line_color = pygame.Color("blue") - surface_color = pygame.Color("white") - surface = pygame.Surface((5, 5)) - pos = (1, 3) - kwargs = { - "surface": surface, - "color": line_color, - "closed": None, - "points": ((1, 1), (4, 1), (4, 4), (1, 4)), - "blend": 0, - } - - true_values = (-7, 1, 10, "2", 3.1, (4,), [5], True) - false_values = (None, "", 0, (), [], False) - - for closed in true_values + false_values: - surface.fill(surface_color) # Clear for each test. - kwargs["closed"] = closed - expected_color = line_color if closed else surface_color - - bounds_rect = self.draw_aalines(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__valid_color_formats(self): - """Ensures draw aalines accepts different color formats.""" - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - pos = (1, 1) - kwargs = { - "surface": surface, - "color": None, - "closed": False, - "points": (pos, (2, 1)), - "blend": 0, - } - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_aalines(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_aalines__invalid_color_formats(self): - """Ensures draw aalines handles invalid color formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 3)), - "color": None, - "closed": False, - "points": ((1, 1), (1, 2)), - "blend": 0, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_aalines(**kwargs) - - def test_aalines__color(self): - """Tests if the aalines drawn are the correct color. - - Draws aalines around the border of the given surface and checks if all - borders of the surface only contain the given color. - """ - for surface in self._create_surfaces(): - for expected_color in self.COLORS: - self.draw_aalines(surface, expected_color, True, corners(surface)) - - for pos, color in border_pos_and_color(surface): - self.assertEqual(color, expected_color, "pos={}".format(pos)) - - def test_aalines__gaps(self): - """Tests if the aalines drawn contain any gaps. - - Draws aalines around the border of the given surface and checks if - all borders of the surface contain any gaps. - - See: #512 - """ - expected_color = (255, 255, 255) - for surface in self._create_surfaces(): - self.draw_aalines(surface, expected_color, True, corners(surface)) - - for pos, color in border_pos_and_color(surface): - self.assertEqual(color, expected_color, "pos={}".format(pos)) - - def test_aalines__bounding_rect(self): - """Ensures draw aalines returns the correct bounding rect. - - Tests lines with endpoints on and off the surface and blending - enabled and disabled. - """ - line_color = pygame.Color("red") - surf_color = pygame.Color("blue") - width = height = 30 - # Using a rect to help manage where the lines are drawn. - pos_rect = pygame.Rect((0, 0), (width, height)) - - # Testing surfaces of different sizes. One larger than the pos_rect - # and one smaller (to test lines that span the surface). - for size in ((width + 5, height + 5), (width - 5, height - 5)): - surface = pygame.Surface(size, 0, 32) - surf_rect = surface.get_rect() - - # Move pos_rect to different positions to test line endpoints on - # and off the surface. - for pos in rect_corners_mids_and_center(surf_rect): - pos_rect.center = pos - # Shape: Triangle (if closed), ^ caret (if not closed). - pts = (pos_rect.midleft, pos_rect.midtop, pos_rect.midright) - pos = pts[0] # Rect position if nothing drawn. - - for blend in (False, True): # Test non-blending and blending. - for closed in (True, False): - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_aalines( - surface, line_color, closed, pts, blend - ) - - # Calculating the expected_rect after the lines are - # drawn (it uses what is actually drawn). 
- expected_rect = create_bounding_rect(surface, surf_color, pos) - - self.assertEqual(bounding_rect, expected_rect) - - def test_aalines__surface_clip(self): - """Ensures draw aalines respects a surface's clip area.""" - surfw = surfh = 30 - aaline_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (11, 11)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the aalines's pos. - - # Test centering the pos_rect along the clip rect's edge to allow for - # drawing the aalines over the clip_rect's bounds. - for center in rect_corners_mids_and_center(clip_rect): - pos_rect.center = center - pts = (pos_rect.midtop, pos_rect.center, pos_rect.midbottom) - - for closed in (True, False): # Test closed and not closed. - for blend in (0, 1): # Test non-blending and blending. - # Get the expected points by drawing the aalines without - # the clip area set. - surface.set_clip(None) - surface.fill(surface_color) - self.draw_aalines(surface, aaline_color, closed, pts, blend) - - # Need to get the points that are NOT surface_color due to - # the way blend=0 uses the color black to antialias. - expected_pts = get_color_points( - surface, surface_color, clip_rect, False - ) - - # Clear the surface and set the clip area. Redraw the - # aalines and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_aalines(surface, aaline_color, closed, pts, blend) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure the expected_pts - # are not surface_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - self.assertNotEqual(surface.get_at(pt), surface_color, pt) - else: - self.assertEqual(surface.get_at(pt), surface_color, pt) - - surface.unlock() - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever fully supports drawing aalines. -# class PythonDrawAALinesTest(AALinesMixin, PythonDrawTestCase): -# """Test draw_py module function aalines. -# -# This class inherits the general tests from AALinesMixin. It is also the -# class to add any draw_py.draw_aalines specific tests to. -# """ - - -class DrawAALinesTest(AALinesMixin, DrawTestCase): - """Test draw module function aalines. - - This class inherits the general tests from AALinesMixin. It is also the - class to add any draw.aalines specific tests to. - """ - - -### Polygon Testing ########################################################### - -SQUARE = ([0, 0], [3, 0], [3, 3], [0, 3]) -DIAMOND = [(1, 3), (3, 5), (5, 3), (3, 1)] -CROSS = ( - [2, 0], - [4, 0], - [4, 2], - [6, 2], - [6, 4], - [4, 4], - [4, 6], - [2, 6], - [2, 4], - [0, 4], - [0, 2], - [2, 2], -) - - -class DrawPolygonMixin(object): - """Mixin tests for drawing polygons. - - This class contains all the general polygon drawing tests. 
- """ - - def setUp(self): - self.surface = pygame.Surface((20, 20)) - - def test_polygon__args(self): - """Ensures draw polygon accepts the correct args.""" - bounds_rect = self.draw_polygon( - pygame.Surface((3, 3)), (0, 10, 0, 50), ((0, 0), (1, 1), (2, 2)), 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__args_without_width(self): - """Ensures draw polygon accepts the args without a width.""" - bounds_rect = self.draw_polygon( - pygame.Surface((2, 2)), (0, 0, 0, 50), ((0, 0), (1, 1), (2, 2)) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__kwargs(self): - """Ensures draw polygon accepts the correct kwargs - with and without a width arg. - """ - surface = pygame.Surface((4, 4)) - color = pygame.Color("yellow") - points = ((0, 0), (1, 1), (2, 2)) - kwargs_list = [ - {"surface": surface, "color": color, "points": points, "width": 1}, - {"surface": surface, "color": color, "points": points}, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_polygon(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__kwargs_order_independent(self): - """Ensures draw polygon's kwargs are not order dependent.""" - bounds_rect = self.draw_polygon( - color=(10, 20, 30), - surface=pygame.Surface((3, 2)), - width=0, - points=((0, 1), (1, 2), (2, 3)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__args_missing(self): - """Ensures draw polygon detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("blue") - - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon() - - def test_polygon__kwargs_missing(self): - """Ensures draw polygon detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((1, 2)), - "color": pygame.Color("red"), - "points": ((2, 1), (2, 2), (2, 3)), - "width": 1, - } - - for name in ("points", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(**invalid_kwargs) - - def test_polygon__arg_invalid_types(self): - """Ensures draw polygon detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - points = ((0, 1), (1, 2), (1, 3)) - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_polygon(surface, color, points, "1") - - with self.assertRaises(TypeError): - # Invalid points. - bounds_rect = self.draw_polygon(surface, color, (1, 2, 3)) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_polygon(surface, 2.3, points) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_polygon((1, 2, 3, 4), color, points) - - def test_polygon__kwarg_invalid_types(self): - """Ensures draw polygon detects invalid kwarg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("green") - points = ((0, 0), (1, 0), (2, 0)) - width = 1 - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "points": points, - "width": width, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "points": points, - "width": width, - }, - { - "surface": surface, - "color": color, - "points": ((1,), (1,), (1,)), # Invalid points. 
- "width": width, - }, - {"surface": surface, "color": color, "points": points, "width": 1.2}, - ] # Invalid width. - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(**kwargs) - - def test_polygon__kwarg_invalid_name(self): - """Ensures draw polygon detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - points = ((1, 1), (1, 2), (1, 3)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "points": points, - "width": 1, - "invalid": 1, - }, - {"surface": surface, "color": color, "points": points, "invalid": 1}, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(**kwargs) - - def test_polygon__args_and_kwargs(self): - """Ensures draw polygon accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 1)) - color = (255, 255, 0, 0) - points = ((0, 1), (1, 2), (2, 3)) - width = 0 - kwargs = {"surface": surface, "color": color, "points": points, "width": width} - - for name in ("surface", "color", "points", "width"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_polygon(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_polygon(surface, color, **kwargs) - elif "points" == name: - bounds_rect = self.draw_polygon(surface, color, points, **kwargs) - else: - bounds_rect = self.draw_polygon(surface, color, points, width, **kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__valid_width_values(self): - """Ensures draw polygon accepts different width values.""" - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - color = (10, 20, 30, 255) - kwargs = { - "surface": surface, - "color": color, - "points": ((1, 1), (2, 1), (2, 2), (1, 2)), - "width": None, - } - pos = kwargs["points"][0] - - for width in (-100, -10, -1, 0, 1, 10, 100): - surface.fill(surface_color) # Clear for each test. - kwargs["width"] = width - expected_color = color if width >= 0 else surface_color - - bounds_rect = self.draw_polygon(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__valid_points_format(self): - """Ensures draw polygon accepts different points formats.""" - expected_color = (10, 20, 30, 255) - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "points": None, - "width": 0, - } - - # The point type can be a tuple/list/Vector2. - point_types = ( - (tuple, tuple, tuple, tuple), # all tuples - (list, list, list, list), # all lists - (Vector2, Vector2, Vector2, Vector2), # all Vector2s - (list, Vector2, tuple, Vector2), - ) # mix - - # The point values can be ints or floats. - point_values = ( - ((1, 1), (2, 1), (2, 2), (1, 2)), - ((1, 1), (2.2, 1), (2.1, 2.2), (1, 2.1)), - ) - - # Each sequence of points can be a tuple or a list. - seq_types = (tuple, list) - - for point_type in point_types: - for values in point_values: - check_pos = values[0] - points = [point_type[i](pt) for i, pt in enumerate(values)] - - for seq_type in seq_types: - surface.fill(surface_color) # Clear for each test. 
- kwargs["points"] = seq_type(points) - - bounds_rect = self.draw_polygon(**kwargs) - - self.assertEqual(surface.get_at(check_pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__invalid_points_formats(self): - """Ensures draw polygon handles invalid points formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "points": None, - "width": 0, - } - - points_fmts = ( - ((1, 1), (2, 1), (2,)), # Too few coords. - ((1, 1), (2, 1), (2, 2, 2)), # Too many coords. - ((1, 1), (2, 1), (2, "2")), # Wrong type. - ((1, 1), (2, 1), set([2, 3])), # Wrong type. - ((1, 1), (2, 1), dict(((2, 2), (3, 3)))), # Wrong type. - set(((1, 1), (2, 1), (2, 2), (1, 2))), # Wrong type. - dict(((1, 1), (2, 2), (3, 3), (4, 4))), - ) # Wrong type. - - for points in points_fmts: - kwargs["points"] = points - - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(**kwargs) - - def test_polygon__invalid_points_values(self): - """Ensures draw polygon handles invalid points values correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "points": None, - "width": 0, - } - - points_fmts = ( - tuple(), # Too few points. - ((1, 1),), # Too few points. - ((1, 1), (2, 1)), - ) # Too few points. - - for points in points_fmts: - for seq_type in (tuple, list): # Test as tuples and lists. - kwargs["points"] = seq_type(points) - - with self.assertRaises(ValueError): - bounds_rect = self.draw_polygon(**kwargs) - - def test_polygon__valid_color_formats(self): - """Ensures draw polygon accepts different color formats.""" - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": None, - "points": ((1, 1), (2, 1), (2, 2), (1, 2)), - "width": 0, - } - pos = kwargs["points"][0] - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. - kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_polygon(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_polygon__invalid_color_formats(self): - """Ensures draw polygon handles invalid color formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 3)), - "color": None, - "points": ((1, 1), (2, 1), (2, 2), (1, 2)), - "width": 0, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_polygon(**kwargs) - - def test_draw_square(self): - self.draw_polygon(self.surface, RED, SQUARE, 0) - # note : there is a discussion (#234) if draw.polygon should include or - # not the right or lower border; here we stick with current behavior, - # eg include those borders ... 
- for x in range(4): - for y in range(4): - self.assertEqual(self.surface.get_at((x, y)), RED) - - def test_draw_diamond(self): - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - self.draw_polygon(self.surface, GREEN, DIAMOND, 0) - # this diamond shape is equivalent to its four corners, plus the inner square - for x, y in DIAMOND: - self.assertEqual(self.surface.get_at((x, y)), GREEN, msg=str((x, y))) - for x in range(2, 5): - for y in range(2, 5): - self.assertEqual(self.surface.get_at((x, y)), GREEN) - - def test_1_pixel_high_or_wide_shapes(self): - # 1. one-pixel-high, filled - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - self.draw_polygon(self.surface, GREEN, [(x, 2) for x, _y in CROSS], 0) - cross_size = 6 # the maximum x or y coordinate of the cross - for x in range(cross_size + 1): - self.assertEqual(self.surface.get_at((x, 1)), RED) - self.assertEqual(self.surface.get_at((x, 2)), GREEN) - self.assertEqual(self.surface.get_at((x, 3)), RED) - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - # 2. one-pixel-high, not filled - self.draw_polygon(self.surface, GREEN, [(x, 5) for x, _y in CROSS], 1) - for x in range(cross_size + 1): - self.assertEqual(self.surface.get_at((x, 4)), RED) - self.assertEqual(self.surface.get_at((x, 5)), GREEN) - self.assertEqual(self.surface.get_at((x, 6)), RED) - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - # 3. one-pixel-wide, filled - self.draw_polygon(self.surface, GREEN, [(3, y) for _x, y in CROSS], 0) - for y in range(cross_size + 1): - self.assertEqual(self.surface.get_at((2, y)), RED) - self.assertEqual(self.surface.get_at((3, y)), GREEN) - self.assertEqual(self.surface.get_at((4, y)), RED) - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - # 4. one-pixel-wide, not filled - self.draw_polygon(self.surface, GREEN, [(4, y) for _x, y in CROSS], 1) - for y in range(cross_size + 1): - self.assertEqual(self.surface.get_at((3, y)), RED) - self.assertEqual(self.surface.get_at((4, y)), GREEN) - self.assertEqual(self.surface.get_at((5, y)), RED) - - def test_draw_symetric_cross(self): - """Non-regression test for issue #234: x and y were handled inconsistently. - - Also, the result is/was different depending on whether or not the polygon is filled. - """ - # 1. case width = 1 (not filled: `polygon` internally calls the `lines` function) - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - self.draw_polygon(self.surface, GREEN, CROSS, 1) - inside = [(x, 3) for x in range(1, 6)] + [(3, y) for y in range(1, 6)] - for x in range(10): - for y in range(10): - if (x, y) in inside: - self.assertEqual(self.surface.get_at((x, y)), RED) - elif (x in range(2, 5) and y < 7) or (y in range(2, 5) and x < 7): - # we are on the border of the cross: - self.assertEqual(self.surface.get_at((x, y)), GREEN) - else: - # we are outside - self.assertEqual(self.surface.get_at((x, y)), RED) - - # 2.
case width = 0 (filled; this is the example from #234) - pygame.draw.rect(self.surface, RED, (0, 0, 10, 10), 0) - self.draw_polygon(self.surface, GREEN, CROSS, 0) - inside = [(x, 3) for x in range(1, 6)] + [(3, y) for y in range(1, 6)] - for x in range(10): - for y in range(10): - if (x in range(2, 5) and y < 7) or (y in range(2, 5) and x < 7): - # we are on the border of the cross: - self.assertEqual( - self.surface.get_at((x, y)), GREEN, msg=str((x, y)) - ) - else: - # we are outside - self.assertEqual(self.surface.get_at((x, y)), RED) - - def test_illumine_shape(self): - """non-regression on issue #313""" - rect = pygame.Rect((0, 0, 20, 20)) - path_data = [ - (0, 0), - (rect.width - 1, 0), # upper border - (rect.width - 5, 5 - 1), - (5 - 1, 5 - 1), # upper inner - (5 - 1, rect.height - 5), - (0, rect.height - 1), - ] # lower diagonal - # The shape looks like this (the numbers are the indices of path_data) - - # 0**********************1 <-- upper border - # *********************** - # ********************** - # ********************* - # ****3**************2 <-- upper inner border - # ***** - # ***** (more lines here) - # ***** - # ****4 - # **** - # *** - # ** - # 5 - # - - # the current bug is that the "upper inner" line is not drawn, but only - # if 4 or some lower corner exists - pygame.draw.rect(self.surface, RED, (0, 0, 20, 20), 0) - - # 1. First without the corners 4 & 5 - self.draw_polygon(self.surface, GREEN, path_data[:4], 0) - for x in range(20): - self.assertEqual(self.surface.get_at((x, 0)), GREEN) # upper border - for x in range(4, rect.width - 5 + 1): - self.assertEqual(self.surface.get_at((x, 4)), GREEN) # upper inner - - # 2. with the corners 4 & 5 - pygame.draw.rect(self.surface, RED, (0, 0, 20, 20), 0) - self.draw_polygon(self.surface, GREEN, path_data, 0) - for x in range(4, rect.width - 5 + 1): - self.assertEqual(self.surface.get_at((x, 4)), GREEN) # upper inner - - def test_invalid_points(self): - self.assertRaises( - TypeError, - lambda: self.draw_polygon( - self.surface, RED, ((0, 0), (0, 20), (20, 20), 20), 0 - ), - ) - - def test_polygon__bounding_rect(self): - """Ensures draw polygon returns the correct bounding rect. - - Tests polygons on and off the surface and a range of width/thickness - values. - """ - polygon_color = pygame.Color("red") - surf_color = pygame.Color("black") - min_width = min_height = 5 - max_width = max_height = 7 - sizes = ((min_width, min_height), (max_width, max_height)) - surface = pygame.Surface((20, 20), 0, 32) - surf_rect = surface.get_rect() - # Make a rect that is bigger than the surface to help test drawing - # polygons off and partially off the surface. - big_rect = surf_rect.inflate(min_width * 2 + 1, min_height * 2 + 1) - - for pos in rect_corners_mids_and_center( - surf_rect - ) + rect_corners_mids_and_center(big_rect): - # A rect (pos_rect) is used to help create and position the - # polygon. Each of this rect's position attributes will be set to - # the pos value. - for attr in RECT_POSITION_ATTRIBUTES: - # Test using different rect sizes and thickness values. - for width, height in sizes: - pos_rect = pygame.Rect((0, 0), (width, height)) - setattr(pos_rect, attr, pos) - # Points form a triangle with no fully - # horizontal/vertical lines. - vertices = ( - pos_rect.midleft, - pos_rect.midtop, - pos_rect.bottomright, - ) - - for thickness in range(4): - surface.fill(surf_color) # Clear for each test. 
- - bounding_rect = self.draw_polygon( - surface, polygon_color, vertices, thickness - ) - - # Calculating the expected_rect after the polygon - # is drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect( - surface, surf_color, vertices[0] - ) - - self.assertEqual( - bounding_rect, - expected_rect, - "thickness={}".format(thickness), - ) - - def test_polygon__surface_clip(self): - """Ensures draw polygon respects a surface's clip area. - - Tests drawing the polygon filled and unfilled. - """ - surfw = surfh = 30 - polygon_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (8, 10)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the polygon's pos. - - for width in (0, 1): # Filled and unfilled. - # Test centering the polygon along the clip rect's edge. - for center in rect_corners_mids_and_center(clip_rect): - # Get the expected points by drawing the polygon without the - # clip area set. - pos_rect.center = center - vertices = ( - pos_rect.topleft, - pos_rect.topright, - pos_rect.bottomright, - pos_rect.bottomleft, - ) - surface.set_clip(None) - surface.fill(surface_color) - self.draw_polygon(surface, polygon_color, vertices, width) - expected_pts = get_color_points(surface, polygon_color, clip_rect) - - # Clear the surface and set the clip area. Redraw the polygon - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_polygon(surface, polygon_color, vertices, width) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the expected_pts - # are the polygon_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = polygon_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - -class DrawPolygonTest(DrawPolygonMixin, DrawTestCase): - """Test draw module function polygon. - - This class inherits the general tests from DrawPolygonMixin. It is also - the class to add any draw.polygon specific tests to. - """ - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever fully supports drawing polygons. -# @unittest.skip('draw_py.draw_polygon not fully supported yet') -# class PythonDrawPolygonTest(DrawPolygonMixin, PythonDrawTestCase): -# """Test draw_py module function draw_polygon. -# -# This class inherits the general tests from DrawPolygonMixin. It is also -# the class to add any draw_py.draw_polygon specific tests to. -# """ - - -### Rect Testing ############################################################## - - -class DrawRectMixin(object): - """Mixin tests for drawing rects. - - This class contains all the general rect drawing tests. 
- """ - - def test_rect__args(self): - """Ensures draw rect accepts the correct args.""" - bounds_rect = self.draw_rect( - pygame.Surface((2, 2)), - (20, 10, 20, 150), - pygame.Rect((0, 0), (1, 1)), - 2, - 1, - 2, - 3, - 4, - 5, - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__args_without_width(self): - """Ensures draw rect accepts the args without a width and borders.""" - bounds_rect = self.draw_rect( - pygame.Surface((3, 5)), (0, 0, 0, 255), pygame.Rect((0, 0), (1, 1)) - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__kwargs(self): - """Ensures draw rect accepts the correct kwargs - with and without a width and border_radius arg. - """ - kwargs_list = [ - { - "surface": pygame.Surface((5, 5)), - "color": pygame.Color("red"), - "rect": pygame.Rect((0, 0), (1, 2)), - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": pygame.Surface((1, 2)), - "color": (0, 100, 200), - "rect": (0, 0, 1, 1), - }, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_rect(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__kwargs_order_independent(self): - """Ensures draw rect's kwargs are not order dependent.""" - bounds_rect = self.draw_rect( - color=(0, 1, 2), - border_radius=10, - surface=pygame.Surface((2, 3)), - border_top_left_radius=5, - width=-2, - border_top_right_radius=20, - border_bottom_right_radius=0, - rect=pygame.Rect((0, 0), (0, 0)), - border_bottom_left_radius=15, - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__args_missing(self): - """Ensures draw rect detects any missing required args.""" - surface = pygame.Surface((1, 1)) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(surface, pygame.Color("white")) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect() - - def test_rect__kwargs_missing(self): - """Ensures draw rect detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((1, 3)), - "color": pygame.Color("red"), - "rect": pygame.Rect((0, 0), (2, 2)), - "width": 5, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - } - - for name in ("rect", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(**invalid_kwargs) - - def test_rect__arg_invalid_types(self): - """Ensures draw rect detects invalid arg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("white") - rect = pygame.Rect((1, 1), (1, 1)) - - with self.assertRaises(TypeError): - # Invalid border_bottom_right_radius. - bounds_rect = self.draw_rect( - surface, color, rect, 2, border_bottom_right_radius="rad" - ) - - with self.assertRaises(TypeError): - # Invalid border_bottom_left_radius. - bounds_rect = self.draw_rect( - surface, color, rect, 2, border_bottom_left_radius="rad" - ) - - with self.assertRaises(TypeError): - # Invalid border_top_right_radius. - bounds_rect = self.draw_rect( - surface, color, rect, 2, border_top_right_radius="rad" - ) - - with self.assertRaises(TypeError): - # Invalid border_top_left_radius. 
- bounds_rect = self.draw_rect( - surface, color, rect, 2, border_top_left_radius="draw" - ) - - with self.assertRaises(TypeError): - # Invalid border_radius. - bounds_rect = self.draw_rect(surface, color, rect, 2, "rad") - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_rect(surface, color, rect, "2", 4) - - with self.assertRaises(TypeError): - # Invalid rect. - bounds_rect = self.draw_rect(surface, color, (1, 2, 3), 2, 6) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_rect(surface, 2.3, rect, 3, 8) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_rect(rect, color, rect, 4, 10) - - def test_rect__kwarg_invalid_types(self): - """Ensures draw rect detects invalid kwarg types.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("red") - rect = pygame.Rect((0, 0), (1, 1)) - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": (1, 1, 2), # Invalid rect. - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1.1, # Invalid width. - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10.5, # Invalid border_radius. - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5.5, # Invalid top_left_radius. - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": "a", # Invalid top_right_radius. - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": "c", # Invalid bottom_left_radius - "border_bottom_right_radius": 0, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": "d", # Invalid bottom_right. 
- }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(**kwargs) - - def test_rect__kwarg_invalid_name(self): - """Ensures draw rect detects invalid kwarg names.""" - surface = pygame.Surface((2, 1)) - color = pygame.Color("green") - rect = pygame.Rect((0, 0), (3, 3)) - kwargs_list = [ - { - "surface": surface, - "color": color, - "rect": rect, - "width": 1, - "border_radius": 10, - "border_top_left_radius": 5, - "border_top_right_radius": 20, - "border_bottom_left_radius": 15, - "border_bottom_right_radius": 0, - "invalid": 1, - }, - {"surface": surface, "color": color, "rect": rect, "invalid": 1}, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(**kwargs) - - def test_rect__args_and_kwargs(self): - """Ensures draw rect accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 1)) - color = (255, 255, 255, 0) - rect = pygame.Rect((1, 0), (2, 5)) - width = 0 - kwargs = {"surface": surface, "color": color, "rect": rect, "width": width} - - for name in ("surface", "color", "rect", "width"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_rect(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_rect(surface, color, **kwargs) - elif "rect" == name: - bounds_rect = self.draw_rect(surface, color, rect, **kwargs) - else: - bounds_rect = self.draw_rect(surface, color, rect, width, **kwargs) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__valid_width_values(self): - """Ensures draw rect accepts different width values.""" - pos = (1, 1) - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - color = (1, 2, 3, 255) - kwargs = { - "surface": surface, - "color": color, - "rect": pygame.Rect(pos, (2, 2)), - "width": None, - } - - for width in (-1000, -10, -1, 0, 1, 10, 1000): - surface.fill(surface_color) # Clear for each test. - kwargs["width"] = width - expected_color = color if width >= 0 else surface_color - - bounds_rect = self.draw_rect(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__valid_rect_formats(self): - """Ensures draw rect accepts different rect formats.""" - pos = (1, 1) - expected_color = pygame.Color("yellow") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - kwargs = {"surface": surface, "color": expected_color, "rect": None, "width": 0} - rects = ( - pygame.Rect(pos, (1, 1)), - (pos, (2, 2)), - (pos[0], pos[1], 3, 3), - [pos, (2.1, 2.2)], - ) - - for rect in rects: - surface.fill(surface_color) # Clear for each test. 
- kwargs["rect"] = rect - - bounds_rect = self.draw_rect(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__invalid_rect_formats(self): - """Ensures draw rect handles invalid rect formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("red"), - "rect": None, - "width": 0, - } - - invalid_fmts = ( - [], - [1], - [1, 2], - [1, 2, 3], - [1, 2, 3, 4, 5], - set([1, 2, 3, 4]), - [1, 2, 3, "4"], - ) - - for rect in invalid_fmts: - kwargs["rect"] = rect - - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(**kwargs) - - def test_rect__valid_color_formats(self): - """Ensures draw rect accepts different color formats.""" - pos = (1, 1) - red_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": None, - "rect": pygame.Rect(pos, (1, 1)), - "width": 3, - } - reds = ((255, 0, 0), (255, 0, 0, 255), surface.map_rgb(red_color), red_color) - - for color in reds: - surface.fill(surface_color) # Clear for each test. - kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = red_color - - bounds_rect = self.draw_rect(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_rect__invalid_color_formats(self): - """Ensures draw rect handles invalid color formats correctly.""" - pos = (1, 1) - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": None, - "rect": pygame.Rect(pos, (1, 1)), - "width": 1, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_rect(**kwargs) - - def test_rect__fill(self): - self.surf_w, self.surf_h = self.surf_size = (320, 200) - self.surf = pygame.Surface(self.surf_size, pygame.SRCALPHA) - self.color = (1, 13, 24, 205) - rect = pygame.Rect(10, 10, 25, 20) - drawn = self.draw_rect(self.surf, self.color, rect, 0) - - self.assertEqual(drawn, rect) - - # Should be colored where it's supposed to be - for pt in test_utils.rect_area_pts(rect): - color_at_pt = self.surf.get_at(pt) - - self.assertEqual(color_at_pt, self.color) - - # And not where it shouldn't - for pt in test_utils.rect_outer_bounds(rect): - color_at_pt = self.surf.get_at(pt) - - self.assertNotEqual(color_at_pt, self.color) - - # Issue #310: Cannot draw rectangles that are 1 pixel high - bgcolor = pygame.Color("black") - self.surf.fill(bgcolor) - hrect = pygame.Rect(1, 1, self.surf_w - 2, 1) - vrect = pygame.Rect(1, 3, 1, self.surf_h - 4) - - drawn = self.draw_rect(self.surf, self.color, hrect, 0) - - self.assertEqual(drawn, hrect) - - x, y = hrect.topleft - w, h = hrect.size - - self.assertEqual(self.surf.get_at((x - 1, y)), bgcolor) - self.assertEqual(self.surf.get_at((x + w, y)), bgcolor) - for i in range(x, x + w): - self.assertEqual(self.surf.get_at((i, y)), self.color) - - drawn = self.draw_rect(self.surf, self.color, vrect, 0) - - self.assertEqual(drawn, vrect) - - x, y = vrect.topleft - w, h = vrect.size - - self.assertEqual(self.surf.get_at((x, y - 1)), bgcolor) - self.assertEqual(self.surf.get_at((x, y + h)), bgcolor) - for i in range(y, y + h): - self.assertEqual(self.surf.get_at((x, i)), self.color) - - def test_rect__one_pixel_lines(self): - self.surf = pygame.Surface((320, 200), pygame.SRCALPHA) - self.color = (1, 13, 24, 205) - 
- rect = pygame.Rect(10, 10, 56, 20) - - drawn = self.draw_rect(self.surf, self.color, rect, 1) - - self.assertEqual(drawn, rect) - - # Should be colored where it's supposed to be - for pt in test_utils.rect_perimeter_pts(drawn): - color_at_pt = self.surf.get_at(pt) - - self.assertEqual(color_at_pt, self.color) - - # And not where it shouldn't - for pt in test_utils.rect_outer_bounds(drawn): - color_at_pt = self.surf.get_at(pt) - - self.assertNotEqual(color_at_pt, self.color) - - def test_rect__draw_line_width(self): - surface = pygame.Surface((100, 100)) - surface.fill("black") - color = pygame.Color(255, 255, 255) - rect_width = 80 - rect_height = 50 - line_width = 10 - pygame.draw.rect( - surface, color, pygame.Rect(0, 0, rect_width, rect_height), line_width - ) - for i in range(line_width): - self.assertEqual(surface.get_at((i, i)), color) - self.assertEqual(surface.get_at((rect_width - i - 1, i)), color) - self.assertEqual(surface.get_at((i, rect_height - i - 1)), color) - self.assertEqual( - surface.get_at((rect_width - i - 1, rect_height - i - 1)), color - ) - self.assertEqual(surface.get_at((line_width, line_width)), (0, 0, 0)) - self.assertEqual( - surface.get_at((rect_width - line_width - 1, line_width)), (0, 0, 0) - ) - self.assertEqual( - surface.get_at((line_width, rect_height - line_width - 1)), (0, 0, 0) - ) - self.assertEqual( - surface.get_at((rect_width - line_width - 1, rect_height - line_width - 1)), - (0, 0, 0), - ) - - def test_rect__bounding_rect(self): - """Ensures draw rect returns the correct bounding rect. - - Tests rects on and off the surface and a range of width/thickness - values. - """ - rect_color = pygame.Color("red") - surf_color = pygame.Color("black") - min_width = min_height = 5 - max_width = max_height = 7 - sizes = ((min_width, min_height), (max_width, max_height)) - surface = pygame.Surface((20, 20), 0, 32) - surf_rect = surface.get_rect() - # Make a rect that is bigger than the surface to help test drawing - # rects off and partially off the surface. - big_rect = surf_rect.inflate(min_width * 2 + 1, min_height * 2 + 1) - - for pos in rect_corners_mids_and_center( - surf_rect - ) + rect_corners_mids_and_center(big_rect): - # Each of the rect's position attributes will be set to the pos - # value. - for attr in RECT_POSITION_ATTRIBUTES: - # Test using different rect sizes and thickness values. - for width, height in sizes: - rect = pygame.Rect((0, 0), (width, height)) - setattr(rect, attr, pos) - - for thickness in range(4): - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_rect( - surface, rect_color, rect, thickness - ) - - # Calculating the expected_rect after the rect is - # drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect( - surface, surf_color, rect.topleft - ) - - self.assertEqual( - bounding_rect, - expected_rect, - "thickness={}".format(thickness), - ) - - def test_rect__surface_clip(self): - """Ensures draw rect respects a surface's clip area. - - Tests drawing the rect filled and unfilled. - """ - surfw = surfh = 30 - rect_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (8, 10)) - clip_rect.center = surface.get_rect().center - test_rect = clip_rect.copy() # Manages the rect's pos. - - for width in (0, 1): # Filled and unfilled. - # Test centering the rect along the clip rect's edge. 
- for center in rect_corners_mids_and_center(clip_rect): - # Get the expected points by drawing the rect without the - # clip area set. - test_rect.center = center - surface.set_clip(None) - surface.fill(surface_color) - self.draw_rect(surface, rect_color, test_rect, width) - expected_pts = get_color_points(surface, rect_color, clip_rect) - - # Clear the surface and set the clip area. Redraw the rect - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_rect(surface, rect_color, test_rect, width) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the expected_pts - # are the rect_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = rect_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - -class DrawRectTest(DrawRectMixin, DrawTestCase): - """Test draw module function rect. - - This class inherits the general tests from DrawRectMixin. It is also the - class to add any draw.rect specific tests to. - """ - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever properly supports drawing rects. -# @unittest.skip('draw_py.draw_rect not supported yet') -# class PythonDrawRectTest(DrawRectMixin, PythonDrawTestCase): -# """Test draw_py module function draw_rect. -# -# This class inherits the general tests from DrawRectMixin. It is also the -# class to add any draw_py.draw_rect specific tests to. -# """ - - -### Circle Testing ############################################################ - - -class DrawCircleMixin(object): - """Mixin tests for drawing circles. - - This class contains all the general circle drawing tests. - """ - - def test_circle__args(self): - """Ensures draw circle accepts the correct args.""" - bounds_rect = self.draw_circle( - pygame.Surface((3, 3)), (0, 10, 0, 50), (0, 0), 3, 1, 1, 0, 1, 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__args_without_width(self): - """Ensures draw circle accepts the args without a width and - quadrants.""" - bounds_rect = self.draw_circle(pygame.Surface((2, 2)), (0, 0, 0, 50), (1, 1), 1) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__args_with_negative_width(self): - """Ensures draw circle accepts the args with negative width.""" - bounds_rect = self.draw_circle( - pygame.Surface((2, 2)), (0, 0, 0, 50), (1, 1), 1, -1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - self.assertEqual(bounds_rect, pygame.Rect(1, 1, 0, 0)) - - def test_circle__args_with_width_gt_radius(self): - """Ensures draw circle accepts the args with width > radius.""" - bounds_rect = self.draw_circle( - pygame.Surface((2, 2)), (0, 0, 0, 50), (1, 1), 2, 3, 0, 0, 0, 0 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - self.assertEqual(bounds_rect, pygame.Rect(0, 0, 2, 2)) - - def test_circle__kwargs(self): - """Ensures draw circle accepts the correct kwargs - with and without a width and quadrant arguments. 
- """ - kwargs_list = [ - { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("yellow"), - "center": (2, 2), - "radius": 2, - "width": 1, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": False, - "draw_bottom_right": True, - }, - { - "surface": pygame.Surface((2, 1)), - "color": (0, 10, 20), - "center": (1, 1), - "radius": 1, - }, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_circle(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__kwargs_order_independent(self): - """Ensures draw circle's kwargs are not order dependent.""" - bounds_rect = self.draw_circle( - draw_top_right=False, - color=(10, 20, 30), - surface=pygame.Surface((3, 2)), - width=0, - draw_bottom_left=False, - center=(1, 0), - draw_bottom_right=False, - radius=2, - draw_top_left=True, - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__args_missing(self): - """Ensures draw circle detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("blue") - - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(surface, color, (0, 0)) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle() - - def test_circle__kwargs_missing(self): - """Ensures draw circle detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((1, 2)), - "color": pygame.Color("red"), - "center": (1, 0), - "radius": 2, - "width": 1, - "draw_top_right": False, - "draw_top_left": False, - "draw_bottom_left": False, - "draw_bottom_right": True, - } - - for name in ("radius", "center", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(**invalid_kwargs) - - def test_circle__arg_invalid_types(self): - """Ensures draw circle detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - center = (1, 1) - radius = 1 - - with self.assertRaises(TypeError): - # Invalid draw_top_right. - bounds_rect = self.draw_circle( - surface, color, center, radius, 1, "a", 1, 1, 1 - ) - - with self.assertRaises(TypeError): - # Invalid draw_top_left. - bounds_rect = self.draw_circle( - surface, color, center, radius, 1, 1, "b", 1, 1 - ) - - with self.assertRaises(TypeError): - # Invalid draw_bottom_left. - bounds_rect = self.draw_circle( - surface, color, center, radius, 1, 1, 1, "c", 1 - ) - - with self.assertRaises(TypeError): - # Invalid draw_bottom_right. - bounds_rect = self.draw_circle( - surface, color, center, radius, 1, 1, 1, 1, "d" - ) - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_circle(surface, color, center, radius, "1") - - with self.assertRaises(TypeError): - # Invalid radius. - bounds_rect = self.draw_circle(surface, color, center, "2") - - with self.assertRaises(TypeError): - # Invalid center. - bounds_rect = self.draw_circle(surface, color, (1, 2, 3), radius) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_circle(surface, 2.3, center, radius) - - with self.assertRaises(TypeError): - # Invalid surface. 
- bounds_rect = self.draw_circle((1, 2, 3, 4), color, center, radius) - - def test_circle__kwarg_invalid_types(self): - """Ensures draw circle detects invalid kwarg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("green") - center = (0, 1) - radius = 1 - width = 1 - quadrant = 1 - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "center": center, - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "center": center, - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": (1, 1, 1), # Invalid center. - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": "1", # Invalid radius. - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": 1.2, # Invalid width. - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": width, - "draw_top_right": "True", # Invalid draw_top_right - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": "True", # Invalid draw_top_left - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": 3.14, # Invalid draw_bottom_left - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": "quadrant", # Invalid draw_bottom_right - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(**kwargs) - - def test_circle__kwarg_invalid_name(self): - """Ensures draw circle detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - center = (0, 0) - radius = 2 - kwargs_list = [ - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": 1, - "quadrant": 1, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - }, - { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "invalid": 1, - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(**kwargs) - - def test_circle__args_and_kwargs(self): - """Ensures draw circle accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 1)) - color = (255, 255, 0, 0) - center = (1, 0) - radius = 2 - width = 0 - draw_top_right = True - draw_top_left = False - draw_bottom_left = False - draw_bottom_right = 
True - kwargs = { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": width, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - } - - for name in ( - "surface", - "color", - "center", - "radius", - "width", - "draw_top_right", - "draw_top_left", - "draw_bottom_left", - "draw_bottom_right", - ): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_circle(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_circle(surface, color, **kwargs) - elif "center" == name: - bounds_rect = self.draw_circle(surface, color, center, **kwargs) - elif "radius" == name: - bounds_rect = self.draw_circle(surface, color, center, radius, **kwargs) - elif "width" == name: - bounds_rect = self.draw_circle( - surface, color, center, radius, width, **kwargs - ) - elif "draw_top_right" == name: - bounds_rect = self.draw_circle( - surface, color, center, radius, width, draw_top_right, **kwargs - ) - elif "draw_top_left" == name: - bounds_rect = self.draw_circle( - surface, - color, - center, - radius, - width, - draw_top_right, - draw_top_left, - **kwargs - ) - elif "draw_bottom_left" == name: - bounds_rect = self.draw_circle( - surface, - color, - center, - radius, - width, - draw_top_right, - draw_top_left, - draw_bottom_left, - **kwargs - ) - else: - bounds_rect = self.draw_circle( - surface, - color, - center, - radius, - width, - draw_top_right, - draw_top_left, - draw_bottom_left, - draw_bottom_right, - **kwargs - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__valid_width_values(self): - """Ensures draw circle accepts different width values.""" - center = (2, 2) - radius = 1 - pos = (center[0] - radius, center[1]) - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - color = (10, 20, 30, 255) - kwargs = { - "surface": surface, - "color": color, - "center": center, - "radius": radius, - "width": None, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - } - - for width in (-100, -10, -1, 0, 1, 10, 100): - surface.fill(surface_color) # Clear for each test. - kwargs["width"] = width - expected_color = color if width >= 0 else surface_color - - bounds_rect = self.draw_circle(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__valid_radius_values(self): - """Ensures draw circle accepts different radius values.""" - pos = center = (2, 2) - surface_color = pygame.Color("white") - surface = pygame.Surface((3, 4)) - color = (10, 20, 30, 255) - kwargs = { - "surface": surface, - "color": color, - "center": center, - "radius": None, - "width": 0, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - } - - for radius in (-10, -1, 0, 1, 10): - surface.fill(surface_color) # Clear for each test. 
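# Standalone sketch (not from the deleted file) of the width semantics the circle tests
# above exercise: width 0 fills the circle, a positive width draws a ring, and a
# negative width draws nothing and collapses the returned bounding rect to zero size.
# Coordinates and colours are arbitrary illustration values.
import pygame

surf = pygame.Surface((40, 40))
white = pygame.Color("white")

pygame.draw.circle(surf, white, (20, 20), 10, 0)           # width 0: filled
assert surf.get_at((20, 20)) == white                      # centre pixel is coloured

nothing = pygame.draw.circle(surf, white, (5, 5), 3, -1)   # negative width
assert nothing.size == (0, 0)                              # zero-sized bounding rect
assert surf.get_at((5, 5)) != white                        # nothing was drawn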
- kwargs["radius"] = radius - expected_color = color if radius > 0 else surface_color - - bounds_rect = self.draw_circle(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__valid_center_formats(self): - """Ensures draw circle accepts different center formats.""" - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((4, 4)) - kwargs = { - "surface": surface, - "color": expected_color, - "center": None, - "radius": 1, - "width": 0, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - } - x, y = 2, 2 # center position - - # The center values can be ints or floats. - for center in ((x, y), (x + 0.1, y), (x, y + 0.1), (x + 0.1, y + 0.1)): - # The center type can be a tuple/list/Vector2. - for seq_type in (tuple, list, Vector2): - surface.fill(surface_color) # Clear for each test. - kwargs["center"] = seq_type(center) - - bounds_rect = self.draw_circle(**kwargs) - - self.assertEqual(surface.get_at((x, y)), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__valid_color_formats(self): - """Ensures draw circle accepts different color formats.""" - center = (2, 2) - radius = 1 - pos = (center[0] - radius, center[1]) - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((3, 4)) - kwargs = { - "surface": surface, - "color": None, - "center": center, - "radius": radius, - "width": 0, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - } - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_circle(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_circle__invalid_color_formats(self): - """Ensures draw circle handles invalid color formats correctly.""" - kwargs = { - "surface": pygame.Surface((4, 3)), - "color": None, - "center": (1, 2), - "radius": 1, - "width": 0, - "draw_top_right": True, - "draw_top_left": True, - "draw_bottom_left": True, - "draw_bottom_right": True, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_circle(**kwargs) - - def test_circle__floats(self): - """Ensure that floats are accepted.""" - draw.circle( - surface=pygame.Surface((4, 4)), - color=(255, 255, 127), - center=(1.5, 1.5), - radius=1.3, - width=0, - draw_top_right=True, - draw_top_left=True, - draw_bottom_left=True, - draw_bottom_right=True, - ) - - draw.circle( - surface=pygame.Surface((4, 4)), - color=(255, 255, 127), - center=Vector2(1.5, 1.5), - radius=1.3, - width=0, - draw_top_right=True, - draw_top_left=True, - draw_bottom_left=True, - draw_bottom_right=True, - ) - - draw.circle(pygame.Surface((2, 2)), (0, 0, 0, 50), (1.3, 1.3), 1.2) - - # def test_circle_clip(self): - # """ maybe useful to help work out circle clip algorithm.""" - # MAX = max - # MIN = min - # posx=30 - # posy=15 - # radius=1 - # l=29 - # t=14 - # r=30 - # b=16 - # clip_rect_x=0 - # clip_rect_y=0 - # clip_rect_w=30 - # clip_rect_h=30 - - # l = MAX(posx - radius, clip_rect_x) - # t = MAX(posy - radius, clip_rect_y) - # r = MIN(posx + radius, clip_rect_x + clip_rect_w) - # b = MIN(posy + radius, clip_rect_y + clip_rect_h) - - # l, t, MAX(r - l, 0), MAX(b - t, 0) - - def test_circle__bounding_rect(self): - """Ensures draw circle returns the correct bounding rect. - - Tests circles on and off the surface and a range of width/thickness - values. - """ - circle_color = pygame.Color("red") - surf_color = pygame.Color("black") - max_radius = 3 - surface = pygame.Surface((30, 30), 0, 32) - surf_rect = surface.get_rect() - # Make a rect that is bigger than the surface to help test drawing - # circles off and partially off the surface. Make this rect such that - # when centering the test circle on one of its corners, the circle is - # drawn fully off the test surface, but a rect bounding the circle - # would still overlap with the test surface. - big_rect = surf_rect.inflate(max_radius * 2 - 1, max_radius * 2 - 1) - - for pos in rect_corners_mids_and_center( - surf_rect - ) + rect_corners_mids_and_center(big_rect): - # Test using different radius and thickness values. - for radius in range(max_radius + 1): - for thickness in range(radius + 1): - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_circle( - surface, circle_color, pos, radius, thickness - ) - - # Calculating the expected_rect after the circle is - # drawn (it uses what is actually drawn). 
- expected_rect = create_bounding_rect(surface, surf_color, pos) - # print("pos:%s:, radius:%s:, thickness:%s:" % (pos, radius, thickness)) - self.assertEqual(bounding_rect, expected_rect) - - def test_circle_negative_radius(self): - """Ensures negative radius circles return zero sized bounding rect.""" - surf = pygame.Surface((200, 200)) - color = (0, 0, 0, 50) - center = surf.get_height() // 2, surf.get_height() // 2 - - bounding_rect = self.draw_circle(surf, color, center, radius=-1, width=1) - self.assertEqual(bounding_rect.size, (0, 0)) - - def test_circle_zero_radius(self): - """Ensures zero radius circles does not draw a center pixel. - - NOTE: This is backwards incompatible behaviour with 1.9.x. - """ - surf = pygame.Surface((200, 200)) - circle_color = pygame.Color("red") - surf_color = pygame.Color("black") - surf.fill((0, 0, 0)) - center = (100, 100) - radius = 0 - width = 1 - - bounding_rect = self.draw_circle(surf, circle_color, center, radius, width) - expected_rect = create_bounding_rect(surf, surf_color, center) - self.assertEqual(bounding_rect, expected_rect) - self.assertEqual(bounding_rect, pygame.Rect(100, 100, 0, 0)) - - def test_circle__surface_clip(self): - """Ensures draw circle respects a surface's clip area. - - Tests drawing the circle filled and unfilled. - """ - surfw = surfh = 25 - circle_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (10, 10)) - clip_rect.center = surface.get_rect().center - radius = clip_rect.w // 2 + 1 - - for width in (0, 1): # Filled and unfilled. - # Test centering the circle along the clip rect's edge. - for center in rect_corners_mids_and_center(clip_rect): - # Get the expected points by drawing the circle without the - # clip area set. - surface.set_clip(None) - surface.fill(surface_color) - self.draw_circle(surface, circle_color, center, radius, width) - expected_pts = get_color_points(surface, circle_color, clip_rect) - - # Clear the surface and set the clip area. Redraw the circle - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_circle(surface, circle_color, center, radius, width) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the expected_pts - # are the circle_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = circle_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - def test_circle_shape(self): - """Ensures there are no holes in the circle, and no overdrawing. - - Tests drawing a thick circle. - Measures the distance of the drawn pixels from the circle center. 
- """ - surfw = surfh = 100 - circle_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - (cx, cy) = center = (50, 50) - radius = 45 - width = 25 - - dest_rect = self.draw_circle(surface, circle_color, center, radius, width) - - for pt in test_utils.rect_area_pts(dest_rect): - x, y = pt - sqr_distance = (x - cx) ** 2 + (y - cy) ** 2 - if (radius - width + 1) ** 2 < sqr_distance < (radius - 1) ** 2: - self.assertEqual(surface.get_at(pt), circle_color) - if ( - sqr_distance < (radius - width - 1) ** 2 - or sqr_distance > (radius + 1) ** 2 - ): - self.assertEqual(surface.get_at(pt), surface_color) - - def test_circle__diameter(self): - """Ensures draw circle is twice size of radius high and wide.""" - surf = pygame.Surface((200, 200)) - color = (0, 0, 0, 50) - center = surf.get_height() // 2, surf.get_height() // 2 - width = 1 - radius = 6 - for radius in range(1, 65): - bounding_rect = self.draw_circle(surf, color, center, radius, width) - self.assertEqual(bounding_rect.width, radius * 2) - self.assertEqual(bounding_rect.height, radius * 2) - - -class DrawCircleTest(DrawCircleMixin, DrawTestCase): - """Test draw module function circle. - - This class inherits the general tests from DrawCircleMixin. It is also - the class to add any draw.circle specific tests to. - """ - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever properly supports drawing circles. -# @unittest.skip('draw_py.draw_circle not supported yet') -# class PythonDrawCircleTest(DrawCircleMixin, PythonDrawTestCase): -# """Test draw_py module function draw_circle." -# -# This class inherits the general tests from DrawCircleMixin. It is also -# the class to add any draw_py.draw_circle specific tests to. -# """ - - -### Arc Testing ############################################################### - - -class DrawArcMixin(object): - """Mixin tests for drawing arcs. - - This class contains all the general arc drawing tests. - """ - - def test_arc__args(self): - """Ensures draw arc accepts the correct args.""" - bounds_rect = self.draw_arc( - pygame.Surface((3, 3)), (0, 10, 0, 50), (1, 1, 2, 2), 0, 1, 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__args_without_width(self): - """Ensures draw arc accepts the args without a width.""" - bounds_rect = self.draw_arc( - pygame.Surface((2, 2)), (1, 1, 1, 99), pygame.Rect((0, 0), (2, 2)), 1.1, 2.1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__args_with_negative_width(self): - """Ensures draw arc accepts the args with negative width.""" - bounds_rect = self.draw_arc( - pygame.Surface((3, 3)), (10, 10, 50, 50), (1, 1, 2, 2), 0, 1, -1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - self.assertEqual(bounds_rect, pygame.Rect(1, 1, 0, 0)) - - def test_arc__args_with_width_gt_radius(self): - """Ensures draw arc accepts the args with - width > rect.w // 2 and width > rect.h // 2. - """ - rect = pygame.Rect((0, 0), (4, 4)) - bounds_rect = self.draw_arc( - pygame.Surface((3, 3)), (10, 10, 50, 50), rect, 0, 45, rect.w // 2 + 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - bounds_rect = self.draw_arc( - pygame.Surface((3, 3)), (10, 10, 50, 50), rect, 0, 45, rect.h // 2 + 1 - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__kwargs(self): - """Ensures draw arc accepts the correct kwargs - with and without a width arg. 
- """ - kwargs_list = [ - { - "surface": pygame.Surface((4, 4)), - "color": pygame.Color("yellow"), - "rect": pygame.Rect((0, 0), (3, 2)), - "start_angle": 0.5, - "stop_angle": 3, - "width": 1, - }, - { - "surface": pygame.Surface((2, 1)), - "color": (0, 10, 20), - "rect": (0, 0, 2, 2), - "start_angle": 1, - "stop_angle": 3.1, - }, - ] - - for kwargs in kwargs_list: - bounds_rect = self.draw_arc(**kwargs) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__kwargs_order_independent(self): - """Ensures draw arc's kwargs are not order dependent.""" - bounds_rect = self.draw_arc( - stop_angle=1, - start_angle=2.2, - color=(1, 2, 3), - surface=pygame.Surface((3, 2)), - width=1, - rect=pygame.Rect((1, 0), (2, 3)), - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__args_missing(self): - """Ensures draw arc detects any missing required args.""" - surface = pygame.Surface((1, 1)) - color = pygame.Color("red") - rect = pygame.Rect((0, 0), (2, 2)) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(surface, color, rect, 0.1) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(surface, color, rect) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(surface, color) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(surface) - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc() - - def test_arc__kwargs_missing(self): - """Ensures draw arc detects any missing required kwargs.""" - kwargs = { - "surface": pygame.Surface((1, 2)), - "color": pygame.Color("red"), - "rect": pygame.Rect((1, 0), (2, 2)), - "start_angle": 0.1, - "stop_angle": 2, - "width": 1, - } - - for name in ("stop_angle", "start_angle", "rect", "color", "surface"): - invalid_kwargs = dict(kwargs) - invalid_kwargs.pop(name) # Pop from a copy. - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(**invalid_kwargs) - - def test_arc__arg_invalid_types(self): - """Ensures draw arc detects invalid arg types.""" - surface = pygame.Surface((2, 2)) - color = pygame.Color("blue") - rect = pygame.Rect((1, 1), (3, 3)) - - with self.assertRaises(TypeError): - # Invalid width. - bounds_rect = self.draw_arc(surface, color, rect, 0, 1, "1") - - with self.assertRaises(TypeError): - # Invalid stop_angle. - bounds_rect = self.draw_arc(surface, color, rect, 0, "1", 1) - - with self.assertRaises(TypeError): - # Invalid start_angle. - bounds_rect = self.draw_arc(surface, color, rect, "1", 0, 1) - - with self.assertRaises(TypeError): - # Invalid rect. - bounds_rect = self.draw_arc(surface, color, (1, 2, 3, 4, 5), 0, 1, 1) - - with self.assertRaises(TypeError): - # Invalid color. - bounds_rect = self.draw_arc(surface, 2.3, rect, 0, 1, 1) - - with self.assertRaises(TypeError): - # Invalid surface. - bounds_rect = self.draw_arc(rect, color, rect, 0, 1, 1) - - def test_arc__kwarg_invalid_types(self): - """Ensures draw arc detects invalid kwarg types.""" - surface = pygame.Surface((3, 3)) - color = pygame.Color("green") - rect = pygame.Rect((0, 1), (4, 2)) - start = 3 - stop = 4 - kwargs_list = [ - { - "surface": pygame.Surface, # Invalid surface. - "color": color, - "rect": rect, - "start_angle": start, - "stop_angle": stop, - "width": 1, - }, - { - "surface": surface, - "color": 2.3, # Invalid color. - "rect": rect, - "start_angle": start, - "stop_angle": stop, - "width": 1, - }, - { - "surface": surface, - "color": color, - "rect": (0, 0, 0), # Invalid rect. 
- "start_angle": start, - "stop_angle": stop, - "width": 1, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "start_angle": "1", # Invalid start_angle. - "stop_angle": stop, - "width": 1, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "start_angle": start, - "stop_angle": "1", # Invalid stop_angle. - "width": 1, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "start_angle": start, - "stop_angle": stop, - "width": 1.1, - }, - ] # Invalid width. - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(**kwargs) - - def test_arc__kwarg_invalid_name(self): - """Ensures draw arc detects invalid kwarg names.""" - surface = pygame.Surface((2, 3)) - color = pygame.Color("cyan") - rect = pygame.Rect((0, 1), (2, 2)) - start = 0.9 - stop = 2.3 - kwargs_list = [ - { - "surface": surface, - "color": color, - "rect": rect, - "start_angle": start, - "stop_angle": stop, - "width": 1, - "invalid": 1, - }, - { - "surface": surface, - "color": color, - "rect": rect, - "start_angle": start, - "stop_angle": stop, - "invalid": 1, - }, - ] - - for kwargs in kwargs_list: - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(**kwargs) - - def test_arc__args_and_kwargs(self): - """Ensures draw arc accepts a combination of args/kwargs""" - surface = pygame.Surface((3, 1)) - color = (255, 255, 0, 0) - rect = pygame.Rect((1, 0), (2, 3)) - start = 0.6 - stop = 2 - width = 1 - kwargs = { - "surface": surface, - "color": color, - "rect": rect, - "start_angle": start, - "stop_angle": stop, - "width": width, - } - - for name in ("surface", "color", "rect", "start_angle", "stop_angle"): - kwargs.pop(name) - - if "surface" == name: - bounds_rect = self.draw_arc(surface, **kwargs) - elif "color" == name: - bounds_rect = self.draw_arc(surface, color, **kwargs) - elif "rect" == name: - bounds_rect = self.draw_arc(surface, color, rect, **kwargs) - elif "start_angle" == name: - bounds_rect = self.draw_arc(surface, color, rect, start, **kwargs) - elif "stop_angle" == name: - bounds_rect = self.draw_arc(surface, color, rect, start, stop, **kwargs) - else: - bounds_rect = self.draw_arc( - surface, color, rect, start, stop, width, **kwargs - ) - - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__valid_width_values(self): - """Ensures draw arc accepts different width values.""" - arc_color = pygame.Color("yellow") - surface_color = pygame.Color("white") - surface = pygame.Surface((6, 6)) - rect = pygame.Rect((0, 0), (4, 4)) - rect.center = surface.get_rect().center - pos = rect.centerx + 1, rect.centery + 1 - kwargs = { - "surface": surface, - "color": arc_color, - "rect": rect, - "start_angle": 0, - "stop_angle": 7, - "width": None, - } - - for width in (-50, -10, -3, -2, -1, 0, 1, 2, 3, 10, 50): - msg = "width={}".format(width) - surface.fill(surface_color) # Clear for each test. 
- kwargs["width"] = width - expected_color = arc_color if width > 0 else surface_color - - bounds_rect = self.draw_arc(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color, msg) - self.assertIsInstance(bounds_rect, pygame.Rect, msg) - - def test_arc__valid_stop_angle_values(self): - """Ensures draw arc accepts different stop_angle values.""" - expected_color = pygame.Color("blue") - surface_color = pygame.Color("white") - surface = pygame.Surface((6, 6)) - rect = pygame.Rect((0, 0), (4, 4)) - rect.center = surface.get_rect().center - pos = rect.centerx, rect.centery + 1 - kwargs = { - "surface": surface, - "color": expected_color, - "rect": rect, - "start_angle": -17, - "stop_angle": None, - "width": 1, - } - - for stop_angle in (-10, -5.5, -1, 0, 1, 5.5, 10): - msg = "stop_angle={}".format(stop_angle) - surface.fill(surface_color) # Clear for each test. - kwargs["stop_angle"] = stop_angle - - bounds_rect = self.draw_arc(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color, msg) - self.assertIsInstance(bounds_rect, pygame.Rect, msg) - - def test_arc__valid_start_angle_values(self): - """Ensures draw arc accepts different start_angle values.""" - expected_color = pygame.Color("blue") - surface_color = pygame.Color("white") - surface = pygame.Surface((6, 6)) - rect = pygame.Rect((0, 0), (4, 4)) - rect.center = surface.get_rect().center - pos = rect.centerx + 1, rect.centery + 1 - kwargs = { - "surface": surface, - "color": expected_color, - "rect": rect, - "start_angle": None, - "stop_angle": 17, - "width": 1, - } - - for start_angle in (-10.0, -5.5, -1, 0, 1, 5.5, 10.0): - msg = "start_angle={}".format(start_angle) - surface.fill(surface_color) # Clear for each test. - kwargs["start_angle"] = start_angle - - bounds_rect = self.draw_arc(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color, msg) - self.assertIsInstance(bounds_rect, pygame.Rect, msg) - - def test_arc__valid_rect_formats(self): - """Ensures draw arc accepts different rect formats.""" - expected_color = pygame.Color("red") - surface_color = pygame.Color("black") - surface = pygame.Surface((6, 6)) - rect = pygame.Rect((0, 0), (4, 4)) - rect.center = surface.get_rect().center - pos = rect.centerx + 1, rect.centery + 1 - kwargs = { - "surface": surface, - "color": expected_color, - "rect": None, - "start_angle": 0, - "stop_angle": 7, - "width": 1, - } - rects = (rect, (rect.topleft, rect.size), (rect.x, rect.y, rect.w, rect.h)) - - for rect in rects: - surface.fill(surface_color) # Clear for each test. - kwargs["rect"] = rect - - bounds_rect = self.draw_arc(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__valid_color_formats(self): - """Ensures draw arc accepts different color formats.""" - green_color = pygame.Color("green") - surface_color = pygame.Color("black") - surface = pygame.Surface((6, 6)) - rect = pygame.Rect((0, 0), (4, 4)) - rect.center = surface.get_rect().center - pos = rect.centerx + 1, rect.centery + 1 - kwargs = { - "surface": surface, - "color": None, - "rect": rect, - "start_angle": 0, - "stop_angle": 7, - "width": 1, - } - greens = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(green_color), - green_color, - ) - - for color in greens: - surface.fill(surface_color) # Clear for each test. 
- kwargs["color"] = color - - if isinstance(color, int): - expected_color = surface.unmap_rgb(color) - else: - expected_color = green_color - - bounds_rect = self.draw_arc(**kwargs) - - self.assertEqual(surface.get_at(pos), expected_color) - self.assertIsInstance(bounds_rect, pygame.Rect) - - def test_arc__invalid_color_formats(self): - """Ensures draw arc handles invalid color formats correctly.""" - pos = (1, 1) - surface = pygame.Surface((4, 3)) - kwargs = { - "surface": surface, - "color": None, - "rect": pygame.Rect(pos, (2, 2)), - "start_angle": 5, - "stop_angle": 6.1, - "width": 1, - } - - for expected_color in (2.3, self): - kwargs["color"] = expected_color - - with self.assertRaises(TypeError): - bounds_rect = self.draw_arc(**kwargs) - - def todo_test_arc(self): - """Ensure draw arc works correctly.""" - self.fail() - - def test_arc__bounding_rect(self): - """Ensures draw arc returns the correct bounding rect. - - Tests arcs on and off the surface and a range of width/thickness - values. - """ - arc_color = pygame.Color("red") - surf_color = pygame.Color("black") - min_width = min_height = 5 - max_width = max_height = 7 - sizes = ((min_width, min_height), (max_width, max_height)) - surface = pygame.Surface((20, 20), 0, 32) - surf_rect = surface.get_rect() - # Make a rect that is bigger than the surface to help test drawing - # arcs off and partially off the surface. - big_rect = surf_rect.inflate(min_width * 2 + 1, min_height * 2 + 1) - - # Max angle allows for a full circle to be drawn. - start_angle = 0 - stop_angles = (0, 2, 3, 5, math.ceil(2 * math.pi)) - - for pos in rect_corners_mids_and_center( - surf_rect - ) + rect_corners_mids_and_center(big_rect): - # Each of the arc's rect position attributes will be set to the pos - # value. - for attr in RECT_POSITION_ATTRIBUTES: - # Test using different rect sizes, thickness values and stop - # angles. - for width, height in sizes: - arc_rect = pygame.Rect((0, 0), (width, height)) - setattr(arc_rect, attr, pos) - - for thickness in (0, 1, 2, 3, min(width, height)): - for stop_angle in stop_angles: - surface.fill(surf_color) # Clear for each test. - - bounding_rect = self.draw_arc( - surface, - arc_color, - arc_rect, - start_angle, - stop_angle, - thickness, - ) - - # Calculating the expected_rect after the arc - # is drawn (it uses what is actually drawn). - expected_rect = create_bounding_rect( - surface, surf_color, arc_rect.topleft - ) - - self.assertEqual( - bounding_rect, - expected_rect, - "thickness={}".format(thickness), - ) - - def test_arc__surface_clip(self): - """Ensures draw arc respects a surface's clip area.""" - surfw = surfh = 30 - start = 0.1 - end = 0 # end < start so a full circle will be drawn - arc_color = pygame.Color("red") - surface_color = pygame.Color("green") - surface = pygame.Surface((surfw, surfh)) - surface.fill(surface_color) - - clip_rect = pygame.Rect((0, 0), (11, 11)) - clip_rect.center = surface.get_rect().center - pos_rect = clip_rect.copy() # Manages the arc's pos. - - for thickness in (1, 3): # Different line widths. - # Test centering the arc along the clip rect's edge. - for center in rect_corners_mids_and_center(clip_rect): - # Get the expected points by drawing the arc without the - # clip area set. - pos_rect.center = center - surface.set_clip(None) - surface.fill(surface_color) - self.draw_arc(surface, arc_color, pos_rect, start, end, thickness) - expected_pts = get_color_points(surface, arc_color, clip_rect) - - # Clear the surface and set the clip area. 
Redraw the arc - # and check that only the clip area is modified. - surface.fill(surface_color) - surface.set_clip(clip_rect) - - self.draw_arc(surface, arc_color, pos_rect, start, end, thickness) - - surface.lock() # For possible speed up. - - # Check all the surface points to ensure only the expected_pts - # are the arc_color. - for pt in ((x, y) for x in range(surfw) for y in range(surfh)): - if pt in expected_pts: - expected_color = arc_color - else: - expected_color = surface_color - - self.assertEqual(surface.get_at(pt), expected_color, pt) - - surface.unlock() - - -class DrawArcTest(DrawArcMixin, DrawTestCase): - """Test draw module function arc. - - This class inherits the general tests from DrawArcMixin. It is also the - class to add any draw.arc specific tests to. - """ - - -# Commented out to avoid cluttering the test output. Add back in if draw_py -# ever properly supports drawing arcs. -# @unittest.skip('draw_py.draw_arc not supported yet') -# class PythonDrawArcTest(DrawArcMixin, PythonDrawTestCase): -# """Test draw_py module function draw_arc. -# -# This class inherits the general tests from DrawArcMixin. It is also the -# class to add any draw_py.draw_arc specific tests to. -# """ - - -### Draw Module Testing ####################################################### - - -class DrawModuleTest(unittest.TestCase): - """General draw module tests.""" - - def test_path_data_validation(self): - """Test validation of multi-point drawing methods. - - See bug #521 - """ - surf = pygame.Surface((5, 5)) - rect = pygame.Rect(0, 0, 5, 5) - bad_values = ( - "text", - b"bytes", - 1 + 1j, # string, bytes, complex, - object(), - (lambda x: x), - ) # object, function - bad_points = list(bad_values) + [(1,), (1, 2, 3)] # wrong tuple length - bad_points.extend((1, v) for v in bad_values) # one wrong value - good_path = [(1, 1), (1, 3), (3, 3), (3, 1)] - # A) draw.lines - check_pts = [(x, y) for x in range(5) for y in range(5)] - - for method, is_polgon in ( - (draw.lines, 0), - (draw.aalines, 0), - (draw.polygon, 1), - ): - for val in bad_values: - # 1. at the beginning - draw.rect(surf, RED, rect, 0) - with self.assertRaises(TypeError): - if is_polgon: - method(surf, GREEN, [val] + good_path, 0) - else: - method(surf, GREEN, True, [val] + good_path) - - # make sure, nothing was drawn : - self.assertTrue(all(surf.get_at(pt) == RED for pt in check_pts)) - - # 2. not at the beginning (was not checked) - draw.rect(surf, RED, rect, 0) - with self.assertRaises(TypeError): - path = good_path[:2] + [val] + good_path[2:] - if is_polgon: - method(surf, GREEN, path, 0) - else: - method(surf, GREEN, True, path) - - # make sure, nothing was drawn : - self.assertTrue(all(surf.get_at(pt) == RED for pt in check_pts)) - - def test_color_validation(self): - surf = pygame.Surface((10, 10)) - colors = 123456, (1, 10, 100), RED, "#ab12df", "red" - points = ((0, 0), (1, 1), (1, 0)) - - # 1. valid colors - for col in colors: - draw.line(surf, col, (0, 0), (1, 1)) - draw.aaline(surf, col, (0, 0), (1, 1)) - draw.aalines(surf, col, True, points) - draw.lines(surf, col, True, points) - draw.arc(surf, col, pygame.Rect(0, 0, 3, 3), 15, 150) - draw.ellipse(surf, col, pygame.Rect(0, 0, 3, 6), 1) - draw.circle(surf, col, (7, 3), 2) - draw.polygon(surf, col, points, 0) - - # 2. 
invalid colors - for col in (1.256, object(), None): - with self.assertRaises(TypeError): - draw.line(surf, col, (0, 0), (1, 1)) - - with self.assertRaises(TypeError): - draw.aaline(surf, col, (0, 0), (1, 1)) - - with self.assertRaises(TypeError): - draw.aalines(surf, col, True, points) - - with self.assertRaises(TypeError): - draw.lines(surf, col, True, points) - - with self.assertRaises(TypeError): - draw.arc(surf, col, pygame.Rect(0, 0, 3, 3), 15, 150) - - with self.assertRaises(TypeError): - draw.ellipse(surf, col, pygame.Rect(0, 0, 3, 6), 1) - - with self.assertRaises(TypeError): - draw.circle(surf, col, (7, 3), 2) - - with self.assertRaises(TypeError): - draw.polygon(surf, col, points, 0) - - -############################################################################### - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/event_test.py b/venv/Lib/site-packages/pygame/tests/event_test.py deleted file mode 100644 index 8d7dfe0..0000000 --- a/venv/Lib/site-packages/pygame/tests/event_test.py +++ /dev/null @@ -1,840 +0,0 @@ -import os -import sys -import unittest -import collections - -import pygame - - -EVENT_TYPES = ( - # pygame.NOEVENT, - # pygame.ACTIVEEVENT, - pygame.KEYDOWN, - pygame.KEYUP, - pygame.MOUSEMOTION, - pygame.MOUSEBUTTONDOWN, - pygame.MOUSEBUTTONUP, - pygame.JOYAXISMOTION, - pygame.JOYBALLMOTION, - pygame.JOYHATMOTION, - pygame.JOYBUTTONDOWN, - pygame.JOYBUTTONUP, - pygame.VIDEORESIZE, - pygame.VIDEOEXPOSE, - pygame.QUIT, - pygame.SYSWMEVENT, - pygame.USEREVENT, - # pygame.NUMEVENTS, -) - -EVENT_TEST_PARAMS = collections.defaultdict(dict) -EVENT_TEST_PARAMS.update( - { - pygame.KEYDOWN: {"key": pygame.K_SPACE}, - pygame.KEYUP: {"key": pygame.K_SPACE}, - pygame.MOUSEMOTION: dict(), - pygame.MOUSEBUTTONDOWN: dict(button=1), - pygame.MOUSEBUTTONUP: dict(button=1), - } -) - - -NAMES_AND_EVENTS = ( - ("NoEvent", pygame.NOEVENT), - ("ActiveEvent", pygame.ACTIVEEVENT), - ("KeyDown", pygame.KEYDOWN), - ("KeyUp", pygame.KEYUP), - ("MouseMotion", pygame.MOUSEMOTION), - ("MouseButtonDown", pygame.MOUSEBUTTONDOWN), - ("MouseButtonUp", pygame.MOUSEBUTTONUP), - ("JoyAxisMotion", pygame.JOYAXISMOTION), - ("JoyBallMotion", pygame.JOYBALLMOTION), - ("JoyHatMotion", pygame.JOYHATMOTION), - ("JoyButtonDown", pygame.JOYBUTTONDOWN), - ("JoyButtonUp", pygame.JOYBUTTONUP), - ("VideoResize", pygame.VIDEORESIZE), - ("VideoExpose", pygame.VIDEOEXPOSE), - ("Quit", pygame.QUIT), - ("SysWMEvent", pygame.SYSWMEVENT), - ("MidiIn", pygame.MIDIIN), - ("MidiOut", pygame.MIDIOUT), - ("UserEvent", pygame.USEREVENT), - ("Unknown", 0xFFFF), - ("FingerMotion", pygame.FINGERMOTION), - ("FingerDown", pygame.FINGERDOWN), - ("FingerUp", pygame.FINGERUP), - ("MultiGesture", pygame.MULTIGESTURE), - ("MouseWheel", pygame.MOUSEWHEEL), - ("TextInput", pygame.TEXTINPUT), - ("TextEditing", pygame.TEXTEDITING), - ("ControllerAxisMotion", pygame.CONTROLLERAXISMOTION), - ("ControllerButtonDown", pygame.CONTROLLERBUTTONDOWN), - ("ControllerButtonUp", pygame.CONTROLLERBUTTONUP), - ("ControllerDeviceAdded", pygame.CONTROLLERDEVICEADDED), - ("ControllerDeviceRemoved", pygame.CONTROLLERDEVICEREMOVED), - ("ControllerDeviceMapped", pygame.CONTROLLERDEVICEREMAPPED), - ("DropFile", pygame.DROPFILE), -) - -# Add in any SDL 2.0.4 specific events. -if pygame.get_sdl_version() >= (2, 0, 4): - NAMES_AND_EVENTS += ( - ("AudioDeviceAdded", pygame.AUDIODEVICEADDED), - ("AudioDeviceRemoved", pygame.AUDIODEVICEREMOVED), - ) - -# Add in any SDL 2.0.5 specific events. 
-if pygame.get_sdl_version() >= (2, 0, 5): - NAMES_AND_EVENTS += ( - ("DropText", pygame.DROPTEXT), - ("DropBegin", pygame.DROPBEGIN), - ("DropComplete", pygame.DROPCOMPLETE), - ) - - -class EventTypeTest(unittest.TestCase): - def test_Event(self): - """Ensure an Event object can be created.""" - e = pygame.event.Event(pygame.USEREVENT, some_attr=1, other_attr="1") - - self.assertEqual(e.some_attr, 1) - self.assertEqual(e.other_attr, "1") - - # Event now uses tp_dictoffset and tp_members: request 62 - # on Motherhamster Bugzilla. - self.assertEqual(e.type, pygame.USEREVENT) - self.assertIs(e.dict, e.__dict__) - - e.some_attr = 12 - - self.assertEqual(e.some_attr, 12) - - e.new_attr = 15 - - self.assertEqual(e.new_attr, 15) - - self.assertRaises(AttributeError, setattr, e, "type", 0) - self.assertRaises(AttributeError, setattr, e, "dict", None) - - # Ensure attributes are visible to dir(), part of the original - # posted request. - d = dir(e) - attrs = ("type", "dict", "__dict__", "some_attr", "other_attr", "new_attr") - - for attr in attrs: - self.assertIn(attr, d) - - def test_as_str(self): - # Bug reported on Pygame mailing list July 24, 2011: - # For Python 3.x str(event) to raises an UnicodeEncodeError when - # an event attribute is a string with a non-ascii character. - try: - str(pygame.event.Event(EVENT_TYPES[0], a="\xed")) - except UnicodeEncodeError: - self.fail("Event object raised exception for non-ascii character") - # Passed. - - -race_condition_notification = """ -This test is dependent on timing. The event queue is cleared in preparation for -tests. There is a small window where outside events from the OS may have effected -results. Try running the test again. -""" - - -class EventModuleArgsTest(unittest.TestCase): - def setUp(self): - pygame.display.init() - pygame.event.clear() - - def tearDown(self): - pygame.display.quit() - - def test_get(self): - pygame.event.get() - pygame.event.get(None) - pygame.event.get(None, True) - - pygame.event.get(pump=False) - pygame.event.get(pump=True) - pygame.event.get(eventtype=None) - pygame.event.get(eventtype=[pygame.KEYUP, pygame.KEYDOWN]) - pygame.event.get(eventtype=pygame.USEREVENT, pump=False) - - def test_clear(self): - pygame.event.clear() - pygame.event.clear(None) - pygame.event.clear(None, True) - - pygame.event.clear(pump=False) - pygame.event.clear(pump=True) - pygame.event.clear(eventtype=None) - pygame.event.clear(eventtype=[pygame.KEYUP, pygame.KEYDOWN]) - pygame.event.clear(eventtype=pygame.USEREVENT, pump=False) - - def test_peek(self): - pygame.event.peek() - pygame.event.peek(None) - pygame.event.peek(None, True) - - pygame.event.peek(pump=False) - pygame.event.peek(pump=True) - pygame.event.peek(eventtype=None) - pygame.event.peek(eventtype=[pygame.KEYUP, pygame.KEYDOWN]) - pygame.event.peek(eventtype=pygame.USEREVENT, pump=False) - - -class EventCustomTypeTest(unittest.TestCase): - """Those tests are special in that they need the _custom_event counter to - be reset before and/or after being run.""" - - def setUp(self): - pygame.quit() - pygame.init() - pygame.display.init() - - def tearDown(self): - pygame.quit() - - def test_custom_type(self): - self.assertEqual(pygame.event.custom_type(), pygame.USEREVENT + 1) - atype = pygame.event.custom_type() - atype2 = pygame.event.custom_type() - - self.assertEqual(atype, atype2 - 1) - - ev = pygame.event.Event(atype) - pygame.event.post(ev) - queue = pygame.event.get(atype) - self.assertEqual(len(queue), 1) - self.assertEqual(queue[0].type, atype) - - def 
test_custom_type__end_boundary(self): - """Ensure custom_type() raises error when no more custom types. - - The last allowed custom type number should be (pygame.NUMEVENTS - 1). - """ - start = pygame.event.custom_type() + 1 - for i in range(start, pygame.NUMEVENTS): - last = pygame.event.custom_type() - self.assertEqual(last, pygame.NUMEVENTS - 1) - with self.assertRaises(pygame.error): - pygame.event.custom_type() - - def test_custom_type__reset(self): - """Ensure custom events get 'deregistered' by quit().""" - before = pygame.event.custom_type() - self.assertEqual(before, pygame.event.custom_type() - 1) - pygame.quit() - pygame.init() - pygame.display.init() - self.assertEqual(before, pygame.event.custom_type()) - - -class EventModuleTest(unittest.TestCase): - def _assertCountEqual(self, *args, **kwargs): - # Handle method name differences between Python versions. - # Is this still needed? - self.assertCountEqual(*args, **kwargs) - - def _assertExpectedEvents(self, expected, got): - """Find events like expected events, raise on unexpected or missing, - ignore additional event properties if expected properties are present.""" - - # This does greedy matching, don't encode an NP-hard problem - # into your input data, *please* - items_left = got[:] - for expected_element in expected: - for item in items_left: - for key in expected_element.__dict__: - if item.__dict__[key] != expected_element.__dict__[key]: - break - else: - # found item! - items_left.remove(item) - break - else: - raise AssertionError( - "Expected " - + str(expected_element) - + " among remaining events " - + str(items_left) - + " out of " - + str(got) - ) - if len(items_left) > 0: - raise AssertionError("Unexpected Events: " + str(items_left)) - - def setUp(self): - pygame.display.init() - pygame.event.clear() # flush events - - def tearDown(self): - pygame.event.clear() # flush events - pygame.display.quit() - - def test_event_numevents(self): - """Ensures NUMEVENTS does not exceed the maximum SDL number of events.""" - # Ref: https://www.libsdl.org/tmp/SDL/include/SDL_events.h - MAX_SDL_EVENTS = 0xFFFF # SDL_LASTEVENT = 0xFFFF - - self.assertLessEqual(pygame.NUMEVENTS, MAX_SDL_EVENTS) - - def test_event_attribute(self): - e1 = pygame.event.Event(pygame.USEREVENT, attr1="attr1") - self.assertEqual(e1.attr1, "attr1") - - def test_set_blocked(self): - """Ensure events can be blocked from the queue.""" - event = EVENT_TYPES[0] - pygame.event.set_blocked(event) - - self.assertTrue(pygame.event.get_blocked(event)) - - pygame.event.post( - pygame.event.Event(event, **EVENT_TEST_PARAMS[EVENT_TYPES[0]]) - ) - ret = pygame.event.get() - should_be_blocked = [e for e in ret if e.type == event] - - self.assertEqual(should_be_blocked, []) - - def test_set_blocked__event_sequence(self): - """Ensure a sequence of event types can be blocked.""" - event_types = [ - pygame.KEYDOWN, - pygame.KEYUP, - pygame.MOUSEMOTION, - pygame.MOUSEBUTTONDOWN, - pygame.MOUSEBUTTONUP, - ] - - pygame.event.set_blocked(event_types) - - for etype in event_types: - self.assertTrue(pygame.event.get_blocked(etype)) - - def test_set_blocked_all(self): - """Ensure all events can be unblocked at once.""" - pygame.event.set_blocked(None) - - for e in EVENT_TYPES: - self.assertTrue(pygame.event.get_blocked(e)) - - def test_post__and_poll(self): - """Ensure events can be posted to the queue.""" - e1 = pygame.event.Event(pygame.USEREVENT, attr1="attr1") - pygame.event.post(e1) - posted_event = pygame.event.poll() - - self.assertEqual(e1.attr1, posted_event.attr1, 
race_condition_notification) - - # fuzzing event types - for i in range(1, 13): - pygame.event.post( - pygame.event.Event(EVENT_TYPES[i], **EVENT_TEST_PARAMS[EVENT_TYPES[i]]) - ) - - self.assertEqual( - pygame.event.poll().type, EVENT_TYPES[i], race_condition_notification - ) - - def test_post_and_get_keydown(self): - """Ensure keydown events can be posted to the queue.""" - activemodkeys = pygame.key.get_mods() - - events = [ - pygame.event.Event(pygame.KEYDOWN, key=pygame.K_p), - pygame.event.Event(pygame.KEYDOWN, key=pygame.K_y, mod=activemodkeys), - pygame.event.Event(pygame.KEYDOWN, key=pygame.K_g, unicode="g"), - pygame.event.Event(pygame.KEYDOWN, key=pygame.K_a, unicode=None), - pygame.event.Event(pygame.KEYDOWN, key=pygame.K_m, mod=None, window=None), - pygame.event.Event( - pygame.KEYDOWN, key=pygame.K_e, mod=activemodkeys, unicode="e" - ), - ] - - for e in events: - pygame.event.post(e) - posted_event = pygame.event.poll() - self.assertEqual(e, posted_event, race_condition_notification) - - def test_post_large_user_event(self): - pygame.event.post(pygame.event.Event(pygame.USEREVENT, {"a": "a" * 1024})) - e = pygame.event.poll() - - self.assertEqual(e.type, pygame.USEREVENT) - self.assertEqual(e.a, "a" * 1024) - - def test_post_blocked(self): - """ - Test blocked events are not posted. Also test whether post() - returns a boolean correctly - """ - pygame.event.set_blocked(pygame.USEREVENT) - self.assertFalse(pygame.event.post(pygame.event.Event(pygame.USEREVENT))) - self.assertFalse(pygame.event.poll()) - pygame.event.set_allowed(pygame.USEREVENT) - self.assertTrue(pygame.event.post(pygame.event.Event(pygame.USEREVENT))) - self.assertEqual(pygame.event.poll(), pygame.event.Event(pygame.USEREVENT)) - - def test_get(self): - """Ensure get() retrieves all the events on the queue.""" - event_cnt = 10 - for _ in range(event_cnt): - pygame.event.post(pygame.event.Event(pygame.USEREVENT)) - - queue = pygame.event.get() - - self.assertEqual(len(queue), event_cnt) - self.assertTrue(all(e.type == pygame.USEREVENT for e in queue)) - - def test_get_type(self): - ev = pygame.event.Event(pygame.USEREVENT) - pygame.event.post(ev) - queue = pygame.event.get(pygame.USEREVENT) - self.assertEqual(len(queue), 1) - self.assertEqual(queue[0].type, pygame.USEREVENT) - - TESTEVENTS = 10 - for _ in range(TESTEVENTS): - pygame.event.post(ev) - q = pygame.event.get([pygame.USEREVENT]) - self.assertEqual(len(q), TESTEVENTS) - for event in q: - self.assertEqual(event, ev) - - def test_get_exclude_throw(self): - self.assertRaises( - pygame.error, pygame.event.get, pygame.KEYDOWN, False, pygame.KEYUP - ) - - def test_get_exclude(self): - pygame.event.post(pygame.event.Event(pygame.USEREVENT)) - pygame.event.post(pygame.event.Event(pygame.KEYDOWN)) - - queue = pygame.event.get(exclude=pygame.KEYDOWN) - self.assertEqual(len(queue), 1) - self.assertEqual(queue[0].type, pygame.USEREVENT) - - pygame.event.post(pygame.event.Event(pygame.KEYUP)) - pygame.event.post(pygame.event.Event(pygame.USEREVENT)) - queue = pygame.event.get(exclude=(pygame.KEYDOWN, pygame.KEYUP)) - self.assertEqual(len(queue), 1) - self.assertEqual(queue[0].type, pygame.USEREVENT) - - queue = pygame.event.get() - self.assertEqual(len(queue), 2) - - def test_get__empty_queue(self): - """Ensure get() works correctly on an empty queue.""" - expected_events = [] - pygame.event.clear() - - # Ensure all events can be checked. 
- retrieved_events = pygame.event.get() - - self.assertListEqual(retrieved_events, expected_events) - - # Ensure events can be checked individually. - for event_type in EVENT_TYPES: - retrieved_events = pygame.event.get(event_type) - - self.assertListEqual(retrieved_events, expected_events) - - # Ensure events can be checked as a sequence. - retrieved_events = pygame.event.get(EVENT_TYPES) - - self.assertListEqual(retrieved_events, expected_events) - - def test_get__event_sequence(self): - """Ensure get() can handle a sequence of event types.""" - event_types = [pygame.KEYDOWN, pygame.KEYUP, pygame.MOUSEMOTION] - other_event_type = pygame.MOUSEBUTTONUP - - # Test when no events in the queue. - expected_events = [] - pygame.event.clear() - retrieved_events = pygame.event.get(event_types) - - # don't use self._assertCountEqual here. This checks for - # expected properties in events, and ignores unexpected ones, for - # forward compatibility with SDL2. - self._assertExpectedEvents(expected=expected_events, got=retrieved_events) - - # Test when an event type not in the list is in the queue. - expected_events = [] - pygame.event.clear() - pygame.event.post( - pygame.event.Event(other_event_type, **EVENT_TEST_PARAMS[other_event_type]) - ) - - retrieved_events = pygame.event.get(event_types) - - self._assertExpectedEvents(expected=expected_events, got=retrieved_events) - - # Test when 1 event type in the list is in the queue. - expected_events = [ - pygame.event.Event(event_types[0], **EVENT_TEST_PARAMS[event_types[0]]) - ] - pygame.event.clear() - pygame.event.post(expected_events[0]) - - retrieved_events = pygame.event.get(event_types) - - self._assertExpectedEvents(expected=expected_events, got=retrieved_events) - - # Test all events in the list are in the queue. - pygame.event.clear() - expected_events = [] - - for etype in event_types: - expected_events.append( - pygame.event.Event(etype, **EVENT_TEST_PARAMS[etype]) - ) - pygame.event.post(expected_events[-1]) - - retrieved_events = pygame.event.get(event_types) - - self._assertExpectedEvents(expected=expected_events, got=retrieved_events) - - def test_clear(self): - """Ensure clear() removes all the events on the queue.""" - for e in EVENT_TYPES: - pygame.event.post(pygame.event.Event(e, **EVENT_TEST_PARAMS[e])) - poll_event = pygame.event.poll() - - self.assertNotEqual(poll_event.type, pygame.NOEVENT) - - pygame.event.clear() - poll_event = pygame.event.poll() - - self.assertEqual(poll_event.type, pygame.NOEVENT, race_condition_notification) - - def test_clear__empty_queue(self): - """Ensure clear() works correctly on an empty queue.""" - expected_events = [] - pygame.event.clear() - - # Test calling clear() on an already empty queue. - pygame.event.clear() - - retrieved_events = pygame.event.get() - - self.assertListEqual(retrieved_events, expected_events) - - def test_clear__event_sequence(self): - """Ensure a sequence of event types can be cleared from the queue.""" - cleared_event_types = EVENT_TYPES[:5] - expected_event_types = EVENT_TYPES[5:10] - expected_events = [] - - # Add the events to the queue. - for etype in cleared_event_types: - pygame.event.post(pygame.event.Event(etype, **EVENT_TEST_PARAMS[etype])) - - for etype in expected_events: - expected_events.append( - pygame.event.Event(etype, **EVENT_TEST_PARAMS[etype]) - ) - pygame.event.post(expected_events[-1]) - - # Clear the cleared_events from the queue. - pygame.event.clear(cleared_event_types) - - # Check the rest of the events in the queue. 
- remaining_events = pygame.event.get() - - self._assertCountEqual(remaining_events, expected_events) - - def test_event_name(self): - """Ensure event_name() returns the correct event name.""" - for expected_name, event in NAMES_AND_EVENTS: - self.assertEqual( - pygame.event.event_name(event), expected_name, "0x{:X}".format(event) - ) - - def test_event_name__userevent_range(self): - """Ensures event_name() returns the correct name for user events. - - Tests the full range of user events. - """ - expected_name = "UserEvent" - - for event in range(pygame.USEREVENT, pygame.NUMEVENTS): - self.assertEqual( - pygame.event.event_name(event), expected_name, "0x{:X}".format(event) - ) - - def test_event_name__userevent_boundary(self): - """Ensures event_name() does not return 'UserEvent' for events - just outside the user event range. - """ - unexpected_name = "UserEvent" - - for event in (pygame.USEREVENT - 1, pygame.NUMEVENTS): - self.assertNotEqual( - pygame.event.event_name(event), unexpected_name, "0x{:X}".format(event) - ) - - def test_wait(self): - """Ensure wait() waits for an event on the queue.""" - # Test case without timeout. - event = pygame.event.Event(EVENT_TYPES[0], **EVENT_TEST_PARAMS[EVENT_TYPES[0]]) - pygame.event.post(event) - wait_event = pygame.event.wait() - - self.assertEqual(wait_event.type, event.type) - - # Test case with timeout and no event in the queue. - wait_event = pygame.event.wait(250) - self.assertEqual(wait_event.type, pygame.NOEVENT) - - # Test case with timeout and an event in the queue. - event = pygame.event.Event(EVENT_TYPES[0], **EVENT_TEST_PARAMS[EVENT_TYPES[0]]) - pygame.event.post(event) - wait_event = pygame.event.wait(250) - - self.assertEqual(wait_event.type, event.type) - - def test_peek(self): - """Ensure queued events can be peeked at.""" - event_types = [pygame.KEYDOWN, pygame.KEYUP, pygame.MOUSEMOTION] - - for event_type in event_types: - pygame.event.post( - pygame.event.Event(event_type, **EVENT_TEST_PARAMS[event_type]) - ) - - # Ensure events can be checked individually. - for event_type in event_types: - self.assertTrue(pygame.event.peek(event_type)) - - # Ensure events can be checked as a sequence. - self.assertTrue(pygame.event.peek(event_types)) - - def test_peek__event_sequence(self): - """Ensure peek() can handle a sequence of event types.""" - event_types = [pygame.KEYDOWN, pygame.KEYUP, pygame.MOUSEMOTION] - other_event_type = pygame.MOUSEBUTTONUP - - # Test when no events in the queue. - pygame.event.clear() - peeked = pygame.event.peek(event_types) - - self.assertFalse(peeked) - - # Test when an event type not in the list is in the queue. - pygame.event.clear() - pygame.event.post( - pygame.event.Event(other_event_type, **EVENT_TEST_PARAMS[other_event_type]) - ) - - peeked = pygame.event.peek(event_types) - - self.assertFalse(peeked) - - # Test when 1 event type in the list is in the queue. - pygame.event.clear() - pygame.event.post( - pygame.event.Event(event_types[0], **EVENT_TEST_PARAMS[event_types[0]]) - ) - - peeked = pygame.event.peek(event_types) - - self.assertTrue(peeked) - - # Test all events in the list are in the queue. - pygame.event.clear() - for etype in event_types: - pygame.event.post(pygame.event.Event(etype, **EVENT_TEST_PARAMS[etype])) - - peeked = pygame.event.peek(event_types) - - self.assertTrue(peeked) - - def test_peek__empty_queue(self): - """Ensure peek() works correctly on an empty queue.""" - pygame.event.clear() - - # Ensure all events can be checked. 
- peeked = pygame.event.peek() - - self.assertFalse(peeked) - - # Ensure events can be checked individually. - for event_type in EVENT_TYPES: - peeked = pygame.event.peek(event_type) - self.assertFalse(peeked) - - # Ensure events can be checked as a sequence. - peeked = pygame.event.peek(EVENT_TYPES) - - self.assertFalse(peeked) - - def test_set_allowed(self): - """Ensure a blocked event type can be unblocked/allowed.""" - event = EVENT_TYPES[0] - pygame.event.set_blocked(event) - - self.assertTrue(pygame.event.get_blocked(event)) - - pygame.event.set_allowed(event) - - self.assertFalse(pygame.event.get_blocked(event)) - - def test_set_allowed__event_sequence(self): - """Ensure a sequence of blocked event types can be unblocked/allowed.""" - event_types = [ - pygame.KEYDOWN, - pygame.KEYUP, - pygame.MOUSEMOTION, - pygame.MOUSEBUTTONDOWN, - pygame.MOUSEBUTTONUP, - ] - pygame.event.set_blocked(event_types) - - pygame.event.set_allowed(event_types) - - for etype in event_types: - self.assertFalse(pygame.event.get_blocked(etype)) - - def test_set_allowed_all(self): - """Ensure all events can be unblocked/allowed at once.""" - pygame.event.set_blocked(None) - - for e in EVENT_TYPES: - self.assertTrue(pygame.event.get_blocked(e)) - - pygame.event.set_allowed(None) - - for e in EVENT_TYPES: - self.assertFalse(pygame.event.get_blocked(e)) - - def test_pump(self): - """Ensure pump() functions properly.""" - pygame.event.pump() - - # @unittest.skipIf( - # os.environ.get("SDL_VIDEODRIVER") == "dummy", - # 'requires the SDL_VIDEODRIVER to be a non "dummy" value', - # ) - # Fails on SDL 2.0.18 - @unittest.skip("flaky test, and broken on 2.0.18 windows") - def test_set_grab__and_get_symmetric(self): - """Ensure event grabbing can be enabled and disabled. - - WARNING: Moving the mouse off the display during this test can cause it - to fail. - """ - surf = pygame.display.set_mode((10, 10)) - pygame.event.set_grab(True) - - self.assertTrue(pygame.event.get_grab()) - - pygame.event.set_grab(False) - - self.assertFalse(pygame.event.get_grab()) - - def test_event_equality(self): - """Ensure an events can be compared correctly.""" - a = pygame.event.Event(EVENT_TYPES[0], a=1) - b = pygame.event.Event(EVENT_TYPES[0], a=1) - c = pygame.event.Event(EVENT_TYPES[1], a=1) - d = pygame.event.Event(EVENT_TYPES[0], a=2) - - self.assertTrue(a == a) - self.assertFalse(a != a) - self.assertTrue(a == b) - self.assertFalse(a != b) - self.assertTrue(a != c) - self.assertFalse(a == c) - self.assertTrue(a != d) - self.assertFalse(a == d) - - def test_get_blocked(self): - """Ensure an event's blocked state can be retrieved.""" - # Test each event is not blocked. - pygame.event.set_allowed(None) - - for etype in EVENT_TYPES: - blocked = pygame.event.get_blocked(etype) - - self.assertFalse(blocked) - - # Test each event type is blocked. - pygame.event.set_blocked(None) - - for etype in EVENT_TYPES: - blocked = pygame.event.get_blocked(etype) - - self.assertTrue(blocked) - - def test_get_blocked__event_sequence(self): - """Ensure get_blocked() can handle a sequence of event types.""" - event_types = [ - pygame.KEYDOWN, - pygame.KEYUP, - pygame.MOUSEMOTION, - pygame.MOUSEBUTTONDOWN, - pygame.MOUSEBUTTONUP, - ] - - # Test no event types in the list are blocked. - blocked = pygame.event.get_blocked(event_types) - - self.assertFalse(blocked) - - # Test when 1 event type in the list is blocked. 
- pygame.event.set_blocked(event_types[2]) - - blocked = pygame.event.get_blocked(event_types) - - self.assertTrue(blocked) - - # Test all event types in the list are blocked. - pygame.event.set_blocked(event_types) - - blocked = pygame.event.get_blocked(event_types) - - self.assertTrue(blocked) - - # @unittest.skipIf( - # os.environ.get("SDL_VIDEODRIVER") == "dummy", - # 'requires the SDL_VIDEODRIVER to be a non "dummy" value', - # ) - # Fails on SDL 2.0.18 - @unittest.skip("flaky test, and broken on 2.0.18 windows") - def test_get_grab(self): - """Ensure get_grab() works as expected""" - surf = pygame.display.set_mode((10, 10)) - # Test 5 times - for i in range(5): - pygame.event.set_grab(i % 2) - self.assertEqual(pygame.event.get_grab(), i % 2) - - def test_poll(self): - """Ensure poll() works as expected""" - pygame.event.clear() - ev = pygame.event.poll() - # poll() on empty queue should return NOEVENT - self.assertEqual(ev.type, pygame.NOEVENT) - - # test poll returns stuff in same order - e1 = pygame.event.Event(pygame.USEREVENT) - e2 = pygame.event.Event(pygame.KEYDOWN, key=pygame.K_a) - e3 = pygame.event.Event(pygame.KEYUP, key=pygame.K_a) - pygame.event.post(e1) - pygame.event.post(e2) - pygame.event.post(e3) - - self.assertEqual(pygame.event.poll().type, e1.type) - self.assertEqual(pygame.event.poll().type, e2.type) - self.assertEqual(pygame.event.poll().type, e3.type) - self.assertEqual(pygame.event.poll().type, pygame.NOEVENT) - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/A_PyGameMono-8.png b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/A_PyGameMono-8.png deleted file mode 100644 index b15961f..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/A_PyGameMono-8.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-18-100dpi.bdf b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-18-100dpi.bdf deleted file mode 100644 index a88f083..0000000 --- a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-18-100dpi.bdf +++ /dev/null @@ -1,165 +0,0 @@ -STARTFONT 2.1 -FONT -FontForge-PyGameMono-Medium-R-Normal--25-180-100-100-M-250-ISO10646-1 -SIZE 18 100 100 -FONTBOUNDINGBOX 21 22 0 0 -COMMENT "Generated by fontforge, http://fontforge.sourceforge.net" -COMMENT "Created by Lenard Lindstrom,,, with FontForge 2.0 (http://fontforge.sf.net)" -STARTPROPERTIES 29 -FOUNDRY "FontForge" -FAMILY_NAME "PyGameMono" -WEIGHT_NAME "Medium" -SLANT "R" -SETWIDTH_NAME "Normal" -ADD_STYLE_NAME "" -PIXEL_SIZE 25 -POINT_SIZE 180 -RESOLUTION_X 100 -RESOLUTION_Y 100 -SPACING "M" -AVERAGE_WIDTH 250 -CHARSET_REGISTRY "ISO10646" -CHARSET_ENCODING "1" -FONTNAME_REGISTRY "" -CHARSET_COLLECTIONS "ISO10646-1" -FONT_NAME "PyGameMono" -FACE_NAME "PyGame Mono" -FONT_VERSION "001.000" -FONT_ASCENT 20 -FONT_DESCENT 5 -UNDERLINE_POSITION -2 -UNDERLINE_THICKNESS 2 -RAW_ASCENT 800 -RAW_DESCENT 200 -RELATIVE_WEIGHT 50 -RELATIVE_SETWIDTH 50 -FIGURE_WIDTH -1 -AVG_UPPERCASE_WIDTH 250 -ENDPROPERTIES -CHARS 5 -STARTCHAR .notdef -ENCODING 0 -SWIDTH 1000 0 -DWIDTH 25 0 -BBX 20 20 0 0 -BITMAP -FFFFF0 -FFFFF0 -FE07F0 -F801F0 -F000F0 -E00070 -E00070 -C00030 -C00030 -C00030 -C00030 -C00030 -C00030 -E00070 -E00070 -F000F0 -F801F0 -FE07F0 -FFFFF0 -FFFFF0 -ENDCHAR -STARTCHAR A -ENCODING 65 -SWIDTH 1000 0 -DWIDTH 25 0 -BBX 20 21 0 1 -BITMAP -03FC00 -1FFF80 -3FFFC0 -7C03E0 -F000F0 -E00070 -E00070 
-F000F0 -FC03F0 -FFFFF0 -FFFFF0 -FFFFF0 -FF0FF0 -7C03F0 -7801E0 -7800E0 -7000E0 -700060 -600060 -200040 -200040 -ENDCHAR -STARTCHAR B -ENCODING 66 -SWIDTH 1000 0 -DWIDTH 25 0 -BBX 18 20 1 0 -BITMAP -FFFE00 -FFFF80 -7E0780 -7801C0 -7000C0 -3000C0 -3000C0 -3801C0 -3E0780 -3FFF00 -3FFF00 -3E0780 -380180 -3000C0 -3000C0 -3000C0 -7801C0 -7E07C0 -FFFF80 -FFFE00 -ENDCHAR -STARTCHAR C -ENCODING 67 -SWIDTH 1000 0 -DWIDTH 25 0 -BBX 20 20 0 0 -BITMAP -00FC00 -03FF00 -0FFF80 -1F03E0 -3E0070 -7C0010 -780000 -F80000 -F00000 -F00000 -F00000 -F00000 -F80000 -780000 -7C0010 -3E0070 -1F01E0 -0FFFC0 -03FF80 -00FE00 -ENDCHAR -STARTCHAR u13079 -ENCODING 77945 -SWIDTH 1000 0 -DWIDTH 25 0 -BBX 21 10 0 5 -BITMAP -03FC00 -0FFF80 -1E73C0 -78F8F0 -F0F878 -70F870 -3870E0 -1E03C0 -0FFF80 -03FC00 -ENDCHAR -ENDFONT diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-18-75dpi.bdf b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-18-75dpi.bdf deleted file mode 100644 index 127f704..0000000 --- a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-18-75dpi.bdf +++ /dev/null @@ -1,143 +0,0 @@ -STARTFONT 2.1 -FONT -FontForge-PyGameMono-Medium-R-Normal--19-180-75-75-M-190-ISO10646-1 -SIZE 18 75 75 -FONTBOUNDINGBOX 15 17 0 0 -COMMENT "Generated by fontforge, http://fontforge.sourceforge.net" -COMMENT "Created by Lenard Lindstrom,,, with FontForge 2.0 (http://fontforge.sf.net)" -STARTPROPERTIES 29 -FOUNDRY "FontForge" -FAMILY_NAME "PyGameMono" -WEIGHT_NAME "Medium" -SLANT "R" -SETWIDTH_NAME "Normal" -ADD_STYLE_NAME "" -PIXEL_SIZE 19 -POINT_SIZE 180 -RESOLUTION_X 75 -RESOLUTION_Y 75 -SPACING "M" -AVERAGE_WIDTH 190 -CHARSET_REGISTRY "ISO10646" -CHARSET_ENCODING "1" -FONTNAME_REGISTRY "" -CHARSET_COLLECTIONS "ISO10646-1" -FONT_NAME "PyGameMono" -FACE_NAME "PyGame Mono" -FONT_VERSION "001.000" -FONT_ASCENT 15 -FONT_DESCENT 4 -UNDERLINE_POSITION -2 -UNDERLINE_THICKNESS 1 -RAW_ASCENT 800 -RAW_DESCENT 200 -RELATIVE_WEIGHT 50 -RELATIVE_SETWIDTH 50 -FIGURE_WIDTH -1 -AVG_UPPERCASE_WIDTH 190 -ENDPROPERTIES -CHARS 5 -STARTCHAR .notdef -ENCODING 0 -SWIDTH 1000 0 -DWIDTH 19 0 -BBX 15 15 0 0 -BITMAP -FFFE -FFFE -FC7E -F01E -E00E -C006 -C006 -C006 -C006 -C006 -E00E -F01E -FC7E -FFFE -FFFE -ENDCHAR -STARTCHAR A -ENCODING 65 -SWIDTH 1000 0 -DWIDTH 19 0 -BBX 15 17 0 0 -BITMAP -0FE0 -3FF8 -783C -F01E -E00E -E00E -F01E -F83E -FFFE -FFFE -FC7E -701C -701C -600C -600C -4004 -4004 -ENDCHAR -STARTCHAR B -ENCODING 66 -SWIDTH 1000 0 -DWIDTH 19 0 -BBX 15 15 0 0 -BITMAP -FFF8 -7FFC -780E -3006 -3006 -380E -3FF8 -3FF8 -3FF8 -380E -3006 -3006 -7C1E -7FFC -FFF8 -ENDCHAR -STARTCHAR C -ENCODING 67 -SWIDTH 1000 0 -DWIDTH 19 0 -BBX 15 15 0 0 -BITMAP -03E0 -0FF8 -3C1C -7806 -7000 -E000 -E000 -E000 -E000 -E000 -7000 -7806 -3C1C -0FF8 -03E0 -ENDCHAR -STARTCHAR u13079 -ENCODING 77945 -SWIDTH 1000 0 -DWIDTH 19 0 -BBX 15 7 0 4 -BITMAP -0FE0 -3838 -638C -E38E -638C -3838 -0FE0 -ENDCHAR -ENDFONT diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-8.bdf b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-8.bdf deleted file mode 100644 index 17bef06..0000000 --- a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono-8.bdf +++ /dev/null @@ -1,103 +0,0 @@ -STARTFONT 2.1 -FONT -FontForge-PyGameMono-Medium-R-Normal--8-80-75-75-C-80-ISO10646-1 -SIZE 8 75 75 -FONTBOUNDINGBOX 6 7 0 0 -COMMENT "Generated by fontforge, http://fontforge.sourceforge.net" -COMMENT "Created by Lenard Lindstrom,,, with FontForge 2.0 (http://fontforge.sf.net)" -STARTPROPERTIES 29 -FOUNDRY "FontForge" -FAMILY_NAME 
"PyGameMono" -WEIGHT_NAME "Medium" -SLANT "R" -SETWIDTH_NAME "Normal" -ADD_STYLE_NAME "" -PIXEL_SIZE 8 -POINT_SIZE 80 -RESOLUTION_X 75 -RESOLUTION_Y 75 -SPACING "C" -AVERAGE_WIDTH 80 -CHARSET_REGISTRY "ISO10646" -CHARSET_ENCODING "1" -FONTNAME_REGISTRY "" -CHARSET_COLLECTIONS "ISO10646-1" -FONT_NAME "PyGameMono" -FACE_NAME "PyGame Mono" -FONT_VERSION "001.000" -FONT_ASCENT 6 -FONT_DESCENT 2 -UNDERLINE_POSITION -1 -UNDERLINE_THICKNESS 1 -RAW_ASCENT 800 -RAW_DESCENT 200 -RELATIVE_WEIGHT 50 -RELATIVE_SETWIDTH 50 -FIGURE_WIDTH -1 -AVG_UPPERCASE_WIDTH 80 -ENDPROPERTIES -CHARS 5 -STARTCHAR .notdef -ENCODING 0 -SWIDTH 1000 0 -DWIDTH 8 0 -BBX 6 6 0 0 -BITMAP -FC -84 -84 -84 -84 -FC -ENDCHAR -STARTCHAR A -ENCODING 65 -SWIDTH 1000 0 -DWIDTH 8 0 -BBX 6 7 0 0 -BITMAP -78 -84 -84 -FC -84 -84 -84 -ENDCHAR -STARTCHAR B -ENCODING 66 -SWIDTH 1000 0 -DWIDTH 8 0 -BBX 6 6 0 0 -BITMAP -FC -44 -78 -4C -44 -FC -ENDCHAR -STARTCHAR C -ENCODING 67 -SWIDTH 1000 0 -DWIDTH 8 0 -BBX 6 6 0 0 -BITMAP -78 -C4 -C0 -C0 -C4 -78 -ENDCHAR -STARTCHAR u13079 -ENCODING 77945 -SWIDTH 1000 0 -DWIDTH 8 0 -BBX 6 4 0 1 -BITMAP -78 -B4 -B4 -78 -ENDCHAR -ENDFONT diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono.otf b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono.otf deleted file mode 100644 index 5e9b66c..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/PyGameMono.otf and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/test_fixed.otf b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/test_fixed.otf deleted file mode 100644 index 3488898..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/test_fixed.otf and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/test_sans.ttf b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/test_sans.ttf deleted file mode 100644 index 09fac2f..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/test_sans.ttf and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/u13079_PyGameMono-8.png b/venv/Lib/site-packages/pygame/tests/fixtures/fonts/u13079_PyGameMono-8.png deleted file mode 100644 index 911da8a..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/fixtures/fonts/u13079_PyGameMono-8.png and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/xbm_cursors/white_sizing.xbm b/venv/Lib/site-packages/pygame/tests/fixtures/xbm_cursors/white_sizing.xbm deleted file mode 100644 index d334d8d..0000000 --- a/venv/Lib/site-packages/pygame/tests/fixtures/xbm_cursors/white_sizing.xbm +++ /dev/null @@ -1,8 +0,0 @@ -#define resize_white_width 16 -#define resize_white_height 16 -#define resize_white_x_hot 7 -#define resize_white_y_hot 7 -static unsigned char resize_white_bits[] = { - 0xff, 0x03, 0x01, 0x02, 0xfd, 0x03, 0x05, 0x00, 0xf5, 0x0f, 0x15, 0x08, - 0xd5, 0xeb, 0x55, 0xaa, 0x55, 0xaa, 0xd7, 0xab, 0x10, 0xa8, 0xf0, 0xb7, - 0x00, 0xa8, 0xc0, 0x9f, 0x40, 0x80, 0xc0, 0xff}; diff --git a/venv/Lib/site-packages/pygame/tests/fixtures/xbm_cursors/white_sizing_mask.xbm b/venv/Lib/site-packages/pygame/tests/fixtures/xbm_cursors/white_sizing_mask.xbm deleted file mode 100644 index f00bc46..0000000 --- a/venv/Lib/site-packages/pygame/tests/fixtures/xbm_cursors/white_sizing_mask.xbm +++ /dev/null @@ -1,8 +0,0 @@ -#define resize_white_mask_width 16 -#define resize_white_mask_height 16 -#define resize_white_mask_x_hot 7 -#define resize_white_mask_y_hot 7 -static unsigned char 
resize_white_mask_bits[] = { - 0xff, 0x03, 0xff, 0x03, 0xff, 0x03, 0x07, 0x00, 0xf7, 0x0f, 0xf7, 0x0f, - 0xf7, 0xef, 0x77, 0xee, 0x77, 0xee, 0xf7, 0xef, 0xf0, 0xef, 0xf0, 0xff, - 0x00, 0xf8, 0xc0, 0xff, 0xc0, 0xff, 0xc0, 0xff}; diff --git a/venv/Lib/site-packages/pygame/tests/font_test.py b/venv/Lib/site-packages/pygame/tests/font_test.py deleted file mode 100644 index e7bbf67..0000000 --- a/venv/Lib/site-packages/pygame/tests/font_test.py +++ /dev/null @@ -1,633 +0,0 @@ -# -*- coding: utf-8 -*- - -import sys -import os -import unittest -import pathlib -import platform - -import pygame -from pygame import font as pygame_font # So font can be replaced with ftfont - - -FONTDIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures", "fonts") - -UCS_4 = sys.maxunicode > 0xFFFF - - -def equal_images(s1, s2): - size = s1.get_size() - if s2.get_size() != size: - return False - w, h = size - for x in range(w): - for y in range(h): - if s1.get_at((x, y)) != s2.get_at((x, y)): - return False - return True - - -IS_PYPY = "PyPy" == platform.python_implementation() - - -@unittest.skipIf(IS_PYPY, "pypy skip known failure") # TODO -class FontModuleTest(unittest.TestCase): - def setUp(self): - pygame_font.init() - - def tearDown(self): - pygame_font.quit() - - def test_SysFont(self): - # Can only check that a font object is returned. - fonts = pygame_font.get_fonts() - if "arial" in fonts: - # Try to use arial font if it is there, rather than a random font - # which can be different depending on installed fonts on the system. - font_name = "arial" - else: - font_name = sorted(fonts)[0] - o = pygame_font.SysFont(font_name, 20) - self.assertTrue(isinstance(o, pygame_font.FontType)) - o = pygame_font.SysFont(font_name, 20, italic=True) - self.assertTrue(isinstance(o, pygame_font.FontType)) - o = pygame_font.SysFont(font_name, 20, bold=True) - self.assertTrue(isinstance(o, pygame_font.FontType)) - o = pygame_font.SysFont("thisisnotafont", 20) - self.assertTrue(isinstance(o, pygame_font.FontType)) - - def test_get_default_font(self): - self.assertEqual(pygame_font.get_default_font(), "freesansbold.ttf") - - def test_get_fonts_returns_something(self): - fnts = pygame_font.get_fonts() - self.assertTrue(fnts) - - # to test if some files exist... - # def XXtest_has_file_osx_10_5_sdk(self): - # import os - # f = "/Developer/SDKs/MacOSX10.5.sdk/usr/X11/include/ft2build.h" - # self.assertEqual(os.path.exists(f), True) - - # def XXtest_has_file_osx_10_4_sdk(self): - # import os - # f = "/Developer/SDKs/MacOSX10.4u.sdk/usr/X11R6/include/ft2build.h" - # self.assertEqual(os.path.exists(f), True) - - def test_get_fonts(self): - fnts = pygame_font.get_fonts() - - self.assertTrue(fnts, msg=repr(fnts)) - - for name in fnts: - # note, on ubuntu 2.6 they are all unicode strings. - - self.assertTrue(isinstance(name, str), name) - # Font names can be comprised of only numeric characters, so - # just checking name.islower() will not work as expected here. - self.assertFalse(any(c.isupper() for c in name)) - self.assertTrue(name.isalnum(), name) - - def test_get_init(self): - self.assertTrue(pygame_font.get_init()) - pygame_font.quit() - self.assertFalse(pygame_font.get_init()) - - def test_init(self): - pygame_font.init() - - def test_match_font_all_exist(self): - fonts = pygame_font.get_fonts() - - # Ensure all listed fonts are in fact available, and the returned file - # name is a full path. 
- for font in fonts: - path = pygame_font.match_font(font) - self.assertFalse(path is None) - self.assertTrue(os.path.isabs(path)) - - def test_match_font_name(self): - """That match_font accepts names of various types""" - font = pygame_font.get_fonts()[0] - font_path = pygame_font.match_font(font) - self.assertIsNotNone(font_path) - font_b = font.encode() - not_a_font = "thisisnotafont" - not_a_font_b = b"thisisnotafont" - good_font_names = [ - # Check single name bytes. - font_b, - # Check string of comma-separated names. - ",".join([not_a_font, font, not_a_font]), - # Check list of names. - [not_a_font, font, not_a_font], - # Check generator: - (name for name in [not_a_font, font, not_a_font]), - # Check comma-separated bytes. - b",".join([not_a_font_b, font_b, not_a_font_b]), - # Check list of bytes. - [not_a_font_b, font_b, not_a_font_b], - # Check mixed list of bytes and string. - [font, not_a_font, font_b, not_a_font_b], - ] - for font_name in good_font_names: - self.assertEqual(pygame_font.match_font(font_name), font_path, font_name) - - def test_not_match_font_name(self): - """match_font return None when names of various types do not exist""" - not_a_font = "thisisnotafont" - not_a_font_b = b"thisisnotafont" - bad_font_names = [ - not_a_font, - ",".join([not_a_font, not_a_font, not_a_font]), - [not_a_font, not_a_font, not_a_font], - (name for name in [not_a_font, not_a_font, not_a_font]), - not_a_font_b, - b",".join([not_a_font_b, not_a_font_b, not_a_font_b]), - [not_a_font_b, not_a_font_b, not_a_font_b], - [not_a_font, not_a_font_b, not_a_font], - ] - for font_name in bad_font_names: - self.assertIsNone(pygame_font.match_font(font_name), font_name) - - def test_match_font_bold(self): - fonts = pygame_font.get_fonts() - - # Look for a bold font. - self.assertTrue(any(pygame_font.match_font(font, bold=True) for font in fonts)) - - def test_match_font_italic(self): - fonts = pygame_font.get_fonts() - - # Look for an italic font. 
- self.assertTrue( - any(pygame_font.match_font(font, italic=True) for font in fonts) - ) - - def test_issue_742(self): - """that the font background does not crash.""" - surf = pygame.Surface((320, 240)) - font = pygame_font.Font(None, 24) - image = font.render("Test", 0, (255, 255, 255), (0, 0, 0)) - self.assertIsNone(image.get_colorkey()) - image.set_alpha(255) - surf.blit(image, (0, 0)) - - def test_issue_font_alphablit(self): - """Check that blitting anti-aliased text doesn't - change the background blue""" - pygame.display.set_mode((600, 400)) - - font = pygame_font.Font(None, 24) - - (color, text, center, pos) = ((160, 200, 250), "Music", (190, 170), "midright") - img1 = font.render(text, True, color) - - img = pygame.Surface(img1.get_size(), depth=32) - pre_blit_corner_pixel = img.get_at((0, 0)) - img.blit(img1, (0, 0)) - post_blit_corner_pixel = img.get_at((0, 0)) - - self.assertEqual(pre_blit_corner_pixel, post_blit_corner_pixel) - - def test_segfault_after_reinit(self): - """Reinitialization of font module should not cause - segmentation fault""" - import gc - - font = pygame_font.Font(None, 20) - pygame_font.quit() - pygame_font.init() - del font - gc.collect() - - def test_quit(self): - pygame_font.quit() - - -@unittest.skipIf(IS_PYPY, "pypy skip known failure") # TODO -class FontTest(unittest.TestCase): - def setUp(self): - pygame_font.init() - - def tearDown(self): - pygame_font.quit() - - def test_render_args(self): - screen = pygame.display.set_mode((600, 400)) - rect = screen.get_rect() - f = pygame_font.Font(None, 20) - screen.fill((10, 10, 10)) - font_surface = f.render(" bar", True, (0, 0, 0), (255, 255, 255)) - font_rect = font_surface.get_rect() - font_rect.topleft = rect.topleft - self.assertTrue(font_surface) - screen.blit(font_surface, font_rect, font_rect) - pygame.display.update() - self.assertEqual(tuple(screen.get_at((0, 0)))[:3], (255, 255, 255)) - self.assertEqual(tuple(screen.get_at(font_rect.topleft))[:3], (255, 255, 255)) - - # If we don't have a real display, don't do this test. - # Transparent background doesn't seem to work without a read video card. - if os.environ.get("SDL_VIDEODRIVER") != "dummy": - screen.fill((10, 10, 10)) - font_surface = f.render(" bar", True, (0, 0, 0), None) - font_rect = font_surface.get_rect() - font_rect.topleft = rect.topleft - self.assertTrue(font_surface) - screen.blit(font_surface, font_rect, font_rect) - pygame.display.update() - self.assertEqual(tuple(screen.get_at((0, 0)))[:3], (10, 10, 10)) - self.assertEqual(tuple(screen.get_at(font_rect.topleft))[:3], (10, 10, 10)) - - screen.fill((10, 10, 10)) - font_surface = f.render(" bar", True, (0, 0, 0)) - font_rect = font_surface.get_rect() - font_rect.topleft = rect.topleft - self.assertTrue(font_surface) - screen.blit(font_surface, font_rect, font_rect) - pygame.display.update(rect) - self.assertEqual(tuple(screen.get_at((0, 0)))[:3], (10, 10, 10)) - self.assertEqual(tuple(screen.get_at(font_rect.topleft))[:3], (10, 10, 10)) - - -@unittest.skipIf(IS_PYPY, "pypy skip known failure") # TODO -class FontTypeTest(unittest.TestCase): - def setUp(self): - pygame_font.init() - - def tearDown(self): - pygame_font.quit() - - def test_get_ascent(self): - # Ckecking ascent would need a custom test font to do properly. 
- f = pygame_font.Font(None, 20) - ascent = f.get_ascent() - self.assertTrue(isinstance(ascent, int)) - self.assertTrue(ascent > 0) - s = f.render("X", False, (255, 255, 255)) - self.assertTrue(s.get_size()[1] > ascent) - - def test_get_descent(self): - # Ckecking descent would need a custom test font to do properly. - f = pygame_font.Font(None, 20) - descent = f.get_descent() - self.assertTrue(isinstance(descent, int)) - self.assertTrue(descent < 0) - - def test_get_height(self): - # Ckecking height would need a custom test font to do properly. - f = pygame_font.Font(None, 20) - height = f.get_height() - self.assertTrue(isinstance(height, int)) - self.assertTrue(height > 0) - s = f.render("X", False, (255, 255, 255)) - self.assertTrue(s.get_size()[1] == height) - - def test_get_linesize(self): - # Ckecking linesize would need a custom test font to do properly. - # Questions: How do linesize, height and descent relate? - f = pygame_font.Font(None, 20) - linesize = f.get_linesize() - self.assertTrue(isinstance(linesize, int)) - self.assertTrue(linesize > 0) - - def test_metrics(self): - # Ensure bytes decoding works correctly. Can only compare results - # with unicode for now. - f = pygame_font.Font(None, 20) - um = f.metrics(".") - bm = f.metrics(b".") - - self.assertEqual(len(um), 1) - self.assertEqual(len(bm), 1) - self.assertIsNotNone(um[0]) - self.assertEqual(um, bm) - - u = "\u212A" - b = u.encode("UTF-16")[2:] # Keep byte order consistent. [2:] skips BOM - bm = f.metrics(b) - - self.assertEqual(len(bm), 2) - - try: # FIXME why do we do this try/except ? - um = f.metrics(u) - except pygame.error: - pass - else: - self.assertEqual(len(um), 1) - self.assertNotEqual(bm[0], um[0]) - self.assertNotEqual(bm[1], um[0]) - - if UCS_4: - u = u"\U00013000" - bm = f.metrics(u) - - self.assertEqual(len(bm), 1) - self.assertIsNone(bm[0]) - - return # unfinished - # The documentation is useless here. How large a list? - # How do list positions relate to character codes? - # What about unicode characters? - - # __doc__ (as of 2008-08-02) for pygame_font.Font.metrics: - - # Font.metrics(text): return list - # Gets the metrics for each character in the pased string. - # - # The list contains tuples for each character, which contain the - # minimum X offset, the maximum X offset, the minimum Y offset, the - # maximum Y offset and the advance offset (bearing plus width) of the - # character. [(minx, maxx, miny, maxy, advance), (minx, maxx, miny, - # maxy, advance), ...] - - self.fail() - - def test_render(self): - f = pygame_font.Font(None, 20) - s = f.render("foo", True, [0, 0, 0], [255, 255, 255]) - s = f.render("xxx", True, [0, 0, 0], [255, 255, 255]) - s = f.render("", True, [0, 0, 0], [255, 255, 255]) - s = f.render("foo", False, [0, 0, 0], [255, 255, 255]) - s = f.render("xxx", False, [0, 0, 0], [255, 255, 255]) - s = f.render("xxx", False, [0, 0, 0]) - s = f.render(" ", False, [0, 0, 0]) - s = f.render(" ", False, [0, 0, 0], [255, 255, 255]) - # null text should be 0 pixel wide. - s = f.render("", False, [0, 0, 0], [255, 255, 255]) - self.assertEqual(s.get_size()[0], 0) - # None text should be 0 pixel wide. - s = f.render(None, False, [0, 0, 0], [255, 255, 255]) - self.assertEqual(s.get_size()[0], 0) - # Non-text should raise a TypeError. - self.assertRaises(TypeError, f.render, [], False, [0, 0, 0], [255, 255, 255]) - self.assertRaises(TypeError, f.render, 1, False, [0, 0, 0], [255, 255, 255]) - # is background transparent for antialiasing? 
- s = f.render(".", True, [255, 255, 255]) - self.assertEqual(s.get_at((0, 0))[3], 0) - # is Unicode and bytes encoding correct? - # Cannot really test if the correct characters are rendered, but - # at least can assert the encodings differ. - su = f.render(".", False, [0, 0, 0], [255, 255, 255]) - sb = f.render(b".", False, [0, 0, 0], [255, 255, 255]) - self.assertTrue(equal_images(su, sb)) - u = "\u212A" - b = u.encode("UTF-16")[2:] # Keep byte order consistent. [2:] skips BOM - sb = f.render(b, False, [0, 0, 0], [255, 255, 255]) - try: # FIXME why do we do this try/except ? - su = f.render(u, False, [0, 0, 0], [255, 255, 255]) - except pygame.error: - pass - else: - self.assertFalse(equal_images(su, sb)) - - b = b"ab\x00cd" - self.assertRaises(ValueError, f.render, b, 0, [0, 0, 0]) - u = "ab\x00cd" - self.assertRaises(ValueError, f.render, b, 0, [0, 0, 0]) - - def test_render_ucs2_ucs4(self): - """that it renders without raising if there is a new enough SDL_ttf.""" - f = pygame_font.Font(None, 20) - # If the font module is SDL_ttf < 2.0.15 based, then it only supports UCS-2 - # it will raise an exception for an out-of-range UCS-4 code point. - if UCS_4 and hasattr(pygame_font, "UCS_4"): - ucs_2 = "\uFFEE" - s = f.render(ucs_2, False, [0, 0, 0], [255, 255, 255]) - ucs_4 = "\U00010000" - s = f.render(ucs_4, False, [0, 0, 0], [255, 255, 255]) - - def test_set_bold(self): - f = pygame_font.Font(None, 20) - self.assertFalse(f.get_bold()) - f.set_bold(True) - self.assertTrue(f.get_bold()) - f.set_bold(False) - self.assertFalse(f.get_bold()) - - def test_set_italic(self): - f = pygame_font.Font(None, 20) - self.assertFalse(f.get_italic()) - f.set_italic(True) - self.assertTrue(f.get_italic()) - f.set_italic(False) - self.assertFalse(f.get_italic()) - - def test_set_underline(self): - f = pygame_font.Font(None, 20) - self.assertFalse(f.get_underline()) - f.set_underline(True) - self.assertTrue(f.get_underline()) - f.set_underline(False) - self.assertFalse(f.get_underline()) - - def test_bold_attr(self): - f = pygame_font.Font(None, 20) - self.assertFalse(f.bold) - f.bold = True - self.assertTrue(f.bold) - f.bold = False - self.assertFalse(f.bold) - - def test_set_italic_property(self): - f = pygame_font.Font(None, 20) - self.assertFalse(f.italic) - f.italic = True - self.assertTrue(f.italic) - f.italic = False - self.assertFalse(f.italic) - - def test_set_underline_property(self): - f = pygame_font.Font(None, 20) - self.assertFalse(f.underline) - f.underline = True - self.assertTrue(f.underline) - f.underline = False - self.assertFalse(f.underline) - - def test_size(self): - f = pygame_font.Font(None, 20) - text = "Xg" - size = f.size(text) - w, h = size - s = f.render(text, False, (255, 255, 255)) - btext = text.encode("ascii") - - self.assertIsInstance(w, int) - self.assertIsInstance(h, int) - self.assertEqual(s.get_size(), size) - self.assertEqual(f.size(btext), size) - - text = "\u212A" - btext = text.encode("UTF-16")[2:] # Keep the byte order consistent. 
- bsize = f.size(btext) - size = f.size(text) - - self.assertNotEqual(size, bsize) - - def test_font_file_not_found(self): - # A per BUG reported by Bo Jangeborg on pygame-user mailing list, - # http://www.mail-archive.com/pygame-users@seul.org/msg11675.html - - pygame_font.init() - self.assertRaises( - FileNotFoundError, pygame_font.Font, str("some-fictional-font.ttf"), 20 - ) - - def test_load_from_file(self): - font_name = pygame_font.get_default_font() - font_path = os.path.join( - os.path.split(pygame.__file__)[0], pygame_font.get_default_font() - ) - f = pygame_font.Font(font_path, 20) - - def test_load_from_pathlib(self): - font_name = pygame_font.get_default_font() - font_path = os.path.join( - os.path.split(pygame.__file__)[0], pygame_font.get_default_font() - ) - f = pygame_font.Font(pathlib.Path(font_path), 20) - - def test_load_from_file_obj(self): - font_name = pygame_font.get_default_font() - font_path = os.path.join( - os.path.split(pygame.__file__)[0], pygame_font.get_default_font() - ) - with open(font_path, "rb") as f: - font = pygame_font.Font(f, 20) - - def test_load_default_font_filename(self): - # In font_init, a special case is when the filename argument is - # identical to the default font file name. - f = pygame_font.Font(pygame_font.get_default_font(), 20) - - def _load_unicode(self, path): - import shutil - - fdir = str(FONTDIR) - temp = os.path.join(fdir, path) - pgfont = os.path.join(fdir, u"test_sans.ttf") - shutil.copy(pgfont, temp) - try: - with open(temp, "rb") as f: - pass - except FileNotFoundError: - raise unittest.SkipTest("the path cannot be opened") - try: - pygame_font.Font(temp, 20) - finally: - os.remove(temp) - - def test_load_from_file_unicode_0(self): - """ASCII string as a unicode object""" - self._load_unicode(u"temp_file.ttf") - - def test_load_from_file_unicode_1(self): - self._load_unicode(u"你好.ttf") - - def test_load_from_file_bytes(self): - font_path = os.path.join( - os.path.split(pygame.__file__)[0], pygame_font.get_default_font() - ) - filesystem_encoding = sys.getfilesystemencoding() - filesystem_errors = "replace" if sys.platform == "win32" else "surrogateescape" - try: # FIXME why do we do this try/except ? 
- font_path = font_path.decode(filesystem_encoding, filesystem_errors) - except AttributeError: - pass - bfont_path = font_path.encode(filesystem_encoding, filesystem_errors) - f = pygame_font.Font(bfont_path, 20) - - -@unittest.skipIf(IS_PYPY, "pypy skip known failure") # TODO -class VisualTests(unittest.TestCase): - - __tags__ = ["interactive"] - - screen = None - aborted = False - - def setUp(self): - if self.screen is None: - pygame.init() - self.screen = pygame.display.set_mode((600, 200)) - self.screen.fill((255, 255, 255)) - pygame.display.flip() - self.f = pygame_font.Font(None, 32) - - def abort(self): - if self.screen is not None: - pygame.quit() - self.aborted = True - - def query(self, bold=False, italic=False, underline=False, antialiase=False): - if self.aborted: - return False - spacing = 10 - offset = 20 - y = spacing - f = self.f - screen = self.screen - screen.fill((255, 255, 255)) - pygame.display.flip() - if not (bold or italic or underline or antialiase): - text = "normal" - else: - modes = [] - if bold: - modes.append("bold") - if italic: - modes.append("italic") - if underline: - modes.append("underlined") - if antialiase: - modes.append("antialiased") - text = "%s (y/n):" % ("-".join(modes),) - f.set_bold(bold) - f.set_italic(italic) - f.set_underline(underline) - s = f.render(text, antialiase, (0, 0, 0)) - screen.blit(s, (offset, y)) - y += s.get_size()[1] + spacing - f.set_bold(False) - f.set_italic(False) - f.set_underline(False) - s = f.render("(some comparison text)", False, (0, 0, 0)) - screen.blit(s, (offset, y)) - pygame.display.flip() - while 1: - for evt in pygame.event.get(): - if evt.type == pygame.KEYDOWN: - if evt.key == pygame.K_ESCAPE: - self.abort() - return False - if evt.key == pygame.K_y: - return True - if evt.key == pygame.K_n: - return False - if evt.type == pygame.QUIT: - self.abort() - return False - - def test_bold(self): - self.assertTrue(self.query(bold=True)) - - def test_italic(self): - self.assertTrue(self.query(italic=True)) - - def test_underline(self): - self.assertTrue(self.query(underline=True)) - - def test_antialiase(self): - self.assertTrue(self.query(antialiase=True)) - - def test_bold_antialiase(self): - self.assertTrue(self.query(bold=True, antialiase=True)) - - def test_italic_underline(self): - self.assertTrue(self.query(italic=True, underline=True)) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/freetype_tags.py b/venv/Lib/site-packages/pygame/tests/freetype_tags.py deleted file mode 100644 index d84cbb7..0000000 --- a/venv/Lib/site-packages/pygame/tests/freetype_tags.py +++ /dev/null @@ -1,11 +0,0 @@ -__tags__ = ["development"] - -exclude = False - -try: - import pygame.freetype -except ImportError: - exclude = True - -if exclude: - __tags__.extend(["ignore", "subprocess_ignore"]) diff --git a/venv/Lib/site-packages/pygame/tests/freetype_test.py b/venv/Lib/site-packages/pygame/tests/freetype_test.py deleted file mode 100644 index 815dfe3..0000000 --- a/venv/Lib/site-packages/pygame/tests/freetype_test.py +++ /dev/null @@ -1,1799 +0,0 @@ -import os - -if os.environ.get("SDL_VIDEODRIVER") == "dummy": - __tags__ = ("ignore", "subprocess_ignore") - -import unittest -import ctypes -import weakref -import gc -import pathlib -import platform - -IS_PYPY = "PyPy" == platform.python_implementation() - - -try: - from pygame.tests.test_utils import arrinter -except NameError: - pass - -import pygame - -try: - import pygame.freetype as ft -except ImportError: - ft = None - - 
-FONTDIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures", "fonts") - - -def nullfont(): - """return an uninitialized font instance""" - return ft.Font.__new__(ft.Font) - - -max_point_size_FX6 = 0x7FFFFFFF -max_point_size = max_point_size_FX6 >> 6 -max_point_size_f = max_point_size_FX6 * 0.015625 - - -def surf_same_image(a, b): - """Return True if a's pixel buffer is identical to b's""" - - a_sz = a.get_height() * a.get_pitch() - b_sz = b.get_height() * b.get_pitch() - if a_sz != b_sz: - return False - a_bytes = ctypes.string_at(a._pixels_address, a_sz) - b_bytes = ctypes.string_at(b._pixels_address, b_sz) - return a_bytes == b_bytes - - -class FreeTypeFontTest(unittest.TestCase): - - _fixed_path = os.path.join(FONTDIR, "test_fixed.otf") - _sans_path = os.path.join(FONTDIR, "test_sans.ttf") - _mono_path = os.path.join(FONTDIR, "PyGameMono.otf") - _bmp_8_75dpi_path = os.path.join(FONTDIR, "PyGameMono-8.bdf") - _bmp_18_75dpi_path = os.path.join(FONTDIR, "PyGameMono-18-75dpi.bdf") - _bmp_18_100dpi_path = os.path.join(FONTDIR, "PyGameMono-18-100dpi.bdf") - _TEST_FONTS = {} - - @classmethod - def setUpClass(cls): - ft.init() - - # Setup the test fonts. - - # Inconsolata is an open-source font designed by Raph Levien. - # Licensed under the Open Font License. - # http://www.levien.com/type/myfonts/inconsolata.html - cls._TEST_FONTS["fixed"] = ft.Font(cls._fixed_path) - - # Liberation Sans is an open-source font designed by Steve Matteson. - # Licensed under the GNU GPL. - # https://fedorahosted.org/liberation-fonts/ - cls._TEST_FONTS["sans"] = ft.Font(cls._sans_path) - - # A scalable mono test font made for pygame. It contains only - # a few glyphs: '\0', 'A', 'B', 'C', and U+13079. - # It also contains two bitmap sizes: 8.0 X 8.0 and 19.0 X 19.0. - cls._TEST_FONTS["mono"] = ft.Font(cls._mono_path) - - # A fixed size bitmap mono test font made for pygame. - # It contains only a few glyphs: '\0', 'A', 'B', 'C', and U+13079. - # The size is 8.0 X 8.0. - cls._TEST_FONTS["bmp-8-75dpi"] = ft.Font(cls._bmp_8_75dpi_path) - - # A fixed size bitmap mono test font made for pygame. - # It contains only a few glyphs: '\0', 'A', 'B', 'C', and U+13079. - # The size is 8.0 X 8.0. - cls._TEST_FONTS["bmp-18-75dpi"] = ft.Font(cls._bmp_18_75dpi_path) - - # A fixed size bitmap mono test font made for pygame. - # It contains only a few glyphs: '\0', 'A', 'B', 'C', and U+13079. - # The size is 8.0 X 8.0. 
- cls._TEST_FONTS["bmp-18-100dpi"] = ft.Font(cls._bmp_18_100dpi_path) - - @classmethod - def tearDownClass(cls): - ft.quit() - - def test_freetype_defaultfont(self): - font = ft.Font(None) - self.assertEqual(font.name, "FreeSans") - - def test_freetype_Font_init(self): - - self.assertRaises( - FileNotFoundError, ft.Font, os.path.join(FONTDIR, "nonexistent.ttf") - ) - - f = self._TEST_FONTS["sans"] - self.assertIsInstance(f, ft.Font) - - f = self._TEST_FONTS["fixed"] - self.assertIsInstance(f, ft.Font) - - # Test keyword arguments - f = ft.Font(size=22, file=None) - self.assertEqual(f.size, 22) - f = ft.Font(font_index=0, file=None) - self.assertNotEqual(ft.get_default_resolution(), 100) - f = ft.Font(resolution=100, file=None) - self.assertEqual(f.resolution, 100) - f = ft.Font(ucs4=True, file=None) - self.assertTrue(f.ucs4) - self.assertRaises(OverflowError, ft.Font, file=None, size=(max_point_size + 1)) - self.assertRaises(OverflowError, ft.Font, file=None, size=-1) - - f = ft.Font(None, size=24) - self.assertTrue(f.height > 0) - self.assertRaises( - FileNotFoundError, f.__init__, os.path.join(FONTDIR, "nonexistent.ttf") - ) - - # Test attribute preservation during reinitalization - f = ft.Font(self._sans_path, size=24, ucs4=True) - self.assertEqual(f.name, "Liberation Sans") - self.assertTrue(f.scalable) - self.assertFalse(f.fixed_width) - self.assertTrue(f.antialiased) - self.assertFalse(f.oblique) - self.assertTrue(f.ucs4) - f.antialiased = False - f.oblique = True - f.__init__(self._mono_path) - self.assertEqual(f.name, "PyGameMono") - self.assertTrue(f.scalable) - self.assertTrue(f.fixed_width) - self.assertFalse(f.antialiased) - self.assertTrue(f.oblique) - self.assertTrue(f.ucs4) - - # For a bitmap font, the size is automatically set to the first - # size in the available sizes list. - f = ft.Font(self._bmp_8_75dpi_path) - sizes = f.get_sizes() - self.assertEqual(len(sizes), 1) - size_pt, width_px, height_px, x_ppem, y_ppem = sizes[0] - self.assertEqual(f.size, (x_ppem, y_ppem)) - f.__init__(self._bmp_8_75dpi_path, size=12) - self.assertEqual(f.size, 12.0) - - @unittest.skipIf(IS_PYPY, "PyPy doesn't use refcounting") - def test_freetype_Font_dealloc(self): - import sys - - handle = open(self._sans_path, "rb") - - def load_font(): - tempFont = ft.Font(handle) - - try: - load_font() - - self.assertEqual(sys.getrefcount(handle), 2) - finally: - # Ensures file is closed even if test fails. - handle.close() - - def test_freetype_Font_kerning(self): - """Ensures get/set works with the kerning property.""" - ft_font = self._TEST_FONTS["sans"] - - # Test default is disabled. - self.assertFalse(ft_font.kerning) - - # Test setting to True. - ft_font.kerning = True - - self.assertTrue(ft_font.kerning) - - # Test setting to False. - ft_font.kerning = False - - self.assertFalse(ft_font.kerning) - - def test_freetype_Font_kerning__enabled(self): - """Ensures exceptions are not raised when calling freetype methods - while kerning is enabled. - - Note: This does not test what changes occur to a rendered font by - having kerning enabled. - - Related to issue #367. - """ - surface = pygame.Surface((10, 10), 0, 32) - TEST_TEXT = "Freetype Font" - ft_font = self._TEST_FONTS["bmp-8-75dpi"] - - ft_font.kerning = True - - # Call different methods to ensure they don't raise an exception. 
- metrics = ft_font.get_metrics(TEST_TEXT) - self.assertIsInstance(metrics, list) - - rect = ft_font.get_rect(TEST_TEXT) - self.assertIsInstance(rect, pygame.Rect) - - font_surf, rect = ft_font.render(TEST_TEXT) - self.assertIsInstance(font_surf, pygame.Surface) - self.assertIsInstance(rect, pygame.Rect) - - rect = ft_font.render_to(surface, (0, 0), TEST_TEXT) - self.assertIsInstance(rect, pygame.Rect) - - buf, size = ft_font.render_raw(TEST_TEXT) - self.assertIsInstance(buf, bytes) - self.assertIsInstance(size, tuple) - - rect = ft_font.render_raw_to(surface.get_view("2"), TEST_TEXT) - self.assertIsInstance(rect, pygame.Rect) - - def test_freetype_Font_scalable(self): - - f = self._TEST_FONTS["sans"] - self.assertTrue(f.scalable) - - self.assertRaises(RuntimeError, lambda: nullfont().scalable) - - def test_freetype_Font_fixed_width(self): - - f = self._TEST_FONTS["sans"] - self.assertFalse(f.fixed_width) - - f = self._TEST_FONTS["mono"] - self.assertTrue(f.fixed_width) - - self.assertRaises(RuntimeError, lambda: nullfont().fixed_width) - - def test_freetype_Font_fixed_sizes(self): - - f = self._TEST_FONTS["sans"] - self.assertEqual(f.fixed_sizes, 0) - f = self._TEST_FONTS["bmp-8-75dpi"] - self.assertEqual(f.fixed_sizes, 1) - f = self._TEST_FONTS["mono"] - self.assertEqual(f.fixed_sizes, 2) - - def test_freetype_Font_get_sizes(self): - f = self._TEST_FONTS["sans"] - szlist = f.get_sizes() - self.assertIsInstance(szlist, list) - self.assertEqual(len(szlist), 0) - - f = self._TEST_FONTS["bmp-8-75dpi"] - szlist = f.get_sizes() - self.assertIsInstance(szlist, list) - self.assertEqual(len(szlist), 1) - - size8 = szlist[0] - self.assertIsInstance(size8[0], int) - self.assertEqual(size8[0], 8) - self.assertIsInstance(size8[1], int) - self.assertIsInstance(size8[2], int) - self.assertIsInstance(size8[3], float) - self.assertEqual(int(size8[3] * 64.0 + 0.5), 8 * 64) - self.assertIsInstance(size8[4], float) - self.assertEqual(int(size8[4] * 64.0 + 0.5), 8 * 64) - - f = self._TEST_FONTS["mono"] - szlist = f.get_sizes() - self.assertIsInstance(szlist, list) - self.assertEqual(len(szlist), 2) - - size8 = szlist[0] - self.assertEqual(size8[3], 8) - self.assertEqual(int(size8[3] * 64.0 + 0.5), 8 * 64) - self.assertEqual(int(size8[4] * 64.0 + 0.5), 8 * 64) - - size19 = szlist[1] - self.assertEqual(size19[3], 19) - self.assertEqual(int(size19[3] * 64.0 + 0.5), 19 * 64) - self.assertEqual(int(size19[4] * 64.0 + 0.5), 19 * 64) - - def test_freetype_Font_use_bitmap_strikes(self): - f = self._TEST_FONTS["mono"] - try: - # use_bitmap_strikes == True - # - self.assertTrue(f.use_bitmap_strikes) - - # bitmap compatible properties - s_strike, sz = f.render_raw("A", size=19) - try: - f.vertical = True - s_strike_vert, sz = f.render_raw("A", size=19) - finally: - f.vertical = False - try: - f.wide = True - s_strike_wide, sz = f.render_raw("A", size=19) - finally: - f.wide = False - try: - f.underline = True - s_strike_underline, sz = f.render_raw("A", size=19) - finally: - f.underline = False - - # bitmap incompatible properties - s_strike_rot45, sz = f.render_raw("A", size=19, rotation=45) - try: - f.strong = True - s_strike_strong, sz = f.render_raw("A", size=19) - finally: - f.strong = False - try: - f.oblique = True - s_strike_oblique, sz = f.render_raw("A", size=19) - finally: - f.oblique = False - - # compare with use_bitmap_strikes == False - # - f.use_bitmap_strikes = False - self.assertFalse(f.use_bitmap_strikes) - - # bitmap compatible properties - s_outline, sz = f.render_raw("A", size=19) - 
self.assertNotEqual(s_outline, s_strike) - try: - f.vertical = True - s_outline, sz = f.render_raw("A", size=19) - self.assertNotEqual(s_outline, s_strike_vert) - finally: - f.vertical = False - try: - f.wide = True - s_outline, sz = f.render_raw("A", size=19) - self.assertNotEqual(s_outline, s_strike_wide) - finally: - f.wide = False - try: - f.underline = True - s_outline, sz = f.render_raw("A", size=19) - self.assertNotEqual(s_outline, s_strike_underline) - finally: - f.underline = False - - # bitmap incompatible properties - s_outline, sz = f.render_raw("A", size=19, rotation=45) - self.assertEqual(s_outline, s_strike_rot45) - try: - f.strong = True - s_outline, sz = f.render_raw("A", size=19) - self.assertEqual(s_outline, s_strike_strong) - finally: - f.strong = False - try: - f.oblique = True - s_outline, sz = f.render_raw("A", size=19) - self.assertEqual(s_outline, s_strike_oblique) - finally: - f.oblique = False - finally: - f.use_bitmap_strikes = True - - def test_freetype_Font_bitmap_files(self): - """Ensure bitmap file restrictions are caught""" - f = self._TEST_FONTS["bmp-8-75dpi"] - f_null = nullfont() - s = pygame.Surface((10, 10), 0, 32) - a = s.get_view("3") - - exception = AttributeError - self.assertRaises(exception, setattr, f, "strong", True) - self.assertRaises(exception, setattr, f, "oblique", True) - self.assertRaises(exception, setattr, f, "style", ft.STYLE_STRONG) - self.assertRaises(exception, setattr, f, "style", ft.STYLE_OBLIQUE) - exception = RuntimeError - self.assertRaises(exception, setattr, f_null, "strong", True) - self.assertRaises(exception, setattr, f_null, "oblique", True) - self.assertRaises(exception, setattr, f_null, "style", ft.STYLE_STRONG) - self.assertRaises(exception, setattr, f_null, "style", ft.STYLE_OBLIQUE) - exception = ValueError - self.assertRaises(exception, f.render, "A", (0, 0, 0), size=8, rotation=1) - self.assertRaises( - exception, f.render, "A", (0, 0, 0), size=8, style=ft.STYLE_OBLIQUE - ) - self.assertRaises( - exception, f.render, "A", (0, 0, 0), size=8, style=ft.STYLE_STRONG - ) - self.assertRaises(exception, f.render_raw, "A", size=8, rotation=1) - self.assertRaises(exception, f.render_raw, "A", size=8, style=ft.STYLE_OBLIQUE) - self.assertRaises(exception, f.render_raw, "A", size=8, style=ft.STYLE_STRONG) - self.assertRaises( - exception, f.render_to, s, (0, 0), "A", (0, 0, 0), size=8, rotation=1 - ) - self.assertRaises( - exception, - f.render_to, - s, - (0, 0), - "A", - (0, 0, 0), - size=8, - style=ft.STYLE_OBLIQUE, - ) - self.assertRaises( - exception, - f.render_to, - s, - (0, 0), - "A", - (0, 0, 0), - size=8, - style=ft.STYLE_STRONG, - ) - self.assertRaises(exception, f.render_raw_to, a, "A", size=8, rotation=1) - self.assertRaises( - exception, f.render_raw_to, a, "A", size=8, style=ft.STYLE_OBLIQUE - ) - self.assertRaises( - exception, f.render_raw_to, a, "A", size=8, style=ft.STYLE_STRONG - ) - self.assertRaises(exception, f.get_rect, "A", size=8, rotation=1) - self.assertRaises(exception, f.get_rect, "A", size=8, style=ft.STYLE_OBLIQUE) - self.assertRaises(exception, f.get_rect, "A", size=8, style=ft.STYLE_STRONG) - - # Unsupported point size - exception = pygame.error - self.assertRaises(exception, f.get_rect, "A", size=42) - self.assertRaises(exception, f.get_metrics, "A", size=42) - self.assertRaises(exception, f.get_sized_ascender, 42) - self.assertRaises(exception, f.get_sized_descender, 42) - self.assertRaises(exception, f.get_sized_height, 42) - self.assertRaises(exception, f.get_sized_glyph_height, 42) - - 
def test_freetype_Font_get_metrics(self): - - font = self._TEST_FONTS["sans"] - - metrics = font.get_metrics("ABCD", size=24) - self.assertEqual(len(metrics), len("ABCD")) - self.assertIsInstance(metrics, list) - - for metrics_tuple in metrics: - self.assertIsInstance(metrics_tuple, tuple, metrics_tuple) - self.assertEqual(len(metrics_tuple), 6) - - for m in metrics_tuple[:4]: - self.assertIsInstance(m, int) - - for m in metrics_tuple[4:]: - self.assertIsInstance(m, float) - - # test for empty string - metrics = font.get_metrics("", size=24) - self.assertEqual(metrics, []) - - # test for invalid string - self.assertRaises(TypeError, font.get_metrics, 24, 24) - - # raises exception when uninitalized - self.assertRaises(RuntimeError, nullfont().get_metrics, "a", size=24) - - def test_freetype_Font_get_rect(self): - - font = self._TEST_FONTS["sans"] - - def test_rect(r): - self.assertIsInstance(r, pygame.Rect) - - rect_default = font.get_rect("ABCDabcd", size=24) - test_rect(rect_default) - self.assertTrue(rect_default.size > (0, 0)) - self.assertTrue(rect_default.width > rect_default.height) - - rect_bigger = font.get_rect("ABCDabcd", size=32) - test_rect(rect_bigger) - self.assertTrue(rect_bigger.size > rect_default.size) - - rect_strong = font.get_rect("ABCDabcd", size=24, style=ft.STYLE_STRONG) - test_rect(rect_strong) - self.assertTrue(rect_strong.size > rect_default.size) - - font.vertical = True - rect_vert = font.get_rect("ABCDabcd", size=24) - test_rect(rect_vert) - self.assertTrue(rect_vert.width < rect_vert.height) - font.vertical = False - - rect_oblique = font.get_rect("ABCDabcd", size=24, style=ft.STYLE_OBLIQUE) - test_rect(rect_oblique) - self.assertTrue(rect_oblique.width > rect_default.width) - self.assertTrue(rect_oblique.height == rect_default.height) - - rect_under = font.get_rect("ABCDabcd", size=24, style=ft.STYLE_UNDERLINE) - test_rect(rect_under) - self.assertTrue(rect_under.width == rect_default.width) - self.assertTrue(rect_under.height > rect_default.height) - - # Rect size should change if UTF surrogate pairs are treated as - # one code point or two. 
- ufont = self._TEST_FONTS["mono"] - rect_utf32 = ufont.get_rect("\U00013079", size=24) - rect_utf16 = ufont.get_rect("\uD80C\uDC79", size=24) - self.assertEqual(rect_utf16, rect_utf32) - ufont.ucs4 = True - try: - rect_utf16 = ufont.get_rect("\uD80C\uDC79", size=24) - finally: - ufont.ucs4 = False - self.assertNotEqual(rect_utf16, rect_utf32) - - self.assertRaises(RuntimeError, nullfont().get_rect, "a", size=24) - - # text stretching - rect12 = font.get_rect("A", size=12.0) - rect24 = font.get_rect("A", size=24.0) - rect_x = font.get_rect("A", size=(24.0, 12.0)) - self.assertEqual(rect_x.width, rect24.width) - self.assertEqual(rect_x.height, rect12.height) - rect_y = font.get_rect("A", size=(12.0, 24.0)) - self.assertEqual(rect_y.width, rect12.width) - self.assertEqual(rect_y.height, rect24.height) - - def test_freetype_Font_height(self): - - f = self._TEST_FONTS["sans"] - self.assertEqual(f.height, 2355) - - f = self._TEST_FONTS["fixed"] - self.assertEqual(f.height, 1100) - - self.assertRaises(RuntimeError, lambda: nullfont().height) - - def test_freetype_Font_name(self): - - f = self._TEST_FONTS["sans"] - self.assertEqual(f.name, "Liberation Sans") - - f = self._TEST_FONTS["fixed"] - self.assertEqual(f.name, "Inconsolata") - - nf = nullfont() - self.assertEqual(nf.name, repr(nf)) - - def test_freetype_Font_size(self): - - f = ft.Font(None, size=12) - self.assertEqual(f.size, 12) - f.size = 22 - self.assertEqual(f.size, 22) - f.size = 0 - self.assertEqual(f.size, 0) - f.size = max_point_size - self.assertEqual(f.size, max_point_size) - f.size = 6.5 - self.assertEqual(f.size, 6.5) - f.size = max_point_size_f - self.assertEqual(f.size, max_point_size_f) - self.assertRaises(OverflowError, setattr, f, "size", -1) - self.assertRaises(OverflowError, setattr, f, "size", (max_point_size + 1)) - - f.size = 24.0, 0 - size = f.size - self.assertIsInstance(size, float) - self.assertEqual(size, 24.0) - - f.size = 16, 16 - size = f.size - self.assertIsInstance(size, tuple) - self.assertEqual(len(size), 2) - - x, y = size - self.assertIsInstance(x, float) - self.assertEqual(x, 16.0) - self.assertIsInstance(y, float) - self.assertEqual(y, 16.0) - - f.size = 20.5, 22.25 - x, y = f.size - self.assertEqual(x, 20.5) - self.assertEqual(y, 22.25) - - f.size = 0, 0 - size = f.size - self.assertIsInstance(size, float) - self.assertEqual(size, 0.0) - self.assertRaises(ValueError, setattr, f, "size", (0, 24.0)) - self.assertRaises(TypeError, setattr, f, "size", (24.0,)) - self.assertRaises(TypeError, setattr, f, "size", (24.0, 0, 0)) - self.assertRaises(TypeError, setattr, f, "size", (24.0j, 24.0)) - self.assertRaises(TypeError, setattr, f, "size", (24.0, 24.0j)) - self.assertRaises(OverflowError, setattr, f, "size", (-1, 16)) - self.assertRaises(OverflowError, setattr, f, "size", (max_point_size + 1, 16)) - self.assertRaises(OverflowError, setattr, f, "size", (16, -1)) - self.assertRaises(OverflowError, setattr, f, "size", (16, max_point_size + 1)) - - # bitmap files with identical point size but differing ppems. 
- f75 = self._TEST_FONTS["bmp-18-75dpi"] - sizes = f75.get_sizes() - self.assertEqual(len(sizes), 1) - size_pt, width_px, height_px, x_ppem, y_ppem = sizes[0] - self.assertEqual(size_pt, 18) - self.assertEqual(x_ppem, 19.0) - self.assertEqual(y_ppem, 19.0) - rect = f75.get_rect("A", size=18) - rect = f75.get_rect("A", size=19) - rect = f75.get_rect("A", size=(19.0, 19.0)) - self.assertRaises(pygame.error, f75.get_rect, "A", size=17) - f100 = self._TEST_FONTS["bmp-18-100dpi"] - sizes = f100.get_sizes() - self.assertEqual(len(sizes), 1) - size_pt, width_px, height_px, x_ppem, y_ppem = sizes[0] - self.assertEqual(size_pt, 18) - self.assertEqual(x_ppem, 25.0) - self.assertEqual(y_ppem, 25.0) - rect = f100.get_rect("A", size=18) - rect = f100.get_rect("A", size=25) - rect = f100.get_rect("A", size=(25.0, 25.0)) - self.assertRaises(pygame.error, f100.get_rect, "A", size=17) - - def test_freetype_Font_rotation(self): - - test_angles = [ - (30, 30), - (360, 0), - (390, 30), - (720, 0), - (764, 44), - (-30, 330), - (-360, 0), - (-390, 330), - (-720, 0), - (-764, 316), - ] - - f = ft.Font(None) - self.assertEqual(f.rotation, 0) - for r, r_reduced in test_angles: - f.rotation = r - self.assertEqual( - f.rotation, - r_reduced, - "for angle %d: %d != %d" % (r, f.rotation, r_reduced), - ) - self.assertRaises(TypeError, setattr, f, "rotation", "12") - - def test_freetype_Font_render_to(self): - # Rendering to an existing target surface is equivalent to - # blitting a surface returned by Font.render with the target. - font = self._TEST_FONTS["sans"] - - surf = pygame.Surface((800, 600)) - color = pygame.Color(0, 0, 0) - - rrect = font.render_to(surf, (32, 32), "FoobarBaz", color, None, size=24) - self.assertIsInstance(rrect, pygame.Rect) - self.assertEqual(rrect.topleft, (32, 32)) - self.assertNotEqual(rrect.bottomright, (32, 32)) - - rcopy = rrect.copy() - rcopy.topleft = (32, 32) - self.assertTrue(surf.get_rect().contains(rcopy)) - - rect = pygame.Rect(20, 20, 2, 2) - rrect = font.render_to(surf, rect, "FoobarBax", color, None, size=24) - self.assertEqual(rect.topleft, rrect.topleft) - self.assertNotEqual(rrect.size, rect.size) - rrect = font.render_to(surf, (20.1, 18.9), "FoobarBax", color, None, size=24) - - rrect = font.render_to(surf, rect, "", color, None, size=24) - self.assertFalse(rrect) - self.assertEqual(rrect.height, font.get_sized_height(24)) - - # invalid surf test - self.assertRaises(TypeError, font.render_to, "not a surface", "text", color) - self.assertRaises(TypeError, font.render_to, pygame.Surface, "text", color) - - # invalid dest test - for dest in [ - None, - 0, - "a", - "ab", - (), - (1,), - ("a", 2), - (1, "a"), - (1 + 2j, 2), - (1, 1 + 2j), - (1, int), - (int, 1), - ]: - self.assertRaises( - TypeError, font.render_to, surf, dest, "foobar", color, size=24 - ) - - # misc parameter test - self.assertRaises(ValueError, font.render_to, surf, (0, 0), "foobar", color) - self.assertRaises( - TypeError, font.render_to, surf, (0, 0), "foobar", color, 2.3, size=24 - ) - self.assertRaises( - ValueError, - font.render_to, - surf, - (0, 0), - "foobar", - color, - None, - style=42, - size=24, - ) - self.assertRaises( - TypeError, - font.render_to, - surf, - (0, 0), - "foobar", - color, - None, - style=None, - size=24, - ) - self.assertRaises( - ValueError, - font.render_to, - surf, - (0, 0), - "foobar", - color, - None, - style=97, - size=24, - ) - - def test_freetype_Font_render(self): - - font = self._TEST_FONTS["sans"] - - surf = pygame.Surface((800, 600)) - color = pygame.Color(0, 0, 0) - 
- rend = font.render("FoobarBaz", pygame.Color(0, 0, 0), None, size=24) - self.assertIsInstance(rend, tuple) - self.assertEqual(len(rend), 2) - self.assertIsInstance(rend[0], pygame.Surface) - self.assertIsInstance(rend[1], pygame.Rect) - self.assertEqual(rend[0].get_rect().size, rend[1].size) - - s, r = font.render("", pygame.Color(0, 0, 0), None, size=24) - self.assertEqual(r.width, 0) - self.assertEqual(r.height, font.get_sized_height(24)) - self.assertEqual(s.get_size(), r.size) - self.assertEqual(s.get_bitsize(), 32) - - # misc parameter test - self.assertRaises(ValueError, font.render, "foobar", color) - self.assertRaises(TypeError, font.render, "foobar", color, 2.3, size=24) - self.assertRaises( - ValueError, font.render, "foobar", color, None, style=42, size=24 - ) - self.assertRaises( - TypeError, font.render, "foobar", color, None, style=None, size=24 - ) - self.assertRaises( - ValueError, font.render, "foobar", color, None, style=97, size=24 - ) - - # valid surrogate pairs - font2 = self._TEST_FONTS["mono"] - ucs4 = font2.ucs4 - try: - font2.ucs4 = False - rend1 = font2.render("\uD80C\uDC79", color, size=24) - rend2 = font2.render("\U00013079", color, size=24) - self.assertEqual(rend1[1], rend2[1]) - font2.ucs4 = True - rend1 = font2.render("\uD80C\uDC79", color, size=24) - self.assertNotEqual(rend1[1], rend2[1]) - finally: - font2.ucs4 = ucs4 - - # malformed surrogate pairs - self.assertRaises(UnicodeEncodeError, font.render, "\uD80C", color, size=24) - self.assertRaises(UnicodeEncodeError, font.render, "\uDCA7", color, size=24) - self.assertRaises( - UnicodeEncodeError, font.render, "\uD7FF\uDCA7", color, size=24 - ) - self.assertRaises( - UnicodeEncodeError, font.render, "\uDC00\uDCA7", color, size=24 - ) - self.assertRaises( - UnicodeEncodeError, font.render, "\uD80C\uDBFF", color, size=24 - ) - self.assertRaises( - UnicodeEncodeError, font.render, "\uD80C\uE000", color, size=24 - ) - - # raises exception when uninitalized - self.assertRaises(RuntimeError, nullfont().render, "a", (0, 0, 0), size=24) - - # Confirm the correct glpyhs are returned for a couple of - # unicode code points, 'A' and '\U00023079'. For each code point - # the rendered glyph is compared with an image of glyph bitmap - # as exported by FontForge. - path = os.path.join(FONTDIR, "A_PyGameMono-8.png") - A = pygame.image.load(path) - path = os.path.join(FONTDIR, "u13079_PyGameMono-8.png") - u13079 = pygame.image.load(path) - - font = self._TEST_FONTS["mono"] - font.ucs4 = False - A_rendered, r = font.render("A", bgcolor=pygame.Color("white"), size=8) - u13079_rendered, r = font.render( - "\U00013079", bgcolor=pygame.Color("white"), size=8 - ) - - # before comparing the surfaces, make sure they are the same - # pixel format. Use 32-bit SRCALPHA to avoid row padding and - # undefined bytes (the alpha byte will be set to 255.) - bitmap = pygame.Surface(A.get_size(), pygame.SRCALPHA, 32) - bitmap.blit(A, (0, 0)) - rendering = pygame.Surface(A_rendered.get_size(), pygame.SRCALPHA, 32) - rendering.blit(A_rendered, (0, 0)) - self.assertTrue(surf_same_image(rendering, bitmap)) - bitmap = pygame.Surface(u13079.get_size(), pygame.SRCALPHA, 32) - bitmap.blit(u13079, (0, 0)) - rendering = pygame.Surface(u13079_rendered.get_size(), pygame.SRCALPHA, 32) - rendering.blit(u13079_rendered, (0, 0)) - self.assertTrue(surf_same_image(rendering, bitmap)) - - def test_freetype_Font_render_mono(self): - font = self._TEST_FONTS["sans"] - color = pygame.Color("black") - colorkey = pygame.Color("white") - text = "." 
- - save_antialiased = font.antialiased - font.antialiased = False - try: - surf, r = font.render(text, color, size=24) - self.assertEqual(surf.get_bitsize(), 8) - flags = surf.get_flags() - self.assertTrue(flags & pygame.SRCCOLORKEY) - self.assertFalse(flags & (pygame.SRCALPHA | pygame.HWSURFACE)) - self.assertEqual(surf.get_colorkey(), colorkey) - self.assertIsNone(surf.get_alpha()) - - translucent_color = pygame.Color(*color) - translucent_color.a = 55 - surf, r = font.render(text, translucent_color, size=24) - self.assertEqual(surf.get_bitsize(), 8) - flags = surf.get_flags() - self.assertTrue(flags & (pygame.SRCCOLORKEY | pygame.SRCALPHA)) - self.assertFalse(flags & pygame.HWSURFACE) - self.assertEqual(surf.get_colorkey(), colorkey) - self.assertEqual(surf.get_alpha(), translucent_color.a) - - surf, r = font.render(text, color, colorkey, size=24) - self.assertEqual(surf.get_bitsize(), 32) - finally: - font.antialiased = save_antialiased - - def test_freetype_Font_render_to_mono(self): - # Blitting is done in two stages. First the target is alpha filled - # with the background color, if any. Second, the foreground - # color is alpha blitted to the background. - font = self._TEST_FONTS["sans"] - text = " ." - rect = font.get_rect(text, size=24) - size = rect.size - fg = pygame.Surface((1, 1), pygame.SRCALPHA, 32) - bg = pygame.Surface((1, 1), pygame.SRCALPHA, 32) - surrogate = pygame.Surface((1, 1), pygame.SRCALPHA, 32) - surfaces = [ - pygame.Surface(size, 0, 8), - pygame.Surface(size, 0, 16), - pygame.Surface(size, pygame.SRCALPHA, 16), - pygame.Surface(size, 0, 24), - pygame.Surface(size, 0, 32), - pygame.Surface(size, pygame.SRCALPHA, 32), - ] - fg_colors = [ - surfaces[0].get_palette_at(2), - surfaces[1].unmap_rgb(surfaces[1].map_rgb((128, 64, 200))), - surfaces[2].unmap_rgb(surfaces[2].map_rgb((99, 0, 100, 64))), - (128, 97, 213), - (128, 97, 213), - (128, 97, 213, 60), - ] - fg_colors = [pygame.Color(*c) for c in fg_colors] - self.assertEqual(len(surfaces), len(fg_colors)) # integrity check - bg_colors = [ - surfaces[0].get_palette_at(4), - surfaces[1].unmap_rgb(surfaces[1].map_rgb((220, 20, 99))), - surfaces[2].unmap_rgb(surfaces[2].map_rgb((55, 200, 0, 86))), - (255, 120, 13), - (255, 120, 13), - (255, 120, 13, 180), - ] - bg_colors = [pygame.Color(*c) for c in bg_colors] - self.assertEqual(len(surfaces), len(bg_colors)) # integrity check - - save_antialiased = font.antialiased - font.antialiased = False - try: - fill_color = pygame.Color("black") - for i, surf in enumerate(surfaces): - surf.fill(fill_color) - fg_color = fg_colors[i] - fg.set_at((0, 0), fg_color) - surf.blit(fg, (0, 0)) - r_fg_color = surf.get_at((0, 0)) - surf.set_at((0, 0), fill_color) - rrect = font.render_to(surf, (0, 0), text, fg_color, size=24) - bottomleft = 0, rrect.height - 1 - self.assertEqual( - surf.get_at(bottomleft), - fill_color, - "Position: {}. Depth: {}." - " fg_color: {}.".format(bottomleft, surf.get_bitsize(), fg_color), - ) - bottomright = rrect.width - 1, rrect.height - 1 - self.assertEqual( - surf.get_at(bottomright), - r_fg_color, - "Position: {}. Depth: {}." 
- " fg_color: {}.".format(bottomright, surf.get_bitsize(), fg_color), - ) - for i, surf in enumerate(surfaces): - surf.fill(fill_color) - fg_color = fg_colors[i] - bg_color = bg_colors[i] - bg.set_at((0, 0), bg_color) - fg.set_at((0, 0), fg_color) - if surf.get_bitsize() == 24: - # For a 24 bit target surface test against Pygame's alpha - # blit as there appears to be a problem with SDL's alpha - # blit: - # - # self.assertEqual(surf.get_at(bottomright), r_fg_color) - # - # raises - # - # AssertionError: (128, 97, 213, 255) != (129, 98, 213, 255) - # - surrogate.set_at((0, 0), fill_color) - surrogate.blit(bg, (0, 0)) - r_bg_color = surrogate.get_at((0, 0)) - surrogate.blit(fg, (0, 0)) - r_fg_color = surrogate.get_at((0, 0)) - else: - # Surface blit values for comparison. - surf.blit(bg, (0, 0)) - r_bg_color = surf.get_at((0, 0)) - surf.blit(fg, (0, 0)) - r_fg_color = surf.get_at((0, 0)) - surf.set_at((0, 0), fill_color) - rrect = font.render_to(surf, (0, 0), text, fg_color, bg_color, size=24) - bottomleft = 0, rrect.height - 1 - self.assertEqual(surf.get_at(bottomleft), r_bg_color) - bottomright = rrect.width - 1, rrect.height - 1 - self.assertEqual(surf.get_at(bottomright), r_fg_color) - finally: - font.antialiased = save_antialiased - - def test_freetype_Font_render_raw(self): - - font = self._TEST_FONTS["sans"] - - text = "abc" - size = font.get_rect(text, size=24).size - rend = font.render_raw(text, size=24) - self.assertIsInstance(rend, tuple) - self.assertEqual(len(rend), 2) - - r, s = rend - self.assertIsInstance(r, bytes) - self.assertIsInstance(s, tuple) - self.assertTrue(len(s), 2) - - w, h = s - self.assertIsInstance(w, int) - self.assertIsInstance(h, int) - self.assertEqual(s, size) - self.assertEqual(len(r), w * h) - - r, (w, h) = font.render_raw("", size=24) - self.assertEqual(w, 0) - self.assertEqual(h, font.height) - self.assertEqual(len(r), 0) - - # bug with decenders: this would crash - rend = font.render_raw("render_raw", size=24) - - # bug with non-printable characters: this would cause a crash - # because the text length was not adjusted for skipped characters. - text = "".join([chr(i) for i in range(31, 64)]) - rend = font.render_raw(text, size=10) - - def test_freetype_Font_render_raw_to(self): - - # This only checks that blits do not crash. 
It needs to check: - # - int values - # - invert option - # - - font = self._TEST_FONTS["sans"] - text = "abc" - - # No frills antialiased render to int1 (__render_glyph_INT) - srect = font.get_rect(text, size=24) - surf = pygame.Surface(srect.size, 0, 8) - rrect = font.render_raw_to(surf.get_view("2"), text, size=24) - self.assertEqual(rrect, srect) - - for bpp in [24, 32]: - surf = pygame.Surface(srect.size, 0, bpp) - rrect = font.render_raw_to(surf.get_view("r"), text, size=24) - self.assertEqual(rrect, srect) - - # Underlining to int1 (__fill_glyph_INT) - srect = font.get_rect(text, size=24, style=ft.STYLE_UNDERLINE) - surf = pygame.Surface(srect.size, 0, 8) - rrect = font.render_raw_to( - surf.get_view("2"), text, size=24, style=ft.STYLE_UNDERLINE - ) - self.assertEqual(rrect, srect) - - for bpp in [24, 32]: - surf = pygame.Surface(srect.size, 0, bpp) - rrect = font.render_raw_to( - surf.get_view("r"), text, size=24, style=ft.STYLE_UNDERLINE - ) - self.assertEqual(rrect, srect) - - # Unaliased (mono) rendering to int1 (__render_glyph_MONO_as_INT) - font.antialiased = False - try: - srect = font.get_rect(text, size=24) - surf = pygame.Surface(srect.size, 0, 8) - rrect = font.render_raw_to(surf.get_view("2"), text, size=24) - self.assertEqual(rrect, srect) - - for bpp in [24, 32]: - surf = pygame.Surface(srect.size, 0, bpp) - rrect = font.render_raw_to(surf.get_view("r"), text, size=24) - self.assertEqual(rrect, srect) - finally: - font.antialiased = True - - # Antialiased render to ints sized greater than 1 byte - # (__render_glyph_INT) - srect = font.get_rect(text, size=24) - - for bpp in [16, 24, 32]: - surf = pygame.Surface(srect.size, 0, bpp) - rrect = font.render_raw_to(surf.get_view("2"), text, size=24) - self.assertEqual(rrect, srect) - - # Underline render to ints sized greater than 1 byte - # (__fill_glyph_INT) - srect = font.get_rect(text, size=24, style=ft.STYLE_UNDERLINE) - - for bpp in [16, 24, 32]: - surf = pygame.Surface(srect.size, 0, bpp) - rrect = font.render_raw_to( - surf.get_view("2"), text, size=24, style=ft.STYLE_UNDERLINE - ) - self.assertEqual(rrect, srect) - - # Unaliased (mono) rendering to ints greater than 1 byte - # (__render_glyph_MONO_as_INT) - font.antialiased = False - try: - srect = font.get_rect(text, size=24) - - for bpp in [16, 24, 32]: - surf = pygame.Surface(srect.size, 0, bpp) - rrect = font.render_raw_to(surf.get_view("2"), text, size=24) - self.assertEqual(rrect, srect) - finally: - font.antialiased = True - - # Invalid dest parameter test. 
- srect = font.get_rect(text, size=24) - surf_buf = pygame.Surface(srect.size, 0, 32).get_view("2") - - for dest in [ - 0, - "a", - "ab", - (), - (1,), - ("a", 2), - (1, "a"), - (1 + 2j, 2), - (1, 1 + 2j), - (1, int), - (int, 1), - ]: - self.assertRaises( - TypeError, font.render_raw_to, surf_buf, text, dest, size=24 - ) - - def test_freetype_Font_text_is_None(self): - f = ft.Font(self._sans_path, 36) - f.style = ft.STYLE_NORMAL - f.rotation = 0 - text = "ABCD" - - # reference values - get_rect = f.get_rect(text) - f.vertical = True - get_rect_vert = f.get_rect(text) - - self.assertTrue(get_rect_vert.width < get_rect.width) - self.assertTrue(get_rect_vert.height > get_rect.height) - f.vertical = False - render_to_surf = pygame.Surface(get_rect.size, pygame.SRCALPHA, 32) - - if IS_PYPY: - return - - arr = arrinter.Array(get_rect.size, "u", 1) - render = f.render(text, (0, 0, 0)) - render_to = f.render_to(render_to_surf, (0, 0), text, (0, 0, 0)) - render_raw = f.render_raw(text) - render_raw_to = f.render_raw_to(arr, text) - - # comparisons - surf = pygame.Surface(get_rect.size, pygame.SRCALPHA, 32) - self.assertEqual(f.get_rect(None), get_rect) - s, r = f.render(None, (0, 0, 0)) - self.assertEqual(r, render[1]) - self.assertTrue(surf_same_image(s, render[0])) - r = f.render_to(surf, (0, 0), None, (0, 0, 0)) - self.assertEqual(r, render_to) - self.assertTrue(surf_same_image(surf, render_to_surf)) - px, sz = f.render_raw(None) - self.assertEqual(sz, render_raw[1]) - self.assertEqual(px, render_raw[0]) - sz = f.render_raw_to(arr, None) - self.assertEqual(sz, render_raw_to) - - def test_freetype_Font_text_is_None(self): - f = ft.Font(self._sans_path, 36) - f.style = ft.STYLE_NORMAL - f.rotation = 0 - text = "ABCD" - - # reference values - get_rect = f.get_rect(text) - f.vertical = True - get_rect_vert = f.get_rect(text) - - # vertical: trigger glyph positioning. 
- f.vertical = True - r = f.get_rect(None) - self.assertEqual(r, get_rect_vert) - f.vertical = False - - # wide style: trigger glyph reload - r = f.get_rect(None, style=ft.STYLE_WIDE) - self.assertEqual(r.height, get_rect.height) - self.assertTrue(r.width > get_rect.width) - r = f.get_rect(None) - self.assertEqual(r, get_rect) - - # rotated: trigger glyph reload - r = f.get_rect(None, rotation=90) - self.assertEqual(r.width, get_rect.height) - self.assertEqual(r.height, get_rect.width) - - # this method will not support None text - self.assertRaises(TypeError, f.get_metrics, None) - - def test_freetype_Font_fgcolor(self): - f = ft.Font(self._bmp_8_75dpi_path) - notdef = "\0" # the PyGameMono .notdef glyph has a pixel at (0, 0) - f.origin = False - f.pad = False - black = pygame.Color("black") # initial color - green = pygame.Color("green") - alpha128 = pygame.Color(10, 20, 30, 128) - - c = f.fgcolor - self.assertIsInstance(c, pygame.Color) - self.assertEqual(c, black) - - s, r = f.render(notdef) - self.assertEqual(s.get_at((0, 0)), black) - - f.fgcolor = green - self.assertEqual(f.fgcolor, green) - - s, r = f.render(notdef) - self.assertEqual(s.get_at((0, 0)), green) - - f.fgcolor = alpha128 - s, r = f.render(notdef) - self.assertEqual(s.get_at((0, 0)), alpha128) - - surf = pygame.Surface(f.get_rect(notdef).size, pygame.SRCALPHA, 32) - f.render_to(surf, (0, 0), None) - self.assertEqual(surf.get_at((0, 0)), alpha128) - - self.assertRaises(AttributeError, setattr, f, "fgcolor", None) - - def test_freetype_Font_bgcolor(self): - f = ft.Font(None, 32) - zero = "0" # the default font 0 glyph does not have a pixel at (0, 0) - f.origin = False - f.pad = False - - transparent_black = pygame.Color(0, 0, 0, 0) # initial color - green = pygame.Color("green") - alpha128 = pygame.Color(10, 20, 30, 128) - - c = f.bgcolor - self.assertIsInstance(c, pygame.Color) - self.assertEqual(c, transparent_black) - - s, r = f.render(zero, pygame.Color(255, 255, 255)) - self.assertEqual(s.get_at((0, 0)), transparent_black) - - f.bgcolor = green - self.assertEqual(f.bgcolor, green) - - s, r = f.render(zero) - self.assertEqual(s.get_at((0, 0)), green) - - f.bgcolor = alpha128 - s, r = f.render(zero) - self.assertEqual(s.get_at((0, 0)), alpha128) - - surf = pygame.Surface(f.get_rect(zero).size, pygame.SRCALPHA, 32) - f.render_to(surf, (0, 0), None) - self.assertEqual(surf.get_at((0, 0)), alpha128) - - self.assertRaises(AttributeError, setattr, f, "bgcolor", None) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - @unittest.skipIf(IS_PYPY, "pypy no likey") - def test_newbuf(self): - from pygame.tests.test_utils import buftools - - Exporter = buftools.Exporter - font = self._TEST_FONTS["sans"] - srect = font.get_rect("Hi", size=12) - for format in [ - "b", - "B", - "h", - "H", - "i", - "I", - "l", - "L", - "q", - "Q", - "x", - "1x", - "2x", - "3x", - "4x", - "5x", - "6x", - "7x", - "8x", - "9x", - "h", - "=h", - "@h", - "!h", - "1h", - "=1h", - ]: - newbuf = Exporter(srect.size, format=format) - rrect = font.render_raw_to(newbuf, "Hi", size=12) - self.assertEqual(rrect, srect) - # Some unsupported formats - for format in ["f", "d", "2h", "?", "hh"]: - newbuf = Exporter(srect.size, format=format, itemsize=4) - self.assertRaises(ValueError, font.render_raw_to, newbuf, "Hi", size=12) - - def test_freetype_Font_style(self): - - font = self._TEST_FONTS["sans"] - - # make sure STYLE_NORMAL is the default value - self.assertEqual(ft.STYLE_NORMAL, font.style) - - # make sure we check for style type - with 
self.assertRaises(TypeError): - font.style = "None" - with self.assertRaises(TypeError): - font.style = None - - # make sure we only accept valid constants - with self.assertRaises(ValueError): - font.style = 112 - - # make assure no assignments happened - self.assertEqual(ft.STYLE_NORMAL, font.style) - - # test assignement - font.style = ft.STYLE_UNDERLINE - self.assertEqual(ft.STYLE_UNDERLINE, font.style) - - # test complex styles - st = ft.STYLE_STRONG | ft.STYLE_UNDERLINE | ft.STYLE_OBLIQUE - - font.style = st - self.assertEqual(st, font.style) - - # and that STYLE_DEFAULT has no effect (continued from above) - self.assertNotEqual(st, ft.STYLE_DEFAULT) - font.style = ft.STYLE_DEFAULT - self.assertEqual(st, font.style) - - # revert changes - font.style = ft.STYLE_NORMAL - self.assertEqual(ft.STYLE_NORMAL, font.style) - - def test_freetype_Font_resolution(self): - text = "|" # Differs in width and height - resolution = ft.get_default_resolution() - new_font = ft.Font(self._sans_path, resolution=2 * resolution) - self.assertEqual(new_font.resolution, 2 * resolution) - size_normal = self._TEST_FONTS["sans"].get_rect(text, size=24).size - size_scaled = new_font.get_rect(text, size=24).size - size_by_2 = size_normal[0] * 2 - self.assertTrue( - size_by_2 + 2 >= size_scaled[0] >= size_by_2 - 2, - "%i not equal %i" % (size_scaled[1], size_by_2), - ) - size_by_2 = size_normal[1] * 2 - self.assertTrue( - size_by_2 + 2 >= size_scaled[1] >= size_by_2 - 2, - "%i not equal %i" % (size_scaled[1], size_by_2), - ) - new_resolution = resolution + 10 - ft.set_default_resolution(new_resolution) - try: - new_font = ft.Font(self._sans_path, resolution=0) - self.assertEqual(new_font.resolution, new_resolution) - finally: - ft.set_default_resolution() - - def test_freetype_Font_path(self): - self.assertEqual(self._TEST_FONTS["sans"].path, self._sans_path) - self.assertRaises(AttributeError, getattr, nullfont(), "path") - - # This Font cache test is conditional on freetype being built by a debug - # version of Python or with the C macro PGFT_DEBUG_CACHE defined. 
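# A minimal usage sketch of the pygame.freetype calls the preceding tests
# exercise (Font construction, render, render_to, get_rect). The font path,
# text, and sizes below are illustrative assumptions rather than values taken
# from the test fixtures:
import pygame
import pygame.freetype as freetype

pygame.init()
freetype.init()                                            # no-op if already initialized
font = freetype.Font("SomeFont.ttf", 24)                   # placeholder path
text_surf, text_rect = font.render("Hello", fgcolor=pygame.Color("black"))
target = pygame.Surface((320, 60))
font.render_to(target, (10, 10), "Hello", pygame.Color("black"))
bounds = font.get_rect("Hello")                            # layout metrics without rendering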
- def test_freetype_Font_cache(self): - glyphs = "abcde" - glen = len(glyphs) - other_glyphs = "123" - oglen = len(other_glyphs) - uempty = str("") - ## many_glyphs = (uempty.join([chr(i) for i in range(32,127)] + - ## [chr(i) for i in range(161,172)] + - ## [chr(i) for i in range(174,239)])) - many_glyphs = uempty.join([chr(i) for i in range(32, 127)]) - mglen = len(many_glyphs) - - count = 0 - access = 0 - hit = 0 - miss = 0 - - f = ft.Font(None, size=24, font_index=0, resolution=72, ucs4=False) - f.style = ft.STYLE_NORMAL - f.antialiased = True - - # Ensure debug counters are zero - self.assertEqual(f._debug_cache_stats, (0, 0, 0, 0, 0)) - # Load some basic glyphs - count = access = miss = glen - f.render_raw(glyphs) - self.assertEqual(f._debug_cache_stats, (count, 0, access, hit, miss)) - # Vertical should not affect the cache - access += glen - hit += glen - f.vertical = True - f.render_raw(glyphs) - f.vertical = False - self.assertEqual(f._debug_cache_stats, (count, 0, access, hit, miss)) - # New glyphs will - count += oglen - access += oglen - miss += oglen - f.render_raw(other_glyphs) - self.assertEqual(f._debug_cache_stats, (count, 0, access, hit, miss)) - # Point size does - count += glen - access += glen - miss += glen - f.render_raw(glyphs, size=12) - self.assertEqual(f._debug_cache_stats, (count, 0, access, hit, miss)) - # Underline style does not - access += oglen - hit += oglen - f.underline = True - f.render_raw(other_glyphs) - f.underline = False - self.assertEqual(f._debug_cache_stats, (count, 0, access, hit, miss)) - # Oblique style does - count += glen - access += glen - miss += glen - f.oblique = True - f.render_raw(glyphs) - f.oblique = False - self.assertEqual(f._debug_cache_stats, (count, 0, access, hit, miss)) - # Strong style does; by this point cache clears can happen - count += glen - access += glen - miss += glen - f.strong = True - f.render_raw(glyphs) - f.strong = False - ccount, cdelete_count, caccess, chit, cmiss = f._debug_cache_stats - self.assertEqual( - (ccount + cdelete_count, caccess, chit, cmiss), (count, access, hit, miss) - ) - # Rotation does - count += glen - access += glen - miss += glen - f.render_raw(glyphs, rotation=10) - ccount, cdelete_count, caccess, chit, cmiss = f._debug_cache_stats - self.assertEqual( - (ccount + cdelete_count, caccess, chit, cmiss), (count, access, hit, miss) - ) - # aliased (mono) glyphs do - count += oglen - access += oglen - miss += oglen - f.antialiased = False - f.render_raw(other_glyphs) - f.antialiased = True - ccount, cdelete_count, caccess, chit, cmiss = f._debug_cache_stats - self.assertEqual( - (ccount + cdelete_count, caccess, chit, cmiss), (count, access, hit, miss) - ) - # Trigger a cleanup for sure. - count += 2 * mglen - access += 2 * mglen - miss += 2 * mglen - f.get_metrics(many_glyphs, size=8) - f.get_metrics(many_glyphs, size=10) - ccount, cdelete_count, caccess, chit, cmiss = f._debug_cache_stats - self.assertTrue(ccount < count) - self.assertEqual( - (ccount + cdelete_count, caccess, chit, cmiss), (count, access, hit, miss) - ) - - try: - ft.Font._debug_cache_stats - except AttributeError: - del test_freetype_Font_cache - - def test_undefined_character_code(self): - # To be consistent with pygame.font.Font, undefined codes - # are rendered as the undefined character, and has metrics - # of None. 
- font = self._TEST_FONTS["sans"] - - img, size1 = font.render(chr(1), (0, 0, 0), size=24) - img, size0 = font.render("", (0, 0, 0), size=24) - self.assertTrue(size1.width > size0.width) - - metrics = font.get_metrics(chr(1) + chr(48), size=24) - self.assertEqual(len(metrics), 2) - self.assertIsNone(metrics[0]) - self.assertIsInstance(metrics[1], tuple) - - def test_issue_242(self): - """Issue #242: get_rect() uses 0 as default style""" - - # Issue #242: freetype.Font.get_rect() ignores style defaults when - # the style argument is not given - # - # The text boundary rectangle returned by freetype.Font.get_rect() - # should match the boundary of the same text rendered directly to a - # surface. This permits accurate text positioning. To work properly, - # get_rect() should calculate the text boundary to reflect text style, - # such as underline. Instead, it ignores the style settings for the - # Font object when the style argument is omitted. - # - # When the style argument is not given, freetype.get_rect() uses - # unstyled text when calculating the boundary rectangle. This is - # because _ftfont_getrect(), in _freetype.c, set the default - # style to 0 rather than FT_STYLE_DEFAULT. - # - font = self._TEST_FONTS["sans"] - - # Try wide style on a wide character. - prev_style = font.wide - font.wide = True - try: - rect = font.get_rect("M", size=64) - surf, rrect = font.render(None, size=64) - self.assertEqual(rect, rrect) - finally: - font.wide = prev_style - - # Try strong style on several wide characters. - prev_style = font.strong - font.strong = True - try: - rect = font.get_rect("Mm_", size=64) - surf, rrect = font.render(None, size=64) - self.assertEqual(rect, rrect) - finally: - font.strong = prev_style - - # Try oblique style on a tall, narrow character. - prev_style = font.oblique - font.oblique = True - try: - rect = font.get_rect("|", size=64) - surf, rrect = font.render(None, size=64) - self.assertEqual(rect, rrect) - finally: - font.oblique = prev_style - - # Try underline style on a glyphless character. - prev_style = font.underline - font.underline = True - try: - rect = font.get_rect(" ", size=64) - surf, rrect = font.render(None, size=64) - self.assertEqual(rect, rrect) - finally: - font.underline = prev_style - - def test_issue_237(self): - """Issue #237: Memory overrun when rendered with underlining""" - - # Issue #237: Memory overrun when text without descenders is rendered - # with underlining - # - # The bug crashes the Python interpreter. The bug is caught with C - # assertions in ft_render_cb.c when the Pygame module is compiled - # for debugging. So far it is only known to affect Times New Roman. - # - name = "Times New Roman" - font = ft.SysFont(name, 19) - if font.name != name: - # The font is unavailable, so skip the test. - return - font.underline = True - s, r = font.render("Amazon", size=19) - - # Some other checks to make sure nothing else broke. 
- for adj in [-2, -1.9, -1, 0, 1.9, 2]: - font.underline_adjustment = adj - s, r = font.render("Amazon", size=19) - - def test_issue_243(self): - """Issue Y: trailing space ignored in boundary calculation""" - - # Issue #243: For a string with trailing spaces, freetype ignores the - # last space in boundary calculations - # - font = self._TEST_FONTS["fixed"] - r1 = font.get_rect(" ", size=64) - self.assertTrue(r1.width > 1) - r2 = font.get_rect(" ", size=64) - self.assertEqual(r2.width, 2 * r1.width) - - def test_garbage_collection(self): - """Check reference counting on returned new references""" - - def ref_items(seq): - return [weakref.ref(o) for o in seq] - - font = self._TEST_FONTS["bmp-8-75dpi"] - font.size = font.get_sizes()[0][0] - text = "A" - rect = font.get_rect(text) - surf = pygame.Surface(rect.size, pygame.SRCALPHA, 32) - refs = [] - refs.extend(ref_items(font.render(text, (0, 0, 0)))) - refs.append(weakref.ref(font.render_to(surf, (0, 0), text, (0, 0, 0)))) - refs.append(weakref.ref(font.get_rect(text))) - - n = len(refs) - self.assertTrue(n > 0) - - # for pypy we garbage collection twice. - for i in range(2): - gc.collect() - - for i in range(n): - self.assertIsNone(refs[i](), "ref %d not collected" % i) - - try: - from sys import getrefcount - except ImportError: - pass - else: - array = arrinter.Array(rect.size, "u", 1) - o = font.render_raw(text) - self.assertEqual(getrefcount(o), 2) - self.assertEqual(getrefcount(o[0]), 2) - self.assertEqual(getrefcount(o[1]), 2) - self.assertEqual(getrefcount(font.render_raw_to(array, text)), 1) - o = font.get_metrics("AB") - self.assertEqual(getrefcount(o), 2) - for i in range(len(o)): - self.assertEqual(getrefcount(o[i]), 2, "refcount fail for item %d" % i) - o = font.get_sizes() - self.assertEqual(getrefcount(o), 2) - for i in range(len(o)): - self.assertEqual(getrefcount(o[i]), 2, "refcount fail for item %d" % i) - - def test_display_surface_quit(self): - """Font.render_to() on a closed display surface""" - - # The Font.render_to() method checks that PySurfaceObject.surf is NULL - # and raise a exception if it is. This fixes a bug in Pygame revision - # 0600ea4f1cfb and earlier where Pygame segfaults instead. - null_surface = pygame.Surface.__new__(pygame.Surface) - f = self._TEST_FONTS["sans"] - self.assertRaises( - pygame.error, f.render_to, null_surface, (0, 0), "Crash!", size=12 - ) - - def test_issue_565(self): - """get_metrics supporting rotation/styles/size""" - - tests = [ - {"method": "size", "value": 36, "msg": "metrics same for size"}, - {"method": "rotation", "value": 90, "msg": "metrics same for rotation"}, - {"method": "oblique", "value": True, "msg": "metrics same for oblique"}, - ] - text = "|" - - def run_test(method, value, msg): - font = ft.Font(self._sans_path, size=24) - before = font.get_metrics(text) - font.__setattr__(method, value) - after = font.get_metrics(text) - self.assertNotEqual(before, after, msg) - - for test in tests: - run_test(test["method"], test["value"], test["msg"]) - - def test_freetype_SysFont_name(self): - """that SysFont accepts names of various types""" - fonts = pygame.font.get_fonts() - size = 12 - - # Check single name string: - font_name = ft.SysFont(fonts[0], size).name - self.assertFalse(font_name is None) - - # Check string of comma-separated names. - names = ",".join(fonts) - font_name_2 = ft.SysFont(names, size).name - self.assertEqual(font_name_2, font_name) - - # Check list of names. 
- font_name_2 = ft.SysFont(fonts, size).name - self.assertEqual(font_name_2, font_name) - - # Check generator: - names = (name for name in fonts) - font_name_2 = ft.SysFont(names, size).name - self.assertEqual(font_name_2, font_name) - - fonts_b = [f.encode() for f in fonts] - - # Check single name bytes. - font_name_2 = ft.SysFont(fonts_b[0], size).name - self.assertEqual(font_name_2, font_name) - - # Check comma-separated bytes. - names = b",".join(fonts_b) - font_name_2 = ft.SysFont(names, size).name - self.assertEqual(font_name_2, font_name) - - # Check list of bytes. - font_name_2 = ft.SysFont(fonts_b, size).name - self.assertEqual(font_name_2, font_name) - - # Check mixed list of bytes and string. - names = [fonts[0], fonts_b[1], fonts[2], fonts_b[3]] - font_name_2 = ft.SysFont(names, size).name - self.assertEqual(font_name_2, font_name) - - def test_pathlib(self): - f = ft.Font(pathlib.Path(self._fixed_path), 20) - - -class FreeTypeTest(unittest.TestCase): - def setUp(self): - ft.init() - - def tearDown(self): - ft.quit() - - def test_resolution(self): - try: - ft.set_default_resolution() - resolution = ft.get_default_resolution() - self.assertEqual(resolution, 72) - new_resolution = resolution + 10 - ft.set_default_resolution(new_resolution) - self.assertEqual(ft.get_default_resolution(), new_resolution) - ft.init(resolution=resolution + 20) - self.assertEqual(ft.get_default_resolution(), new_resolution) - finally: - ft.set_default_resolution() - - def test_autoinit_and_autoquit(self): - pygame.init() - self.assertTrue(ft.get_init()) - pygame.quit() - self.assertFalse(ft.get_init()) - - # Ensure autoquit is replaced at init time - pygame.init() - self.assertTrue(ft.get_init()) - pygame.quit() - self.assertFalse(ft.get_init()) - - def test_init(self): - # Test if module initialized after calling init(). - ft.quit() - ft.init() - - self.assertTrue(ft.get_init()) - - def test_init__multiple(self): - # Test if module initialized after multiple init() calls. - ft.init() - ft.init() - - self.assertTrue(ft.get_init()) - - def test_quit(self): - # Test if module uninitialized after calling quit(). - ft.quit() - - self.assertFalse(ft.get_init()) - - def test_quit__multiple(self): - # Test if module initialized after multiple quit() calls. - ft.quit() - ft.quit() - - self.assertFalse(ft.get_init()) - - def test_get_init(self): - # Test if get_init() gets the init state. 
- self.assertTrue(ft.get_init()) - - def test_cache_size(self): - DEFAULT_CACHE_SIZE = 64 - self.assertEqual(ft.get_cache_size(), DEFAULT_CACHE_SIZE) - ft.quit() - self.assertEqual(ft.get_cache_size(), 0) - new_cache_size = DEFAULT_CACHE_SIZE * 2 - ft.init(cache_size=new_cache_size) - self.assertEqual(ft.get_cache_size(), new_cache_size) - - def test_get_error(self): - """Ensures get_error() is initially empty (None).""" - error_msg = ft.get_error() - - self.assertIsNone(error_msg) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/ftfont_tags.py b/venv/Lib/site-packages/pygame/tests/ftfont_tags.py deleted file mode 100644 index 0d538f4..0000000 --- a/venv/Lib/site-packages/pygame/tests/ftfont_tags.py +++ /dev/null @@ -1,11 +0,0 @@ -__tags__ = ["development"] - -exclude = False - -try: - import pygame.ftfont -except ImportError: - exclude = True - -if exclude: - __tags__.extend(["ignore", "subprocess_ignore"]) diff --git a/venv/Lib/site-packages/pygame/tests/ftfont_test.py b/venv/Lib/site-packages/pygame/tests/ftfont_test.py deleted file mode 100644 index 1f71204..0000000 --- a/venv/Lib/site-packages/pygame/tests/ftfont_test.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys -import os -import unittest -from pygame.tests import font_test - -import pygame.ftfont - -font_test.pygame_font = pygame.ftfont -# Disable UCS-4 specific tests as this "Font" type does accept UCS-4 codes. -font_test.UCS_4 = False - -for name in dir(font_test): - obj = getattr(font_test, name) - if isinstance(obj, type) and issubclass(obj, unittest.TestCase): # conditional and - new_name = "Ft%s" % name - globals()[new_name] = type(new_name, (obj,), {}) - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/gfxdraw_test.py b/venv/Lib/site-packages/pygame/tests/gfxdraw_test.py deleted file mode 100644 index 293cef3..0000000 --- a/venv/Lib/site-packages/pygame/tests/gfxdraw_test.py +++ /dev/null @@ -1,877 +0,0 @@ -import unittest -import pygame -import pygame.gfxdraw -from pygame.locals import * -from pygame.tests.test_utils import SurfaceSubclass - - -def intensity(c, i): - """Return color c changed by intensity i - - For 0 <= i <= 127 the color is a shade, with 0 being black, 127 being the - unaltered color. - - For 128 <= i <= 255 the color is a tint, with 255 being white, 128 the - unaltered color. - - """ - r, g, b = c[0:3] - if 0 <= i <= 127: - # Darken - return ((r * i) // 127, (g * i) // 127, (b * i) // 127) - # Lighten - return ( - r + ((255 - r) * (255 - i)) // 127, - g + ((255 - g) * (255 - i)) // 127, - b + ((255 - b) * (255 - i)) // 127, - ) - - -class GfxdrawDefaultTest(unittest.TestCase): - - is_started = False - - foreground_color = (128, 64, 8) - background_color = (255, 255, 255) - - def make_palette(base_color): - """Return color palette that is various intensities of base_color""" - # Need this function for Python 3.x so the base_color - # is within the scope of the list comprehension. 
- return [intensity(base_color, i) for i in range(0, 256)] - - default_palette = make_palette(foreground_color) - - default_size = (100, 100) - - def check_at(self, surf, posn, color): - sc = surf.get_at(posn) - fail_msg = "%s != %s at %s, bitsize: %i, flags: %i, masks: %s" % ( - sc, - color, - posn, - surf.get_bitsize(), - surf.get_flags(), - surf.get_masks(), - ) - self.assertEqual(sc, color, fail_msg) - - def check_not_at(self, surf, posn, color): - sc = surf.get_at(posn) - fail_msg = "%s != %s at %s, bitsize: %i, flags: %i, masks: %s" % ( - sc, - color, - posn, - surf.get_bitsize(), - surf.get_flags(), - surf.get_masks(), - ) - self.assertNotEqual(sc, color, fail_msg) - - @classmethod - def setUpClass(cls): - # Necessary for Surface.set_palette. - pygame.init() - pygame.display.set_mode((1, 1)) - - @classmethod - def tearDownClass(cls): - pygame.quit() - - def setUp(self): - # This makes sure pygame is always initialized before each test (in - # case a test calls pygame.quit()). - if not pygame.get_init(): - pygame.init() - - Surface = pygame.Surface - size = self.default_size - palette = self.default_palette - if not self.is_started: - # Create test surfaces - self.surfaces = [ - Surface(size, 0, 8), - Surface(size, SRCALPHA, 16), - Surface(size, SRCALPHA, 32), - ] - self.surfaces[0].set_palette(palette) - nonpalette_fmts = ( - # (8, (0xe0, 0x1c, 0x3, 0x0)), - (12, (0xF00, 0xF0, 0xF, 0x0)), - (15, (0x7C00, 0x3E0, 0x1F, 0x0)), - (15, (0x1F, 0x3E0, 0x7C00, 0x0)), - (16, (0xF00, 0xF0, 0xF, 0xF000)), - (16, (0xF000, 0xF00, 0xF0, 0xF)), - (16, (0xF, 0xF0, 0xF00, 0xF000)), - (16, (0xF0, 0xF00, 0xF000, 0xF)), - (16, (0x7C00, 0x3E0, 0x1F, 0x8000)), - (16, (0xF800, 0x7C0, 0x3E, 0x1)), - (16, (0x1F, 0x3E0, 0x7C00, 0x8000)), - (16, (0x3E, 0x7C0, 0xF800, 0x1)), - (16, (0xF800, 0x7E0, 0x1F, 0x0)), - (16, (0x1F, 0x7E0, 0xF800, 0x0)), - (24, (0xFF, 0xFF00, 0xFF0000, 0x0)), - (24, (0xFF0000, 0xFF00, 0xFF, 0x0)), - (32, (0xFF0000, 0xFF00, 0xFF, 0x0)), - (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)), - (32, (0xFF, 0xFF00, 0xFF0000, 0x0)), - (32, (0xFF00, 0xFF0000, 0xFF000000, 0x0)), - (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)), - (32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)), - (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)), - (32, (0xFF00, 0xFF0000, 0xFF000000, 0xFF)), - ) - for bitsize, masks in nonpalette_fmts: - self.surfaces.append(Surface(size, 0, bitsize, masks)) - for surf in self.surfaces: - surf.fill(self.background_color) - - def test_gfxdraw__subclassed_surface(self): - """Ensure pygame.gfxdraw works on subclassed surfaces.""" - surface = SurfaceSubclass((11, 13), SRCALPHA, 32) - surface.fill(pygame.Color("blue")) - expected_color = pygame.Color("red") - x, y = 1, 2 - - pygame.gfxdraw.pixel(surface, x, y, expected_color) - - self.assertEqual(surface.get_at((x, y)), expected_color) - - def test_pixel(self): - """pixel(surface, x, y, color): return None""" - fg = self.foreground_color - bg = self.background_color - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.pixel(surf, 2, 2, fg) - for x in range(1, 4): - for y in range(1, 4): - if x == 2 and y == 2: - self.check_at(surf, (x, y), fg_adjusted) - else: - self.check_at(surf, (x, y), bg_adjusted) - - def test_hline(self): - """hline(surface, x1, x2, y, color): return None""" - fg = self.foreground_color - bg = self.background_color - startx = 10 - stopx = 80 - y = 50 - fg_test_points = [(startx, y), (stopx, y), ((stopx - startx) // 2, y)] - bg_test_points = [ - 
(startx - 1, y), - (stopx + 1, y), - (startx, y - 1), - (startx, y + 1), - (stopx, y - 1), - (stopx, y + 1), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.hline(surf, startx, stopx, y, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_vline(self): - """vline(surface, x, y1, y2, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 50 - starty = 10 - stopy = 80 - fg_test_points = [(x, starty), (x, stopy), (x, (stopy - starty) // 2)] - bg_test_points = [ - (x, starty - 1), - (x, stopy + 1), - (x - 1, starty), - (x + 1, starty), - (x - 1, stopy), - (x + 1, stopy), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.vline(surf, x, starty, stopy, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_rectangle(self): - """rectangle(surface, rect, color): return None""" - fg = self.foreground_color - bg = self.background_color - rect = pygame.Rect(10, 15, 55, 62) - rect_tuple = tuple(rect) - fg_test_points = [ - rect.topleft, - (rect.right - 1, rect.top), - (rect.left, rect.bottom - 1), - (rect.right - 1, rect.bottom - 1), - ] - bg_test_points = [ - (rect.left - 1, rect.top - 1), - (rect.left + 1, rect.top + 1), - (rect.right, rect.top - 1), - (rect.right - 2, rect.top + 1), - (rect.left - 1, rect.bottom), - (rect.left + 1, rect.bottom - 2), - (rect.right, rect.bottom), - (rect.right - 2, rect.bottom - 2), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.rectangle(surf, rect, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - surf.fill(bg) - pygame.gfxdraw.rectangle(surf, rect_tuple, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_box(self): - """box(surface, rect, color): return None""" - fg = self.foreground_color - bg = self.background_color - rect = pygame.Rect(10, 15, 55, 62) - rect_tuple = tuple(rect) - fg_test_points = [ - rect.topleft, - (rect.left + 1, rect.top + 1), - (rect.right - 1, rect.top), - (rect.right - 2, rect.top + 1), - (rect.left, rect.bottom - 1), - (rect.left + 1, rect.bottom - 2), - (rect.right - 1, rect.bottom - 1), - (rect.right - 2, rect.bottom - 2), - ] - bg_test_points = [ - (rect.left - 1, rect.top - 1), - (rect.right, rect.top - 1), - (rect.left - 1, rect.bottom), - (rect.right, rect.bottom), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.box(surf, rect, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - surf.fill(bg) - pygame.gfxdraw.box(surf, rect_tuple, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_line(self): - """line(surface, x1, y1, x2, y2, color): return None""" - fg = self.foreground_color - bg = 
self.background_color - x1 = 10 - y1 = 15 - x2 = 92 - y2 = 77 - fg_test_points = [(x1, y1), (x2, y2)] - bg_test_points = [ - (x1 - 1, y1), - (x1, y1 - 1), - (x1 - 1, y1 - 1), - (x2 + 1, y2), - (x2, y2 + 1), - (x2 + 1, y2 + 1), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.line(surf, x1, y1, x2, y2, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_circle(self): - """circle(surface, x, y, r, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - r = 30 - fg_test_points = [(x, y - r), (x, y + r), (x - r, y), (x + r, y)] - bg_test_points = [ - (x, y), - (x, y - r + 1), - (x, y - r - 1), - (x, y + r + 1), - (x, y + r - 1), - (x - r - 1, y), - (x - r + 1, y), - (x + r + 1, y), - (x + r - 1, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.circle(surf, x, y, r, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_arc(self): - """arc(surface, x, y, r, start, end, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - r = 30 - start = 0 # +x direction, but not (x + r, y) (?) - end = 90 # -y direction, including (x, y + r) - fg_test_points = [(x, y + r), (x + r, y + 1)] - bg_test_points = [ - (x, y), - (x, y - r), - (x - r, y), - (x, y + r + 1), - (x, y + r - 1), - (x - 1, y + r), - (x + r + 1, y), - (x + r - 1, y), - (x + r, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.arc(surf, x, y, r, start, end, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_aacircle(self): - """aacircle(surface, x, y, r, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - r = 30 - fg_test_points = [(x, y - r), (x, y + r), (x - r, y), (x + r, y)] - bg_test_points = [ - (x, y), - (x, y - r + 1), - (x, y - r - 1), - (x, y + r + 1), - (x, y + r - 1), - (x - r - 1, y), - (x - r + 1, y), - (x + r + 1, y), - (x + r - 1, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.aacircle(surf, x, y, r, fg) - for posn in fg_test_points: - self.check_not_at(surf, posn, bg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_filled_circle(self): - """filled_circle(surface, x, y, r, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - r = 30 - fg_test_points = [ - (x, y - r), - (x, y - r + 1), - (x, y + r), - (x, y + r - 1), - (x - r, y), - (x - r + 1, y), - (x + r, y), - (x + r - 1, y), - (x, y), - ] - bg_test_points = [ - (x, y - r - 1), - (x, y + r + 1), - (x - r - 1, y), - (x + r + 1, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.filled_circle(surf, x, y, r, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, 
posn, bg_adjusted) - - def test_ellipse(self): - """ellipse(surface, x, y, rx, ry, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - rx = 30 - ry = 35 - fg_test_points = [(x, y - ry), (x, y + ry), (x - rx, y), (x + rx, y)] - bg_test_points = [ - (x, y), - (x, y - ry + 1), - (x, y - ry - 1), - (x, y + ry + 1), - (x, y + ry - 1), - (x - rx - 1, y), - (x - rx + 1, y), - (x + rx + 1, y), - (x + rx - 1, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.ellipse(surf, x, y, rx, ry, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_aaellipse(self): - """aaellipse(surface, x, y, rx, ry, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - rx = 30 - ry = 35 - fg_test_points = [(x, y - ry), (x, y + ry), (x - rx, y), (x + rx, y)] - bg_test_points = [ - (x, y), - (x, y - ry + 1), - (x, y - ry - 1), - (x, y + ry + 1), - (x, y + ry - 1), - (x - rx - 1, y), - (x - rx + 1, y), - (x + rx + 1, y), - (x + rx - 1, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.aaellipse(surf, x, y, rx, ry, fg) - for posn in fg_test_points: - self.check_not_at(surf, posn, bg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_filled_ellipse(self): - """filled_ellipse(surface, x, y, rx, ry, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - rx = 30 - ry = 35 - fg_test_points = [ - (x, y - ry), - (x, y - ry + 1), - (x, y + ry), - (x, y + ry - 1), - (x - rx, y), - (x - rx + 1, y), - (x + rx, y), - (x + rx - 1, y), - (x, y), - ] - bg_test_points = [ - (x, y - ry - 1), - (x, y + ry + 1), - (x - rx - 1, y), - (x + rx + 1, y), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.filled_ellipse(surf, x, y, rx, ry, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_pie(self): - """pie(surface, x, y, r, start, end, color): return None""" - fg = self.foreground_color - bg = self.background_color - x = 45 - y = 40 - r = 30 - start = 0 # +x direction, including (x + r, y) - end = 90 # -y direction, but not (x, y + r) (?) 
- fg_test_points = [(x, y), (x + 1, y), (x, y + 1), (x + r, y)] - bg_test_points = [ - (x - 1, y), - (x, y - 1), - (x - 1, y - 1), - (x + 1, y + 1), - (x + r + 1, y), - (x + r, y - 1), - (x, y + r + 1), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.pie(surf, x, y, r, start, end, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_trigon(self): - """trigon(surface, x1, y1, x2, y2, x3, y3, color): return None""" - fg = self.foreground_color - bg = self.background_color - x1 = 10 - y1 = 15 - x2 = 92 - y2 = 77 - x3 = 20 - y3 = 60 - fg_test_points = [(x1, y1), (x2, y2), (x3, y3)] - bg_test_points = [ - (x1 - 1, y1 - 1), - (x2 + 1, y2 + 1), - (x3 - 1, y3 + 1), - (x1 + 10, y1 + 30), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.trigon(surf, x1, y1, x2, y2, x3, y3, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_aatrigon(self): - """aatrigon(surface, x1, y1, x2, y2, x3, y3, color): return None""" - fg = self.foreground_color - bg = self.background_color - x1 = 10 - y1 = 15 - x2 = 92 - y2 = 77 - x3 = 20 - y3 = 60 - fg_test_points = [(x1, y1), (x2, y2), (x3, y3)] - bg_test_points = [ - (x1 - 1, y1 - 1), - (x2 + 1, y2 + 1), - (x3 - 1, y3 + 1), - (x1 + 10, y1 + 30), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.aatrigon(surf, x1, y1, x2, y2, x3, y3, fg) - for posn in fg_test_points: - self.check_not_at(surf, posn, bg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_aatrigon__with_horizontal_edge(self): - """Ensure aatrigon draws horizontal edges correctly. - - This test creates 2 surfaces and draws an aatrigon on each. The pixels - on each surface are compared to ensure they are the same. The only - difference between the 2 aatrigons is the order the points are drawn. - The order of the points should have no impact on the final drawing. - - Related to issue #622. - """ - bg_color = pygame.Color("white") - line_color = pygame.Color("black") - width, height = 11, 10 - expected_surface = pygame.Surface((width, height), 0, 32) - expected_surface.fill(bg_color) - surface = pygame.Surface((width, height), 0, 32) - surface.fill(bg_color) - - x1, y1 = width - 1, 0 - x2, y2 = (width - 1) // 2, height - 1 - x3, y3 = 0, 0 - - # The points in this order draw as expected. - pygame.gfxdraw.aatrigon(expected_surface, x1, y1, x2, y2, x3, y3, line_color) - - # The points in reverse order fail to draw the horizontal edge along - # the top. - pygame.gfxdraw.aatrigon(surface, x3, y3, x2, y2, x1, y1, line_color) - - # The surfaces are locked for a possible speed up of pixel access. 
- expected_surface.lock() - surface.lock() - for x in range(width): - for y in range(height): - self.assertEqual( - expected_surface.get_at((x, y)), - surface.get_at((x, y)), - "pos=({}, {})".format(x, y), - ) - - surface.unlock() - expected_surface.unlock() - - def test_filled_trigon(self): - """filled_trigon(surface, x1, y1, x2, y2, x3, y3, color): return None""" - fg = self.foreground_color - bg = self.background_color - x1 = 10 - y1 = 15 - x2 = 92 - y2 = 77 - x3 = 20 - y3 = 60 - fg_test_points = [(x1, y1), (x2, y2), (x3, y3), (x1 + 10, y1 + 30)] - bg_test_points = [(x1 - 1, y1 - 1), (x2 + 1, y2 + 1), (x3 - 1, y3 + 1)] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.filled_trigon(surf, x1, y1, x2, y2, x3, y3, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_polygon(self): - """polygon(surface, points, color): return None""" - fg = self.foreground_color - bg = self.background_color - points = [(10, 80), (10, 15), (92, 25), (92, 80)] - fg_test_points = points + [ - (points[0][0], points[0][1] - 1), - (points[0][0] + 1, points[0][1]), - (points[3][0] - 1, points[3][1]), - (points[3][0], points[3][1] - 1), - (points[2][0], points[2][1] + 1), - ] - bg_test_points = [ - (points[0][0] - 1, points[0][1]), - (points[0][0], points[0][1] + 1), - (points[0][0] - 1, points[0][1] + 1), - (points[0][0] + 1, points[0][1] - 1), - (points[3][0] + 1, points[3][1]), - (points[3][0], points[3][1] + 1), - (points[3][0] + 1, points[3][1] + 1), - (points[3][0] - 1, points[3][1] - 1), - (points[2][0] + 1, points[2][1]), - (points[2][0] - 1, points[2][1] + 1), - (points[1][0] - 1, points[1][1]), - (points[1][0], points[1][1] - 1), - (points[1][0] - 1, points[1][1] - 1), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.polygon(surf, points, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_aapolygon(self): - """aapolygon(surface, points, color): return None""" - fg = self.foreground_color - bg = self.background_color - points = [(10, 80), (10, 15), (92, 25), (92, 80)] - fg_test_points = points - bg_test_points = [ - (points[0][0] - 1, points[0][1]), - (points[0][0], points[0][1] + 1), - (points[0][0] - 1, points[0][1] + 1), - (points[0][0] + 1, points[0][1] - 1), - (points[3][0] + 1, points[3][1]), - (points[3][0], points[3][1] + 1), - (points[3][0] + 1, points[3][1] + 1), - (points[3][0] - 1, points[3][1] - 1), - (points[2][0] + 1, points[2][1]), - (points[2][0] - 1, points[2][1] + 1), - (points[1][0] - 1, points[1][1]), - (points[1][0], points[1][1] - 1), - (points[1][0] - 1, points[1][1] - 1), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.aapolygon(surf, points, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_not_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_aapolygon__with_horizontal_edge(self): - """Ensure aapolygon draws horizontal edges correctly. - - This test creates 2 surfaces and draws a polygon on each. 
The pixels - on each surface are compared to ensure they are the same. The only - difference between the 2 polygons is that one is drawn using - aapolygon() and the other using multiple line() calls. They should - produce the same final drawing. - - Related to issue #622. - """ - bg_color = pygame.Color("white") - line_color = pygame.Color("black") - width, height = 11, 10 - expected_surface = pygame.Surface((width, height), 0, 32) - expected_surface.fill(bg_color) - surface = pygame.Surface((width, height), 0, 32) - surface.fill(bg_color) - - points = ((0, 0), (0, height - 1), (width - 1, height - 1), (width - 1, 0)) - - # The points are used to draw the expected aapolygon using the line() - # function. - for (x1, y1), (x2, y2) in zip(points, points[1:] + points[:1]): - pygame.gfxdraw.line(expected_surface, x1, y1, x2, y2, line_color) - - # The points in this order fail to draw the horizontal edge along - # the top. - pygame.gfxdraw.aapolygon(surface, points, line_color) - - # The surfaces are locked for a possible speed up of pixel access. - expected_surface.lock() - surface.lock() - for x in range(width): - for y in range(height): - self.assertEqual( - expected_surface.get_at((x, y)), - surface.get_at((x, y)), - "pos=({}, {})".format(x, y), - ) - - surface.unlock() - expected_surface.unlock() - - def test_filled_polygon(self): - """filled_polygon(surface, points, color): return None""" - fg = self.foreground_color - bg = self.background_color - points = [(10, 80), (10, 15), (92, 25), (92, 80)] - fg_test_points = points + [ - (points[0][0], points[0][1] - 1), - (points[0][0] + 1, points[0][1]), - (points[0][0] + 1, points[0][1] - 1), - (points[3][0] - 1, points[3][1]), - (points[3][0], points[3][1] - 1), - (points[3][0] - 1, points[3][1] - 1), - (points[2][0], points[2][1] + 1), - (points[2][0] - 1, points[2][1] + 1), - ] - bg_test_points = [ - (points[0][0] - 1, points[0][1]), - (points[0][0], points[0][1] + 1), - (points[0][0] - 1, points[0][1] + 1), - (points[3][0] + 1, points[3][1]), - (points[3][0], points[3][1] + 1), - (points[3][0] + 1, points[3][1] + 1), - (points[2][0] + 1, points[2][1]), - (points[1][0] - 1, points[1][1]), - (points[1][0], points[1][1] - 1), - (points[1][0] - 1, points[1][1] - 1), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.filled_polygon(surf, points, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - def test_textured_polygon(self): - """textured_polygon(surface, points, texture, tx, ty): return None""" - w, h = self.default_size - fg = self.foreground_color - bg = self.background_color - tx = 0 - ty = 0 - texture = pygame.Surface((w + tx, h + ty), 0, 24) - texture.fill(fg, (0, 0, w, h)) - points = [(10, 80), (10, 15), (92, 25), (92, 80)] - # Don't know how to really check this as boarder points may - # or may not be included in the textured polygon. 
- fg_test_points = [(points[1][0] + 30, points[1][1] + 40)] - bg_test_points = [ - (points[0][0] - 1, points[0][1]), - (points[0][0], points[0][1] + 1), - (points[0][0] - 1, points[0][1] + 1), - (points[3][0] + 1, points[3][1]), - (points[3][0], points[3][1] + 1), - (points[3][0] + 1, points[3][1] + 1), - (points[2][0] + 1, points[2][1]), - (points[1][0] - 1, points[1][1]), - (points[1][0], points[1][1] - 1), - (points[1][0] - 1, points[1][1] - 1), - ] - for surf in self.surfaces[1:]: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.textured_polygon(surf, points, texture, -tx, -ty) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - # Alpha blit to 8 bits-per-pixel surface forbidden. - texture = pygame.Surface(self.default_size, SRCALPHA, 32) - self.assertRaises( - ValueError, - pygame.gfxdraw.textured_polygon, - self.surfaces[0], - points, - texture, - 0, - 0, - ) - - def test_bezier(self): - """bezier(surface, points, steps, color): return None""" - fg = self.foreground_color - bg = self.background_color - points = [(10, 50), (25, 15), (60, 80), (92, 30)] - fg_test_points = [points[0], points[3]] - bg_test_points = [ - (points[0][0] - 1, points[0][1]), - (points[3][0] + 1, points[3][1]), - (points[1][0], points[1][1] + 3), - (points[2][0], points[2][1] - 3), - ] - for surf in self.surfaces: - fg_adjusted = surf.unmap_rgb(surf.map_rgb(fg)) - bg_adjusted = surf.unmap_rgb(surf.map_rgb(bg)) - pygame.gfxdraw.bezier(surf, points, 30, fg) - for posn in fg_test_points: - self.check_at(surf, posn, fg_adjusted) - for posn in bg_test_points: - self.check_at(surf, posn, bg_adjusted) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/image__save_gl_surface_test.py b/venv/Lib/site-packages/pygame/tests/image__save_gl_surface_test.py deleted file mode 100644 index 2932f42..0000000 --- a/venv/Lib/site-packages/pygame/tests/image__save_gl_surface_test.py +++ /dev/null @@ -1,46 +0,0 @@ -import os -import unittest - -from pygame.tests import test_utils -import pygame -from pygame.locals import * - - -@unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", - 'OpenGL requires a non-"dummy" SDL_VIDEODRIVER', -) -class GL_ImageSave(unittest.TestCase): - def test_image_save_works_with_opengl_surfaces(self): - """ - |tags:display,slow,opengl| - """ - - pygame.display.init() - screen = pygame.display.set_mode((640, 480), OPENGL | DOUBLEBUF) - pygame.display.flip() - - tmp_dir = test_utils.get_tmp_dir() - # Try the imageext module. - tmp_file = os.path.join(tmp_dir, "opengl_save_surface_test.png") - pygame.image.save(screen, tmp_file) - - self.assertTrue(os.path.exists(tmp_file)) - - os.remove(tmp_file) - - # Only test the image module. 
- tmp_file = os.path.join(tmp_dir, "opengl_save_surface_test.bmp") - pygame.image.save(screen, tmp_file) - - self.assertTrue(os.path.exists(tmp_file)) - - os.remove(tmp_file) - - # stops tonnes of tmp dirs building up in trunk dir - os.rmdir(tmp_dir) - pygame.display.quit() - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/image_tags.py b/venv/Lib/site-packages/pygame/tests/image_tags.py deleted file mode 100644 index d847903..0000000 --- a/venv/Lib/site-packages/pygame/tests/image_tags.py +++ /dev/null @@ -1,7 +0,0 @@ -__tags__ = [] - -import pygame -import sys - -if "pygame.image" not in sys.modules: - __tags__.extend(("ignore", "subprocess_ignore")) diff --git a/venv/Lib/site-packages/pygame/tests/image_test.py b/venv/Lib/site-packages/pygame/tests/image_test.py deleted file mode 100644 index a3522fb..0000000 --- a/venv/Lib/site-packages/pygame/tests/image_test.py +++ /dev/null @@ -1,1115 +0,0 @@ -# -*- coding: utf-8 -*- - -import array -import binascii -import io -import os -import tempfile -import unittest -import glob -import pathlib - -from pygame.tests.test_utils import example_path, png, tostring -import pygame, pygame.image, pygame.pkgdata - - -def test_magic(f, magic_hexes): - """Tests a given file to see if the magic hex matches.""" - data = f.read(len(magic_hexes)) - if len(data) != len(magic_hexes): - return 0 - for i, magic_hex in enumerate(magic_hexes): - if magic_hex != data[i]: - return 0 - return 1 - - -class ImageModuleTest(unittest.TestCase): - def testLoadIcon(self): - """see if we can load the pygame icon.""" - f = pygame.pkgdata.getResource("pygame_icon.bmp") - self.assertEqual(f.mode, "rb") - - surf = pygame.image.load_basic(f) - - self.assertEqual(surf.get_at((0, 0)), (5, 4, 5, 255)) - self.assertEqual(surf.get_height(), 32) - self.assertEqual(surf.get_width(), 32) - - def testLoadPNG(self): - """see if we can load a png with color values in the proper channels.""" - # Create a PNG file with known colors - reddish_pixel = (210, 0, 0, 255) - greenish_pixel = (0, 220, 0, 255) - bluish_pixel = (0, 0, 230, 255) - greyish_pixel = (110, 120, 130, 140) - pixel_array = [reddish_pixel + greenish_pixel, bluish_pixel + greyish_pixel] - - f_descriptor, f_path = tempfile.mkstemp(suffix=".png") - - with os.fdopen(f_descriptor, "wb") as f: - w = png.Writer(2, 2, alpha=True) - w.write(f, pixel_array) - - # Read the PNG file and verify that pygame interprets it correctly - surf = pygame.image.load(f_path) - - self.assertEqual(surf.get_at((0, 0)), reddish_pixel) - self.assertEqual(surf.get_at((1, 0)), greenish_pixel) - self.assertEqual(surf.get_at((0, 1)), bluish_pixel) - self.assertEqual(surf.get_at((1, 1)), greyish_pixel) - - # Read the PNG file obj. 
and verify that pygame interprets it correctly - with open(f_path, "rb") as f: - surf = pygame.image.load(f) - - self.assertEqual(surf.get_at((0, 0)), reddish_pixel) - self.assertEqual(surf.get_at((1, 0)), greenish_pixel) - self.assertEqual(surf.get_at((0, 1)), bluish_pixel) - self.assertEqual(surf.get_at((1, 1)), greyish_pixel) - - os.remove(f_path) - - def testLoadJPG(self): - """to see if we can load a jpg.""" - f = example_path("data/alien1.jpg") - surf = pygame.image.load(f) - - with open(f, "rb") as f: - surf = pygame.image.load(f) - - def testLoadBytesIO(self): - """to see if we can load images with BytesIO.""" - files = [ - "data/alien1.png", - "data/alien1.jpg", - "data/alien1.gif", - "data/asprite.bmp", - ] - - for fname in files: - with self.subTest(fname=fname): - with open(example_path(fname), "rb") as f: - img_bytes = f.read() - img_file = io.BytesIO(img_bytes) - image = pygame.image.load(img_file) - - def testSaveJPG(self): - """JPG equivalent to issue #211 - color channel swapping - - Make sure the SDL surface color masks represent the rgb memory format - required by the JPG library. The masks are machine endian dependent - """ - - from pygame import Color, Rect - - # The source image is a 2 by 2 square of four colors. Since JPEG is - # lossy, there can be color bleed. Make each color square 16 by 16, - # to avoid the significantly color value distorts found at color - # boundaries due to the compression value set by Pygame. - square_len = 16 - sz = 2 * square_len, 2 * square_len - - # +---------------------------------+ - # | red | green | - # |----------------+----------------| - # | blue | (255, 128, 64) | - # +---------------------------------+ - # - # as (rect, color) pairs. - def as_rect(square_x, square_y): - return Rect( - square_x * square_len, square_y * square_len, square_len, square_len - ) - - squares = [ - (as_rect(0, 0), Color("red")), - (as_rect(1, 0), Color("green")), - (as_rect(0, 1), Color("blue")), - (as_rect(1, 1), Color(255, 128, 64)), - ] - - # A surface format which is not directly usable with libjpeg. - surf = pygame.Surface(sz, 0, 32) - for rect, color in squares: - surf.fill(color, rect) - - # Assume pygame.image.Load works correctly as it is handled by the - # third party SDL_image library. - f_path = tempfile.mktemp(suffix=".jpg") - pygame.image.save(surf, f_path) - jpg_surf = pygame.image.load(f_path) - - # Allow for small differences in the restored colors. 
- def approx(c): - mask = 0xFC - return pygame.Color(c.r & mask, c.g & mask, c.b & mask) - - offset = square_len // 2 - for rect, color in squares: - posn = rect.move((offset, offset)).topleft - self.assertEqual(approx(jpg_surf.get_at(posn)), approx(color)) - - os.remove(f_path) - - def testSavePNG32(self): - """see if we can save a png with color values in the proper channels.""" - # Create a PNG file with known colors - reddish_pixel = (215, 0, 0, 255) - greenish_pixel = (0, 225, 0, 255) - bluish_pixel = (0, 0, 235, 255) - greyish_pixel = (115, 125, 135, 145) - - surf = pygame.Surface((1, 4), pygame.SRCALPHA, 32) - surf.set_at((0, 0), reddish_pixel) - surf.set_at((0, 1), greenish_pixel) - surf.set_at((0, 2), bluish_pixel) - surf.set_at((0, 3), greyish_pixel) - - f_path = tempfile.mktemp(suffix=".png") - pygame.image.save(surf, f_path) - - try: - # Read the PNG file and verify that pygame saved it correctly - reader = png.Reader(filename=f_path) - width, height, pixels, metadata = reader.asRGBA8() - - # pixels is a generator - self.assertEqual(tuple(next(pixels)), reddish_pixel) - self.assertEqual(tuple(next(pixels)), greenish_pixel) - self.assertEqual(tuple(next(pixels)), bluish_pixel) - self.assertEqual(tuple(next(pixels)), greyish_pixel) - - finally: - # Ensures proper clean up. - if not reader.file.closed: - reader.file.close() - del reader - os.remove(f_path) - - def testSavePNG24(self): - """see if we can save a png with color values in the proper channels.""" - # Create a PNG file with known colors - reddish_pixel = (215, 0, 0) - greenish_pixel = (0, 225, 0) - bluish_pixel = (0, 0, 235) - greyish_pixel = (115, 125, 135) - - surf = pygame.Surface((1, 4), 0, 24) - surf.set_at((0, 0), reddish_pixel) - surf.set_at((0, 1), greenish_pixel) - surf.set_at((0, 2), bluish_pixel) - surf.set_at((0, 3), greyish_pixel) - - f_path = tempfile.mktemp(suffix=".png") - pygame.image.save(surf, f_path) - - try: - # Read the PNG file and verify that pygame saved it correctly - reader = png.Reader(filename=f_path) - width, height, pixels, metadata = reader.asRGB8() - - # pixels is a generator - self.assertEqual(tuple(next(pixels)), reddish_pixel) - self.assertEqual(tuple(next(pixels)), greenish_pixel) - self.assertEqual(tuple(next(pixels)), bluish_pixel) - self.assertEqual(tuple(next(pixels)), greyish_pixel) - - finally: - # Ensures proper clean up. - if not reader.file.closed: - reader.file.close() - del reader - os.remove(f_path) - - def test_save(self): - - s = pygame.Surface((10, 10)) - s.fill((23, 23, 23)) - magic_hex = {} - magic_hex["jpg"] = [0xFF, 0xD8, 0xFF, 0xE0] - magic_hex["png"] = [0x89, 0x50, 0x4E, 0x47] - # magic_hex['tga'] = [0x0, 0x0, 0xa] - magic_hex["bmp"] = [0x42, 0x4D] - - formats = ["jpg", "png", "bmp"] - # uppercase too... JPG - formats = formats + [x.upper() for x in formats] - - for fmt in formats: - try: - temp_filename = "%s.%s" % ("tmpimg", fmt) - pygame.image.save(s, temp_filename) - - # Using 'with' ensures the file is closed even if test fails. - with open(temp_filename, "rb") as handle: - # Test the magic numbers at the start of the file to ensure - # they are saved as the correct file type. - self.assertEqual( - (1, fmt), (test_magic(handle, magic_hex[fmt.lower()]), fmt) - ) - - # load the file to make sure it was saved correctly. - # Note load can load a jpg saved with a .png file name. - s2 = pygame.image.load(temp_filename) - # compare contents, might only work reliably for png... - # but because it's all one color it seems to work with jpg. 
- self.assertEqual(s2.get_at((0, 0)), s.get_at((0, 0))) - finally: - # clean up the temp file, comment out to leave tmp file after run. - os.remove(temp_filename) - - def test_save_to_fileobject(self): - s = pygame.Surface((1, 1)) - s.fill((23, 23, 23)) - bytes_stream = io.BytesIO() - - pygame.image.save(s, bytes_stream) - bytes_stream.seek(0) - s2 = pygame.image.load(bytes_stream, "tga") - self.assertEqual(s.get_at((0, 0)), s2.get_at((0, 0))) - - def test_save_tga(self): - s = pygame.Surface((1, 1)) - s.fill((23, 23, 23)) - with tempfile.NamedTemporaryFile(suffix=".tga", delete=False) as f: - temp_filename = f.name - - try: - pygame.image.save(s, temp_filename) - s2 = pygame.image.load(temp_filename) - self.assertEqual(s2.get_at((0, 0)), s.get_at((0, 0))) - finally: - # clean up the temp file, even if test fails - os.remove(temp_filename) - - def test_save_pathlib(self): - surf = pygame.Surface((1, 1)) - surf.fill((23, 23, 23)) - with tempfile.NamedTemporaryFile(suffix=".tga", delete=False) as f: - temp_filename = f.name - - path = pathlib.Path(temp_filename) - try: - pygame.image.save(surf, path) - s2 = pygame.image.load(path) - self.assertEqual(s2.get_at((0, 0)), surf.get_at((0, 0))) - finally: - os.remove(temp_filename) - - def test_save__to_fileobject_w_namehint_argument(self): - s = pygame.Surface((10, 10)) - s.fill((23, 23, 23)) - magic_hex = {} - magic_hex["jpg"] = [0xFF, 0xD8, 0xFF, 0xE0] - magic_hex["png"] = [0x89, 0x50, 0x4E, 0x47] - magic_hex["bmp"] = [0x42, 0x4D] - - formats = ["tga", "jpg", "bmp", "png"] - # uppercase too... JPG - formats = formats + [x.upper() for x in formats] - - SDL_Im_version = pygame.image.get_sdl_image_version() - # We assume here that minor version and patch level of SDL_Image - # never goes above 99 - isAtLeastSDL_image_2_0_2 = (SDL_Im_version is not None) and ( - SDL_Im_version[0] * 10000 + SDL_Im_version[1] * 100 + SDL_Im_version[2] - ) >= 20002 - for fmt in formats: - tmp_file, tmp_filename = tempfile.mkstemp(suffix=".%s" % fmt) - if not isAtLeastSDL_image_2_0_2 and fmt.lower() == "jpg": - with os.fdopen(tmp_file, "wb") as handle: - with self.assertRaises(pygame.error): - pygame.image.save(s, handle, tmp_filename) - else: - with os.fdopen(tmp_file, "r+b") as handle: - pygame.image.save(s, handle, tmp_filename) - - if fmt.lower() in magic_hex: - # Test the magic numbers at the start of the file to - # ensure they are saved as the correct file type. - handle.seek(0) - self.assertEqual( - (1, fmt), (test_magic(handle, magic_hex[fmt.lower()]), fmt) - ) - # load the file to make sure it was saved correctly. - handle.flush() - handle.seek(0) - s2 = pygame.image.load(handle, tmp_filename) - self.assertEqual(s2.get_at((0, 0)), s.get_at((0, 0))) - os.remove(tmp_filename) - - def test_save_colorkey(self): - """make sure the color key is not changed when saving.""" - s = pygame.Surface((10, 10), pygame.SRCALPHA, 32) - s.fill((23, 23, 23)) - s.set_colorkey((0, 0, 0)) - colorkey1 = s.get_colorkey() - p1 = s.get_at((0, 0)) - - temp_filename = "tmpimg.png" - try: - pygame.image.save(s, temp_filename) - s2 = pygame.image.load(temp_filename) - finally: - os.remove(temp_filename) - - colorkey2 = s.get_colorkey() - # check that the pixel and the colorkey is correct. 
- self.assertEqual(colorkey1, colorkey2) - self.assertEqual(p1, s2.get_at((0, 0))) - - def test_load_unicode_path(self): - import shutil - - orig = example_path("data/asprite.bmp") - temp = os.path.join(example_path("data"), u"你好.bmp") - shutil.copy(orig, temp) - try: - im = pygame.image.load(temp) - finally: - os.remove(temp) - - def _unicode_save(self, temp_file): - im = pygame.Surface((10, 10), 0, 32) - try: - with open(temp_file, "w") as f: - pass - os.remove(temp_file) - except IOError: - raise unittest.SkipTest("the path cannot be opened") - - self.assertFalse(os.path.exists(temp_file)) - - try: - pygame.image.save(im, temp_file) - - self.assertGreater(os.path.getsize(temp_file), 10) - finally: - try: - os.remove(temp_file) - except EnvironmentError: - pass - - def test_save_unicode_path(self): - """save unicode object with non-ASCII chars""" - self._unicode_save(u"你好.bmp") - - def assertPremultipliedAreEqual(self, string1, string2, source_string): - self.assertEqual(len(string1), len(string2)) - block_size = 20 - if string1 != string2: - for block_start in range(0, len(string1), block_size): - block_end = min(block_start + block_size, len(string1)) - block1 = string1[block_start:block_end] - block2 = string2[block_start:block_end] - if block1 != block2: - source_block = source_string[block_start:block_end] - msg = ( - "string difference in %d to %d of %d:\n%s\n%s\nsource:\n%s" - % ( - block_start, - block_end, - len(string1), - binascii.hexlify(block1), - binascii.hexlify(block2), - binascii.hexlify(source_block), - ) - ) - self.fail(msg) - - def test_to_string__premultiplied(self): - """test to make sure we can export a surface to a premultiplied alpha string""" - - def convertRGBAtoPremultiplied(surface_to_modify): - for x in range(surface_to_modify.get_width()): - for y in range(surface_to_modify.get_height()): - color = surface_to_modify.get_at((x, y)) - premult_color = ( - color[0] * color[3] / 255, - color[1] * color[3] / 255, - color[2] * color[3] / 255, - color[3], - ) - surface_to_modify.set_at((x, y), premult_color) - - test_surface = pygame.Surface((256, 256), pygame.SRCALPHA, 32) - for x in range(test_surface.get_width()): - for y in range(test_surface.get_height()): - i = x + y * test_surface.get_width() - test_surface.set_at( - (x, y), ((i * 7) % 256, (i * 13) % 256, (i * 27) % 256, y) - ) - premultiplied_copy = test_surface.copy() - convertRGBAtoPremultiplied(premultiplied_copy) - self.assertPremultipliedAreEqual( - pygame.image.tostring(test_surface, "RGBA_PREMULT"), - pygame.image.tostring(premultiplied_copy, "RGBA"), - pygame.image.tostring(test_surface, "RGBA"), - ) - self.assertPremultipliedAreEqual( - pygame.image.tostring(test_surface, "ARGB_PREMULT"), - pygame.image.tostring(premultiplied_copy, "ARGB"), - pygame.image.tostring(test_surface, "ARGB"), - ) - - no_alpha_surface = pygame.Surface((256, 256), 0, 24) - self.assertRaises( - ValueError, pygame.image.tostring, no_alpha_surface, "RGBA_PREMULT" - ) - - # Custom assert method to check for identical surfaces. - def _assertSurfaceEqual(self, surf_a, surf_b, msg=None): - a_width, a_height = surf_a.get_width(), surf_a.get_height() - - # Check a few things to see if the surfaces are equal. 
- self.assertEqual(a_width, surf_b.get_width(), msg) - self.assertEqual(a_height, surf_b.get_height(), msg) - self.assertEqual(surf_a.get_size(), surf_b.get_size(), msg) - self.assertEqual(surf_a.get_rect(), surf_b.get_rect(), msg) - self.assertEqual(surf_a.get_colorkey(), surf_b.get_colorkey(), msg) - self.assertEqual(surf_a.get_alpha(), surf_b.get_alpha(), msg) - self.assertEqual(surf_a.get_flags(), surf_b.get_flags(), msg) - self.assertEqual(surf_a.get_bitsize(), surf_b.get_bitsize(), msg) - self.assertEqual(surf_a.get_bytesize(), surf_b.get_bytesize(), msg) - # Anything else? - - # Making the method lookups local for a possible speed up. - surf_a_get_at = surf_a.get_at - surf_b_get_at = surf_b.get_at - for y in range(a_height): - for x in range(a_width): - self.assertEqual( - surf_a_get_at((x, y)), - surf_b_get_at((x, y)), - "%s (pixel: %d, %d)" % (msg, x, y), - ) - - def test_fromstring__and_tostring(self): - """Ensure methods tostring() and fromstring() are symmetric.""" - - #################################################################### - def RotateRGBAtoARGB(str_buf): - byte_buf = array.array("B", str_buf) - num_quads = len(byte_buf) // 4 - for i in range(num_quads): - alpha = byte_buf[i * 4 + 3] - byte_buf[i * 4 + 3] = byte_buf[i * 4 + 2] - byte_buf[i * 4 + 2] = byte_buf[i * 4 + 1] - byte_buf[i * 4 + 1] = byte_buf[i * 4 + 0] - byte_buf[i * 4 + 0] = alpha - return tostring(byte_buf) - - #################################################################### - def RotateARGBtoRGBA(str_buf): - byte_buf = array.array("B", str_buf) - num_quads = len(byte_buf) // 4 - for i in range(num_quads): - alpha = byte_buf[i * 4 + 0] - byte_buf[i * 4 + 0] = byte_buf[i * 4 + 1] - byte_buf[i * 4 + 1] = byte_buf[i * 4 + 2] - byte_buf[i * 4 + 2] = byte_buf[i * 4 + 3] - byte_buf[i * 4 + 3] = alpha - return tostring(byte_buf) - - #################################################################### - test_surface = pygame.Surface((64, 256), flags=pygame.SRCALPHA, depth=32) - for i in range(256): - for j in range(16): - intensity = j * 16 + 15 - test_surface.set_at((j + 0, i), (intensity, i, i, i)) - test_surface.set_at((j + 16, i), (i, intensity, i, i)) - test_surface.set_at((j + 32, i), (i, i, intensity, i)) - test_surface.set_at((j + 32, i), (i, i, i, intensity)) - - self._assertSurfaceEqual( - test_surface, test_surface, "failing with identical surfaces" - ) - - rgba_buf = pygame.image.tostring(test_surface, "RGBA") - rgba_buf = RotateARGBtoRGBA(RotateRGBAtoARGB(rgba_buf)) - test_rotate_functions = pygame.image.fromstring( - rgba_buf, test_surface.get_size(), "RGBA" - ) - - self._assertSurfaceEqual( - test_surface, test_rotate_functions, "rotate functions are not symmetric" - ) - - rgba_buf = pygame.image.tostring(test_surface, "RGBA") - argb_buf = RotateRGBAtoARGB(rgba_buf) - test_from_argb_string = pygame.image.fromstring( - argb_buf, test_surface.get_size(), "ARGB" - ) - - self._assertSurfaceEqual( - test_surface, test_from_argb_string, '"RGBA" rotated to "ARGB" failed' - ) - - argb_buf = pygame.image.tostring(test_surface, "ARGB") - rgba_buf = RotateARGBtoRGBA(argb_buf) - test_to_argb_string = pygame.image.fromstring( - rgba_buf, test_surface.get_size(), "RGBA" - ) - - self._assertSurfaceEqual( - test_surface, test_to_argb_string, '"ARGB" rotated to "RGBA" failed' - ) - - for fmt in ("ARGB", "RGBA"): - fmt_buf = pygame.image.tostring(test_surface, fmt) - test_to_from_fmt_string = pygame.image.fromstring( - fmt_buf, test_surface.get_size(), fmt - ) - - self._assertSurfaceEqual( - test_surface, - 
test_to_from_fmt_string, - "tostring/fromstring functions are not " - 'symmetric with "{}" format'.format(fmt), - ) - - def test_tostring_depth_24(self): - test_surface = pygame.Surface((64, 256), depth=24) - for i in range(256): - for j in range(16): - intensity = j * 16 + 15 - test_surface.set_at((j + 0, i), (intensity, i, i, i)) - test_surface.set_at((j + 16, i), (i, intensity, i, i)) - test_surface.set_at((j + 32, i), (i, i, intensity, i)) - test_surface.set_at((j + 32, i), (i, i, i, intensity)) - - fmt = "RGB" - fmt_buf = pygame.image.tostring(test_surface, fmt) - test_to_from_fmt_string = pygame.image.fromstring( - fmt_buf, test_surface.get_size(), fmt - ) - - self._assertSurfaceEqual( - test_surface, - test_to_from_fmt_string, - "tostring/fromstring functions are not " - 'symmetric with "{}" format'.format(fmt), - ) - - def test_frombuffer_8bit(self): - """test reading pixel data from a bytes buffer""" - pygame.display.init() - eight_bit_palette_buffer = bytearray( - [0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3] - ) - - eight_bit_surf = pygame.image.frombuffer(eight_bit_palette_buffer, (4, 4), "P") - eight_bit_surf.set_palette( - [(255, 10, 20), (255, 255, 255), (0, 0, 0), (50, 200, 20)] - ) - self.assertEqual(eight_bit_surf.get_at((0, 0)), pygame.Color(255, 10, 20)) - self.assertEqual(eight_bit_surf.get_at((1, 1)), pygame.Color(255, 255, 255)) - self.assertEqual(eight_bit_surf.get_at((2, 2)), pygame.Color(0, 0, 0)) - self.assertEqual(eight_bit_surf.get_at((3, 3)), pygame.Color(50, 200, 20)) - - def test_frombuffer_RGB(self): - rgb_buffer = bytearray( - [ - 255, - 10, - 20, - 255, - 10, - 20, - 255, - 10, - 20, - 255, - 10, - 20, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 50, - 200, - 20, - 50, - 200, - 20, - 50, - 200, - 20, - 50, - 200, - 20, - ] - ) - - rgb_surf = pygame.image.frombuffer(rgb_buffer, (4, 4), "RGB") - self.assertEqual(rgb_surf.get_at((0, 0)), pygame.Color(255, 10, 20)) - self.assertEqual(rgb_surf.get_at((1, 1)), pygame.Color(255, 255, 255)) - self.assertEqual(rgb_surf.get_at((2, 2)), pygame.Color(0, 0, 0)) - self.assertEqual(rgb_surf.get_at((3, 3)), pygame.Color(50, 200, 20)) - - def test_frombuffer_BGR(self): - bgr_buffer = bytearray( - [ - 20, - 10, - 255, - 20, - 10, - 255, - 20, - 10, - 255, - 20, - 10, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 20, - 200, - 50, - 20, - 200, - 50, - 20, - 200, - 50, - 20, - 200, - 50, - ] - ) - - bgr_surf = pygame.image.frombuffer(bgr_buffer, (4, 4), "BGR") - self.assertEqual(bgr_surf.get_at((0, 0)), pygame.Color(255, 10, 20)) - self.assertEqual(bgr_surf.get_at((1, 1)), pygame.Color(255, 255, 255)) - self.assertEqual(bgr_surf.get_at((2, 2)), pygame.Color(0, 0, 0)) - self.assertEqual(bgr_surf.get_at((3, 3)), pygame.Color(50, 200, 20)) - - def test_frombuffer_RGBX(self): - rgbx_buffer = bytearray( - [ - 255, - 10, - 20, - 255, - 255, - 10, - 20, - 255, - 255, - 10, - 20, - 255, - 255, - 10, - 20, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 255, - 0, - 0, - 0, - 255, - 0, - 0, - 0, - 255, - 0, - 0, - 0, - 255, - 0, - 0, - 0, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - ] - ) - - rgbx_surf = pygame.image.frombuffer(rgbx_buffer, (4, 4), "RGBX") - self.assertEqual(rgbx_surf.get_at((0, 
0)), pygame.Color(255, 10, 20, 255)) - self.assertEqual(rgbx_surf.get_at((1, 1)), pygame.Color(255, 255, 255, 255)) - self.assertEqual(rgbx_surf.get_at((2, 2)), pygame.Color(0, 0, 0, 255)) - self.assertEqual(rgbx_surf.get_at((3, 3)), pygame.Color(50, 200, 20, 255)) - - def test_frombuffer_RGBA(self): - rgba_buffer = bytearray( - [ - 255, - 10, - 20, - 200, - 255, - 10, - 20, - 200, - 255, - 10, - 20, - 200, - 255, - 10, - 20, - 200, - 255, - 255, - 255, - 127, - 255, - 255, - 255, - 127, - 255, - 255, - 255, - 127, - 255, - 255, - 255, - 127, - 0, - 0, - 0, - 79, - 0, - 0, - 0, - 79, - 0, - 0, - 0, - 79, - 0, - 0, - 0, - 79, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - ] - ) - - rgba_surf = pygame.image.frombuffer(rgba_buffer, (4, 4), "RGBA") - self.assertEqual(rgba_surf.get_at((0, 0)), pygame.Color(255, 10, 20, 200)) - self.assertEqual(rgba_surf.get_at((1, 1)), pygame.Color(255, 255, 255, 127)) - self.assertEqual(rgba_surf.get_at((2, 2)), pygame.Color(0, 0, 0, 79)) - self.assertEqual(rgba_surf.get_at((3, 3)), pygame.Color(50, 200, 20, 255)) - - def test_frombuffer_ARGB(self): - argb_buffer = bytearray( - [ - 200, - 255, - 10, - 20, - 200, - 255, - 10, - 20, - 200, - 255, - 10, - 20, - 200, - 255, - 10, - 20, - 127, - 255, - 255, - 255, - 127, - 255, - 255, - 255, - 127, - 255, - 255, - 255, - 127, - 255, - 255, - 255, - 79, - 0, - 0, - 0, - 79, - 0, - 0, - 0, - 79, - 0, - 0, - 0, - 79, - 0, - 0, - 0, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - 255, - 50, - 200, - 20, - ] - ) - - argb_surf = pygame.image.frombuffer(argb_buffer, (4, 4), "ARGB") - self.assertEqual(argb_surf.get_at((0, 0)), pygame.Color(255, 10, 20, 200)) - self.assertEqual(argb_surf.get_at((1, 1)), pygame.Color(255, 255, 255, 127)) - self.assertEqual(argb_surf.get_at((2, 2)), pygame.Color(0, 0, 0, 79)) - self.assertEqual(argb_surf.get_at((3, 3)), pygame.Color(50, 200, 20, 255)) - - def test_get_extended(self): - # Create a png file and try to load it. 
If it cannot, get_extended() should return False - raw_image = [] - raw_image.append((200, 200, 200, 255, 100, 100, 100, 255)) - - f_descriptor, f_path = tempfile.mkstemp(suffix=".png") - - with os.fdopen(f_descriptor, "wb") as file: - w = png.Writer(2, 1, alpha=True) - w.write(file, raw_image) - - try: - surf = pygame.image.load(f_path) - loaded = True - except pygame.error: - loaded = False - - self.assertEqual(pygame.image.get_extended(), loaded) - os.remove(f_path) - - def test_get_sdl_image_version(self): - # If get_extended() returns False then get_sdl_image_version() should - # return None - if not pygame.image.get_extended(): - self.assertIsNone(pygame.image.get_sdl_image_version()) - else: - expected_length = 3 - expected_type = tuple - expected_item_type = int - - version = pygame.image.get_sdl_image_version() - - self.assertIsInstance(version, expected_type) - self.assertEqual(len(version), expected_length) - - for item in version: - self.assertIsInstance(item, expected_item_type) - - def test_load_basic(self): - """to see if we can load bmp from files and/or file-like objects in memory""" - - # pygame.image.load(filename): return Surface - - # test loading from a file - s = pygame.image.load_basic(example_path("data/asprite.bmp")) - self.assertEqual(s.get_at((0, 0)), (255, 255, 255, 255)) - - # test loading from io.BufferedReader - f = pygame.pkgdata.getResource("pygame_icon.bmp") - self.assertEqual(f.mode, "rb") - - surf = pygame.image.load_basic(f) - - self.assertEqual(surf.get_at((0, 0)), (5, 4, 5, 255)) - self.assertEqual(surf.get_height(), 32) - self.assertEqual(surf.get_width(), 32) - - f.close() - - def test_load_extended(self): - """can load different format images. - - We test loading the following file types: - bmp, png, jpg, gif (non-animated), pcx, tga (uncompressed), tif, xpm, ppm, pgm - Following file types are tested when using SDL 2 - svg, pnm, webp - All the loaded images are smaller than 32 x 32 pixels. 
- """ - - filename_expected_color = [ - ("asprite.bmp", (255, 255, 255, 255)), - ("laplacian.png", (10, 10, 70, 255)), - ("red.jpg", (254, 0, 0, 255)), - ("blue.gif", (0, 0, 255, 255)), - ("green.pcx", (0, 255, 0, 255)), - ("yellow.tga", (255, 255, 0, 255)), - ("turquoise.tif", (0, 255, 255, 255)), - ("purple.xpm", (255, 0, 255, 255)), - ("black.ppm", (0, 0, 0, 255)), - ("grey.pgm", (120, 120, 120, 255)), - ("teal.svg", (0, 128, 128, 255)), - ("crimson.pnm", (220, 20, 60, 255)), - ("scarlet.webp", (252, 14, 53, 255)), - ] - - for filename, expected_color in filename_expected_color: - with self.subTest( - "Test loading a " + filename.split(".")[-1], - filename="examples/data/" + filename, - expected_color=expected_color, - ): - surf = pygame.image.load_extended(example_path("data/" + filename)) - self.assertEqual(surf.get_at((0, 0)), expected_color) - - def test_load_pathlib(self): - """works loading using a Path argument.""" - path = pathlib.Path(example_path("data/asprite.bmp")) - surf = pygame.image.load_extended(path) - self.assertEqual(surf.get_at((0, 0)), (255, 255, 255, 255)) - - def test_save_extended(self): - surf = pygame.Surface((5, 5)) - surf.fill((23, 23, 23)) - - passing_formats = ["jpg", "png"] - passing_formats += [fmt.upper() for fmt in passing_formats] - - magic_hex = {} - magic_hex["jpg"] = [0xFF, 0xD8, 0xFF, 0xE0] - magic_hex["png"] = [0x89, 0x50, 0x4E, 0x47] - - failing_formats = ["bmp", "tga"] - failing_formats += [fmt.upper() for fmt in failing_formats] - - # check that .jpg and .png save - for fmt in passing_formats: - temp_file_name = "temp_file.%s" % fmt - # save image as .jpg and .png - pygame.image.save_extended(surf, temp_file_name) - with open(temp_file_name, "rb") as file: - # Test the magic numbers at the start of the file to ensure - # they are saved as the correct file type. 
- self.assertEqual(1, (test_magic(file, magic_hex[fmt.lower()]))) - # load the file to make sure it was saved correctly - loaded_file = pygame.image.load(temp_file_name) - self.assertEqual(loaded_file.get_at((0, 0)), surf.get_at((0, 0))) - # clean up the temp file - os.remove(temp_file_name) - # check that .bmp and .tga do not save - for fmt in failing_formats: - self.assertRaises( - pygame.error, pygame.image.save_extended, surf, "temp_file.%s" % fmt - ) - - def threads_load(self, images): - import pygame.threads - - for i in range(10): - surfs = pygame.threads.tmap(pygame.image.load, images) - for s in surfs: - self.assertIsInstance(s, pygame.Surface) - - def test_load_png_threads(self): - self.threads_load(glob.glob(example_path("data/*.png"))) - - def test_load_jpg_threads(self): - self.threads_load(glob.glob(example_path("data/*.jpg"))) - - def test_load_bmp_threads(self): - self.threads_load(glob.glob(example_path("data/*.bmp"))) - - def test_load_gif_threads(self): - self.threads_load(glob.glob(example_path("data/*.gif"))) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/imageext_tags.py b/venv/Lib/site-packages/pygame/tests/imageext_tags.py deleted file mode 100644 index 25cff74..0000000 --- a/venv/Lib/site-packages/pygame/tests/imageext_tags.py +++ /dev/null @@ -1,7 +0,0 @@ -__tags__ = [] - -import pygame -import sys - -if "pygame.imageext" not in sys.modules: - __tags__.extend(("ignore", "subprocess_ignore")) diff --git a/venv/Lib/site-packages/pygame/tests/imageext_test.py b/venv/Lib/site-packages/pygame/tests/imageext_test.py deleted file mode 100644 index 19faf83..0000000 --- a/venv/Lib/site-packages/pygame/tests/imageext_test.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf8 -*- -import os -import os.path -import sys -import unittest - -from pygame.tests.test_utils import example_path -import pygame, pygame.image, pygame.pkgdata - - -imageext = sys.modules["pygame.imageext"] - - -class ImageextModuleTest(unittest.TestCase): - # Most of the testing is done indirectly through image_test.py - # This just confirms file path encoding and error handling. 
- def test_save_non_string_file(self): - im = pygame.Surface((10, 10), 0, 32) - self.assertRaises(TypeError, imageext.save_extended, im, []) - - def test_load_non_string_file(self): - self.assertRaises(TypeError, imageext.load_extended, []) - - @unittest.skip("SDL silently removes invalid characters") - def test_save_bad_filename(self): - im = pygame.Surface((10, 10), 0, 32) - u = u"a\x00b\x00c.png" - self.assertRaises(pygame.error, imageext.save_extended, im, u) - - @unittest.skip("SDL silently removes invalid characters") - def test_load_bad_filename(self): - u = u"a\x00b\x00c.png" - self.assertRaises(pygame.error, imageext.load_extended, u) - - def test_save_unknown_extension(self): - im = pygame.Surface((10, 10), 0, 32) - s = "foo.bar" - self.assertRaises(pygame.error, imageext.save_extended, im, s) - - def test_load_unknown_extension(self): - s = "foo.bar" - self.assertRaises(FileNotFoundError, imageext.load_extended, s) - - def test_load_unknown_file(self): - s = "nonexistent.png" - self.assertRaises(FileNotFoundError, imageext.load_extended, s) - - def test_load_unicode_path_0(self): - u = example_path("data/alien1.png") - im = imageext.load_extended(u) - - def test_load_unicode_path_1(self): - """non-ASCII unicode""" - import shutil - - orig = example_path("data/alien1.png") - temp = os.path.join(example_path("data"), u"你好.png") - shutil.copy(orig, temp) - try: - im = imageext.load_extended(temp) - finally: - os.remove(temp) - - def _unicode_save(self, temp_file): - im = pygame.Surface((10, 10), 0, 32) - try: - with open(temp_file, "w") as f: - pass - os.remove(temp_file) - except IOError: - raise unittest.SkipTest("the path cannot be opened") - - self.assertFalse(os.path.exists(temp_file)) - - try: - imageext.save_extended(im, temp_file) - - self.assertGreater(os.path.getsize(temp_file), 10) - finally: - try: - os.remove(temp_file) - except EnvironmentError: - pass - - def test_save_unicode_path_0(self): - """unicode object with ASCII chars""" - self._unicode_save(u"temp_file.png") - - def test_save_unicode_path_1(self): - self._unicode_save(u"你好.png") - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/joystick_test.py b/venv/Lib/site-packages/pygame/tests/joystick_test.py deleted file mode 100644 index e8cea2d..0000000 --- a/venv/Lib/site-packages/pygame/tests/joystick_test.py +++ /dev/null @@ -1,172 +0,0 @@ -import unittest -from pygame.tests.test_utils import question, prompt - -import pygame -import pygame._sdl2.controller - - -class JoystickTypeTest(unittest.TestCase): - def todo_test_Joystick(self): - - # __doc__ (as of 2008-08-02) for pygame.joystick.Joystick: - - # pygame.joystick.Joystick(id): return Joystick - # create a new Joystick object - # - # Create a new joystick to access a physical device. The id argument - # must be a value from 0 to pygame.joystick.get_count()-1. - # - # To access most of the Joystick methods, you'll need to init() the - # Joystick. This is separate from making sure the joystick module is - # initialized. When multiple Joysticks objects are created for the - # same physical joystick device (i.e., they have the same ID number), - # the state and values for those Joystick objects will be shared. - # - # The Joystick object allows you to get information about the types of - # controls on a joystick device. Once the device is initialized the - # Pygame event queue will start receiving events about its input. 
- # - # You can call the Joystick.get_name() and Joystick.get_id() functions - # without initializing the Joystick object. - # - - self.fail() - - -class JoystickModuleTest(unittest.TestCase): - def test_get_init(self): - # Check that get_init() matches what is actually happening - def error_check_get_init(): - try: - pygame.joystick.get_count() - except pygame.error: - return False - return True - - # Start uninitialised - self.assertEqual(pygame.joystick.get_init(), False) - - pygame.joystick.init() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # True - pygame.joystick.quit() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # False - - pygame.joystick.init() - pygame.joystick.init() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # True - pygame.joystick.quit() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # False - - pygame.joystick.quit() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # False - - for i in range(100): - pygame.joystick.init() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # True - pygame.joystick.quit() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # False - - for i in range(100): - pygame.joystick.quit() - self.assertEqual(pygame.joystick.get_init(), error_check_get_init()) # False - - def test_init(self): - """ - This unit test is for joystick.init() - It was written to help reduce maintenance costs - and to help test against changes to the code or - different platforms. - """ - pygame.quit() - # test that pygame.init automatically calls joystick.init - pygame.init() - self.assertEqual(pygame.joystick.get_init(), True) - - # Controller module interferes with the joystick module. - pygame._sdl2.controller.quit() - - # test that get_count doesn't work w/o joystick init - # this is done before and after an init to test - # that init activates the joystick functions - pygame.joystick.quit() - with self.assertRaises(pygame.error): - pygame.joystick.get_count() - - # test explicit call(s) to joystick.init. - # Also test that get_count works once init is called - iterations = 20 - for i in range(iterations): - pygame.joystick.init() - self.assertEqual(pygame.joystick.get_init(), True) - self.assertIsNotNone(pygame.joystick.get_count()) - - def test_quit(self): - """Test if joystick.quit works.""" - - pygame.joystick.init() - - self.assertIsNotNone(pygame.joystick.get_count()) # Is not None before quit - - pygame.joystick.quit() - - with self.assertRaises(pygame.error): # Raises error if quit worked - pygame.joystick.get_count() - - def test_get_count(self): - # Test that get_count correctly returns a non-negative number of joysticks - pygame.joystick.init() - - try: - count = pygame.joystick.get_count() - self.assertGreaterEqual( - count, 0, ("joystick.get_count() must " "return a value >= 0") - ) - finally: - pygame.joystick.quit() - - -class JoystickInteractiveTest(unittest.TestCase): - - __tags__ = ["interactive"] - - def test_get_count_interactive(self): - # Test get_count correctly identifies number of connected joysticks - prompt( - ( - "Please connect any joysticks/controllers now before starting the " - "joystick.get_count() test." 
- ) - ) - - pygame.joystick.init() - # pygame.joystick.get_count(): return count - # number of joysticks on the system, 0 means no joysticks connected - count = pygame.joystick.get_count() - - response = question( - ( - "NOTE: Having Steam open may add an extra virtual controller for " - "each joystick/controller physically plugged in.\n" - "joystick.get_count() thinks there is [{}] joystick(s)/controller(s)" - "connected to this system. Is this correct?".format(count) - ) - ) - - self.assertTrue(response) - - # When you create Joystick objects using Joystick(id), you pass an - # integer that must be lower than this count. - # Test Joystick(id) for each connected joystick - if count != 0: - for x in range(count): - pygame.joystick.Joystick(x) - with self.assertRaises(pygame.error): - pygame.joystick.Joystick(count) - - pygame.joystick.quit() - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/key_test.py b/venv/Lib/site-packages/pygame/tests/key_test.py deleted file mode 100644 index a15199f..0000000 --- a/venv/Lib/site-packages/pygame/tests/key_test.py +++ /dev/null @@ -1,110 +0,0 @@ -import os -import sys -import time -import unittest -import pygame -import pygame.key - - -class KeyModuleTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - pygame.init() - - @classmethod - def tearDownClass(cls): - pygame.quit() - - def setUp(cls): - # This makes sure pygame is always initialized before each test (in - # case a test calls pygame.quit()). - if not pygame.get_init(): - pygame.init() - if not pygame.display.get_init(): - pygame.display.init() - - def test_import(self): - """does it import?""" - import pygame.key - - # fixme: test_get_focused failing systematically in some linux - # fixme: test_get_focused failing on SDL 2.0.18 on Windows - @unittest.skip("flaky test, and broken on 2.0.18 windows") - def test_get_focused(self): - # This test fails in SDL2 in some linux - # This test was skipped in SDL1. - focused = pygame.key.get_focused() - self.assertFalse(focused) # No window to focus - self.assertIsInstance(focused, int) - # Dummy video driver never gets keyboard focus. - if os.environ.get("SDL_VIDEODRIVER") != "dummy": - # Positive test, fullscreen with events grabbed - display_sizes = pygame.display.list_modes() - if display_sizes == -1: - display_sizes = [(500, 500)] - pygame.display.set_mode(size=display_sizes[-1], flags=pygame.FULLSCREEN) - pygame.event.set_grab(True) - # Pump event queue to get window focus on macos - pygame.event.pump() - focused = pygame.key.get_focused() - self.assertIsInstance(focused, int) - self.assertTrue(focused) - # Now test negative, iconify takes away focus - pygame.event.clear() - # TODO: iconify test fails in windows - if os.name != "nt": - pygame.display.iconify() - # Apparent need to pump event queue in order to make sure iconify - # happens. 
See display_test.py's test_get_active_iconify - for _ in range(50): - time.sleep(0.01) - pygame.event.pump() - self.assertFalse(pygame.key.get_focused()) - # Test if focus is returned when iconify is gone - pygame.display.set_mode(size=display_sizes[-1], flags=pygame.FULLSCREEN) - for i in range(50): - time.sleep(0.01) - pygame.event.pump() - self.assertTrue(pygame.key.get_focused()) - # Test if a quit display raises an error: - pygame.display.quit() - with self.assertRaises(pygame.error) as cm: - pygame.key.get_focused() - - def test_get_pressed(self): - states = pygame.key.get_pressed() - self.assertEqual(states[pygame.K_RIGHT], 0) - - def test_name(self): - self.assertEqual(pygame.key.name(pygame.K_RETURN), "return") - self.assertEqual(pygame.key.name(pygame.K_0), "0") - self.assertEqual(pygame.key.name(pygame.K_SPACE), "space") - - def test_key_code(self): - self.assertEqual(pygame.key.key_code("return"), pygame.K_RETURN) - self.assertEqual(pygame.key.key_code("0"), pygame.K_0) - self.assertEqual(pygame.key.key_code("space"), pygame.K_SPACE) - - self.assertRaises(ValueError, pygame.key.key_code, "fizzbuzz") - - def test_set_and_get_mods(self): - pygame.key.set_mods(pygame.KMOD_CTRL) - self.assertEqual(pygame.key.get_mods(), pygame.KMOD_CTRL) - - pygame.key.set_mods(pygame.KMOD_ALT) - self.assertEqual(pygame.key.get_mods(), pygame.KMOD_ALT) - pygame.key.set_mods(pygame.KMOD_CTRL | pygame.KMOD_ALT) - self.assertEqual(pygame.key.get_mods(), pygame.KMOD_CTRL | pygame.KMOD_ALT) - - def test_set_and_get_repeat(self): - self.assertEqual(pygame.key.get_repeat(), (0, 0)) - - pygame.key.set_repeat(10, 15) - self.assertEqual(pygame.key.get_repeat(), (10, 15)) - - pygame.key.set_repeat() - self.assertEqual(pygame.key.get_repeat(), (0, 0)) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/mask_test.py b/venv/Lib/site-packages/pygame/tests/mask_test.py deleted file mode 100644 index 56c0917..0000000 --- a/venv/Lib/site-packages/pygame/tests/mask_test.py +++ /dev/null @@ -1,6440 +0,0 @@ -from collections import OrderedDict -import copy -import platform -import random -import unittest -import sys - -import pygame -from pygame.locals import * -from pygame.math import Vector2 -from pygame.tests.test_utils import AssertRaisesRegexMixin - - -IS_PYPY = "PyPy" == platform.python_implementation() - - -def random_mask(size=(100, 100)): - """random_mask(size=(100,100)): return Mask - Create a mask of the given size, with roughly half the bits set at random.""" - m = pygame.Mask(size) - for i in range(size[0] * size[1] // 2): - x, y = random.randint(0, size[0] - 1), random.randint(0, size[1] - 1) - m.set_at((x, y)) - return m - - -def maskFromSurface(surface, threshold=127): - mask = pygame.Mask(surface.get_size()) - key = surface.get_colorkey() - if key: - for y in range(surface.get_height()): - for x in range(surface.get_width()): - if surface.get_at((x + 0.1, y + 0.1)) != key: - mask.set_at((x, y), 1) - else: - for y in range(surface.get_height()): - for x in range(surface.get_width()): - if surface.get_at((x, y))[3] > threshold: - mask.set_at((x, y), 1) - return mask - - -def create_bounding_rect(points): - """Creates a bounding rect from the given points.""" - xmin = xmax = points[0][0] - ymin = ymax = points[0][1] - - for x, y in points[1:]: - xmin = min(x, xmin) - xmax = max(x, xmax) - ymin = min(y, ymin) - ymax = max(y, ymax) - - return pygame.Rect((xmin, ymin), (xmax - xmin + 1, ymax - ymin + 1)) - - -def zero_size_pairs(width, height): - """Creates a 
generator which yields pairs of sizes. - - For each pair of sizes at least one of the sizes will have a 0 in it. - """ - sizes = ((width, height), (width, 0), (0, height), (0, 0)) - - return ((a, b) for a in sizes for b in sizes if 0 in a or 0 in b) - - -def corners(mask): - """Returns a tuple with the corner positions of the given mask. - - Clockwise from the top left corner. - """ - width, height = mask.get_size() - return ((0, 0), (width - 1, 0), (width - 1, height - 1), (0, height - 1)) - - -def off_corners(rect): - """Returns a tuple with the positions off of the corners of the given rect. - - Clockwise from the top left corner. - """ - return ( - (rect.left - 1, rect.top), - (rect.left - 1, rect.top - 1), - (rect.left, rect.top - 1), - (rect.right - 1, rect.top - 1), - (rect.right, rect.top - 1), - (rect.right, rect.top), - (rect.right, rect.bottom - 1), - (rect.right, rect.bottom), - (rect.right - 1, rect.bottom), - (rect.left, rect.bottom), - (rect.left - 1, rect.bottom), - (rect.left - 1, rect.bottom - 1), - ) - - -def assertSurfaceFilled(testcase, surface, expected_color, area_rect=None): - """Checks to see if the given surface is filled with the given color. - - If an area_rect is provided, only check that area of the surface. - """ - if area_rect is None: - x_range = range(surface.get_width()) - y_range = range(surface.get_height()) - else: - area_rect.normalize() - area_rect = area_rect.clip(surface.get_rect()) - x_range = range(area_rect.left, area_rect.right) - y_range = range(area_rect.top, area_rect.bottom) - - surface.lock() # Lock for possible speed up. - for pos in ((x, y) for y in y_range for x in x_range): - testcase.assertEqual(surface.get_at(pos), expected_color, pos) - surface.unlock() - - -def assertSurfaceFilledIgnoreArea(testcase, surface, expected_color, ignore_rect): - """Checks if the surface is filled with the given color. The - ignore_rect area is not checked. - """ - x_range = range(surface.get_width()) - y_range = range(surface.get_height()) - ignore_rect.normalize() - - surface.lock() # Lock for possible speed up. - for pos in ((x, y) for y in y_range for x in x_range): - if not ignore_rect.collidepoint(pos): - testcase.assertEqual(surface.get_at(pos), expected_color, pos) - surface.unlock() - - -def assertMaskEqual(testcase, m1, m2, msg=None): - """Checks to see if the 2 given masks are equal.""" - m1_count = m1.count() - - testcase.assertEqual(m1.get_size(), m2.get_size(), msg=msg) - testcase.assertEqual(m1_count, m2.count(), msg=msg) - testcase.assertEqual(m1_count, m1.overlap_area(m2, (0, 0)), msg=msg) - - # This can be used to help debug exact locations. 
- ##for i in range(m1.get_size()[0]): - ## for j in range(m1.get_size()[1]): - ## testcase.assertEqual(m1.get_at((i, j)), m2.get_at((i, j))) - - -# @unittest.skipIf(IS_PYPY, "pypy has lots of mask failures") # TODO -class MaskTypeTest(AssertRaisesRegexMixin, unittest.TestCase): - ORIGIN_OFFSETS = ( - (0, 0), - (0, 1), - (1, 1), - (1, 0), - (1, -1), - (0, -1), - (-1, -1), - (-1, 0), - (-1, 1), - ) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_mask(self): - """Ensure masks are created correctly without fill parameter.""" - expected_count = 0 - expected_size = (11, 23) - - mask1 = pygame.mask.Mask(expected_size) - mask2 = pygame.mask.Mask(size=expected_size) - - self.assertIsInstance(mask1, pygame.mask.Mask) - self.assertEqual(mask1.count(), expected_count) - self.assertEqual(mask1.get_size(), expected_size) - - self.assertIsInstance(mask2, pygame.mask.Mask) - self.assertEqual(mask2.count(), expected_count) - self.assertEqual(mask2.get_size(), expected_size) - - def test_mask__negative_size(self): - """Ensure the mask constructor handles negative sizes correctly.""" - for size in ((1, -1), (-1, 1), (-1, -1)): - with self.assertRaises(ValueError): - mask = pygame.Mask(size) - - def test_mask__fill_kwarg(self): - """Ensure masks are created correctly using the fill keyword.""" - width, height = 37, 47 - expected_size = (width, height) - fill_counts = {True: width * height, False: 0} - - for fill, expected_count in fill_counts.items(): - msg = "fill={}".format(fill) - - mask = pygame.mask.Mask(expected_size, fill=fill) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_mask__fill_kwarg_bit_boundaries(self): - """Ensures masks are created correctly using the fill keyword - over a range of sizes. 
- - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(1, 4): - for width in range(1, 66): - expected_count = width * height - expected_size = (width, height) - msg = "size={}".format(expected_size) - - mask = pygame.mask.Mask(expected_size, fill=True) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - - def test_mask__fill_arg(self): - """Ensure masks are created correctly using a fill arg.""" - width, height = 59, 71 - expected_size = (width, height) - fill_counts = {True: width * height, False: 0} - - for fill, expected_count in fill_counts.items(): - msg = "fill={}".format(fill) - - mask = pygame.mask.Mask(expected_size, fill) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - - def test_mask__size_kwarg(self): - """Ensure masks are created correctly using the size keyword.""" - width, height = 73, 83 - expected_size = (width, height) - fill_counts = {True: width * height, False: 0} - - for fill, expected_count in fill_counts.items(): - msg = "fill={}".format(fill) - - mask1 = pygame.mask.Mask(fill=fill, size=expected_size) - mask2 = pygame.mask.Mask(size=expected_size, fill=fill) - - self.assertIsInstance(mask1, pygame.mask.Mask, msg) - self.assertIsInstance(mask2, pygame.mask.Mask, msg) - self.assertEqual(mask1.count(), expected_count, msg) - self.assertEqual(mask2.count(), expected_count, msg) - self.assertEqual(mask1.get_size(), expected_size, msg) - self.assertEqual(mask2.get_size(), expected_size, msg) - - def test_copy(self): - """Ensures copy works correctly with some bits set and unset.""" - # Test different widths and heights. - for width in (31, 32, 33, 63, 64, 65): - for height in (31, 32, 33, 63, 64, 65): - mask = pygame.mask.Mask((width, height)) - - # Create a checkerboard pattern of set/unset bits. - for x in range(width): - for y in range(x & 1, height, 2): - mask.set_at((x, y)) - - # Test both the copy() and __copy__() methods. - for mask_copy in (mask.copy(), copy.copy(mask)): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - - def test_copy__full(self): - """Ensures copy works correctly on a filled masked.""" - # Test different widths and heights. - for width in (31, 32, 33, 63, 64, 65): - for height in (31, 32, 33, 63, 64, 65): - mask = pygame.mask.Mask((width, height), fill=True) - - # Test both the copy() and __copy__() methods. - for mask_copy in (mask.copy(), copy.copy(mask)): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - - def test_copy__empty(self): - """Ensures copy works correctly on an empty mask.""" - for width in (31, 32, 33, 63, 64, 65): - for height in (31, 32, 33, 63, 64, 65): - mask = pygame.mask.Mask((width, height)) - - # Test both the copy() and __copy__() methods. 
- for mask_copy in (mask.copy(), copy.copy(mask)): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - - def test_copy__independent(self): - """Ensures copy makes an independent copy of the mask.""" - mask_set_pos = (64, 1) - mask_copy_set_pos = (64, 2) - mask = pygame.mask.Mask((65, 3)) - - # Test both the copy() and __copy__() methods. - mask_copies = (mask.copy(), copy.copy(mask)) - mask.set_at(mask_set_pos) - - for mask_copy in mask_copies: - mask_copy.set_at(mask_copy_set_pos) - - self.assertIsNot(mask_copy, mask) - self.assertNotEqual( - mask_copy.get_at(mask_set_pos), mask.get_at(mask_set_pos) - ) - self.assertNotEqual( - mask_copy.get_at(mask_copy_set_pos), mask.get_at(mask_copy_set_pos) - ) - - def test_get_size(self): - """Ensure a mask's size is correctly retrieved.""" - expected_size = (93, 101) - mask = pygame.mask.Mask(expected_size) - - self.assertEqual(mask.get_size(), expected_size) - - def test_get_rect(self): - """Ensures get_rect works correctly.""" - expected_rect = pygame.Rect((0, 0), (11, 13)) - - # Test on full and empty masks. - for fill in (True, False): - mask = pygame.mask.Mask(expected_rect.size, fill=fill) - - rect = mask.get_rect() - - self.assertEqual(rect, expected_rect) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_get_rect__one_kwarg(self): - """Ensures get_rect supports a single rect attribute kwarg. - - Tests all the rect attributes. - """ - # Rect attributes that take a single value. - RECT_SINGLE_VALUE_ATTRIBUTES = ( - "x", - "y", - "top", - "left", - "bottom", - "right", - "centerx", - "centery", - "width", - "height", - "w", - "h", - ) - - # Rect attributes that take 2 values. - RECT_DOUBLE_VALUE_ATTRIBUTES = ( - "topleft", - "bottomleft", - "topright", - "bottomright", - "midtop", - "midleft", - "midbottom", - "midright", - "center", - "size", - ) - - # Testing ints/floats and tuples/lists/Vector2s. 
- # {attribute_names : attribute_values} - rect_attributes = { - RECT_SINGLE_VALUE_ATTRIBUTES: (3, 5.1), - RECT_DOUBLE_VALUE_ATTRIBUTES: ((1, 2.2), [2.3, 3], Vector2(0, 1)), - } - - size = (7, 3) - mask = pygame.mask.Mask(size) - - for attributes, values in rect_attributes.items(): - for attribute in attributes: - for value in values: - expected_rect = pygame.Rect((0, 0), size) - setattr(expected_rect, attribute, value) - - rect = mask.get_rect(**{attribute: value}) - - self.assertEqual(rect, expected_rect) - - def test_get_rect__multiple_kwargs(self): - """Ensures get_rect supports multiple rect attribute kwargs.""" - mask = pygame.mask.Mask((5, 4)) - expected_rect = pygame.Rect((0, 0), (0, 0)) - kwargs = {"x": 7.1, "top": -1, "size": Vector2(2, 3.2)} - - for attrib, value in kwargs.items(): - setattr(expected_rect, attrib, value) - - rect = mask.get_rect(**kwargs) - - self.assertEqual(rect, expected_rect) - - def test_get_rect__no_arg_support(self): - """Ensures get_rect only supports kwargs.""" - mask = pygame.mask.Mask((4, 5)) - - with self.assertRaises(TypeError): - rect = mask.get_rect(3) - - with self.assertRaises(TypeError): - rect = mask.get_rect((1, 2)) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_get_rect__invalid_kwarg_name(self): - """Ensures get_rect detects invalid kwargs.""" - mask = pygame.mask.Mask((1, 2)) - - with self.assertRaises(AttributeError): - rect = mask.get_rect(righte=11) - - with self.assertRaises(AttributeError): - rect = mask.get_rect(toplef=(1, 1)) - - with self.assertRaises(AttributeError): - rect = mask.get_rect(move=(3, 2)) - - def test_get_rect__invalid_kwarg_format(self): - """Ensures get_rect detects invalid kwarg formats.""" - mask = pygame.mask.Mask((3, 11)) - - with self.assertRaises(TypeError): - rect = mask.get_rect(right="1") # Wrong type. - - with self.assertRaises(TypeError): - rect = mask.get_rect(bottom=(1,)) # Wrong type. - - with self.assertRaises(TypeError): - rect = mask.get_rect(centerx=(1, 1)) # Wrong type. - - with self.assertRaises(TypeError): - rect = mask.get_rect(midleft=(1, "1")) # Wrong type. - - with self.assertRaises(TypeError): - rect = mask.get_rect(topright=(1,)) # Too few. - - with self.assertRaises(TypeError): - rect = mask.get_rect(bottomleft=(1, 2, 3)) # Too many. - - with self.assertRaises(TypeError): - rect = mask.get_rect(midbottom=1) # Wrong type. - - def test_get_at(self): - """Ensure individual mask bits are correctly retrieved.""" - width, height = 5, 7 - mask0 = pygame.mask.Mask((width, height)) - mask1 = pygame.mask.Mask((width, height), fill=True) - mask0_expected_bit = 0 - mask1_expected_bit = 1 - pos = (width - 1, height - 1) - - # Check twice to make sure bits aren't toggled. 
- self.assertEqual(mask0.get_at(pos), mask0_expected_bit) - self.assertEqual(mask0.get_at(pos=pos), mask0_expected_bit) - self.assertEqual(mask1.get_at(Vector2(pos)), mask1_expected_bit) - self.assertEqual(mask1.get_at(pos=Vector2(pos)), mask1_expected_bit) - - def test_get_at__out_of_bounds(self): - """Ensure get_at() checks bounds.""" - width, height = 11, 3 - mask = pygame.mask.Mask((width, height)) - - with self.assertRaises(IndexError): - mask.get_at((width, 0)) - - with self.assertRaises(IndexError): - mask.get_at((0, height)) - - with self.assertRaises(IndexError): - mask.get_at((-1, 0)) - - with self.assertRaises(IndexError): - mask.get_at((0, -1)) - - def test_set_at(self): - """Ensure individual mask bits are set to 1.""" - width, height = 13, 17 - mask0 = pygame.mask.Mask((width, height)) - mask1 = pygame.mask.Mask((width, height), fill=True) - mask0_expected_count = 1 - mask1_expected_count = mask1.count() - expected_bit = 1 - pos = (width - 1, height - 1) - - mask0.set_at(pos, expected_bit) # set 0 to 1 - mask1.set_at(pos=Vector2(pos), value=expected_bit) # set 1 to 1 - - self.assertEqual(mask0.get_at(pos), expected_bit) - self.assertEqual(mask0.count(), mask0_expected_count) - self.assertEqual(mask1.get_at(pos), expected_bit) - self.assertEqual(mask1.count(), mask1_expected_count) - - def test_set_at__to_0(self): - """Ensure individual mask bits are set to 0.""" - width, height = 11, 7 - mask0 = pygame.mask.Mask((width, height)) - mask1 = pygame.mask.Mask((width, height), fill=True) - mask0_expected_count = 0 - mask1_expected_count = mask1.count() - 1 - expected_bit = 0 - pos = (width - 1, height - 1) - - mask0.set_at(pos, expected_bit) # set 0 to 0 - mask1.set_at(pos, expected_bit) # set 1 to 0 - - self.assertEqual(mask0.get_at(pos), expected_bit) - self.assertEqual(mask0.count(), mask0_expected_count) - self.assertEqual(mask1.get_at(pos), expected_bit) - self.assertEqual(mask1.count(), mask1_expected_count) - - def test_set_at__default_value(self): - """Ensure individual mask bits are set using the default value.""" - width, height = 3, 21 - mask0 = pygame.mask.Mask((width, height)) - mask1 = pygame.mask.Mask((width, height), fill=True) - mask0_expected_count = 1 - mask1_expected_count = mask1.count() - expected_bit = 1 - pos = (width - 1, height - 1) - - mask0.set_at(pos) # set 0 to 1 - mask1.set_at(pos) # set 1 to 1 - - self.assertEqual(mask0.get_at(pos), expected_bit) - self.assertEqual(mask0.count(), mask0_expected_count) - self.assertEqual(mask1.get_at(pos), expected_bit) - self.assertEqual(mask1.count(), mask1_expected_count) - - def test_set_at__out_of_bounds(self): - """Ensure set_at() checks bounds.""" - width, height = 11, 3 - mask = pygame.mask.Mask((width, height)) - - with self.assertRaises(IndexError): - mask.set_at((width, 0)) - - with self.assertRaises(IndexError): - mask.set_at((0, height)) - - with self.assertRaises(IndexError): - mask.set_at((-1, 0)) - - with self.assertRaises(IndexError): - mask.set_at((0, -1)) - - def test_overlap(self): - """Ensure the overlap intersection is correctly calculated. 
- - Testing the different combinations of full/empty masks: - (mask1-filled) 1 overlap 1 (mask2-filled) - (mask1-empty) 0 overlap 1 (mask2-filled) - (mask1-filled) 1 overlap 0 (mask2-empty) - (mask1-empty) 0 overlap 0 (mask2-empty) - """ - expected_size = (4, 4) - offset = (0, 0) - expected_default = None - expected_overlaps = {(True, True): offset} - - for fill2 in (True, False): - mask2 = pygame.mask.Mask(expected_size, fill=fill2) - mask2_count = mask2.count() - - for fill1 in (True, False): - key = (fill1, fill2) - msg = "key={}".format(key) - mask1 = pygame.mask.Mask(expected_size, fill=fill1) - mask1_count = mask1.count() - expected_pos = expected_overlaps.get(key, expected_default) - - overlap_pos = mask1.overlap(mask2, offset) - - self.assertEqual(overlap_pos, expected_pos, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), expected_size, msg) - self.assertEqual(mask2.get_size(), expected_size, msg) - - def test_overlap__offset(self): - """Ensure an offset overlap intersection is correctly calculated.""" - mask1 = pygame.mask.Mask((65, 3), fill=True) - mask2 = pygame.mask.Mask((66, 4), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - expected_pos = (max(offset[0], 0), max(offset[1], 0)) - - overlap_pos = mask1.overlap(other=mask2, offset=offset) - - self.assertEqual(overlap_pos, expected_pos, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - def test_overlap__offset_with_unset_bits(self): - """Ensure an offset overlap intersection is correctly calculated - when (0, 0) bits not set.""" - mask1 = pygame.mask.Mask((65, 3), fill=True) - mask2 = pygame.mask.Mask((66, 4), fill=True) - unset_pos = (0, 0) - mask1.set_at(unset_pos, 0) - mask2.set_at(unset_pos, 0) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - x, y = offset - expected_y = max(y, 0) - if 0 == y: - expected_x = max(x + 1, 1) - elif 0 < y: - expected_x = max(x + 1, 0) - else: - expected_x = max(x, 1) - - overlap_pos = mask1.overlap(mask2, Vector2(offset)) - - self.assertEqual(overlap_pos, (expected_x, expected_y), msg) - - # Ensure mask1/mask2 unchanged. 
- self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - self.assertEqual(mask1.get_at(unset_pos), 0, msg) - self.assertEqual(mask2.get_at(unset_pos), 0, msg) - - def test_overlap__no_overlap(self): - """Ensure an offset overlap intersection is correctly calculated - when there is no overlap.""" - mask1 = pygame.mask.Mask((65, 3), fill=True) - mask1_count = mask1.count() - mask1_size = mask1.get_size() - - mask2_w, mask2_h = 67, 5 - mask2_size = (mask2_w, mask2_h) - mask2 = pygame.mask.Mask(mask2_size) - set_pos = (mask2_w - 1, mask2_h - 1) - mask2.set_at(set_pos) - mask2_count = 1 - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - - overlap_pos = mask1.overlap(mask2, offset) - - self.assertIsNone(overlap_pos, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - self.assertEqual(mask2.get_at(set_pos), 1, msg) - - def test_overlap__offset_boundary(self): - """Ensures overlap handles offsets and boundaries correctly.""" - mask1 = pygame.mask.Mask((13, 3), fill=True) - mask2 = pygame.mask.Mask((7, 5), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - # Check the 4 boundaries. - offsets = ( - (mask1_size[0], 0), # off right - (0, mask1_size[1]), # off bottom - (-mask2_size[0], 0), # off left - (0, -mask2_size[1]), - ) # off top - - for offset in offsets: - msg = "offset={}".format(offset) - - overlap_pos = mask1.overlap(mask2, offset) - - self.assertIsNone(overlap_pos, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap__bit_boundaries(self): - """Ensures overlap handles masks of different sizes correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(2, 4): - for width in range(2, 66): - mask_size = (width, height) - mask_count = width * height - mask1 = pygame.mask.Mask(mask_size, fill=True) - mask2 = pygame.mask.Mask(mask_size, fill=True) - - # Testing masks offset from each other. - for offset in self.ORIGIN_OFFSETS: - msg = "size={}, offset={}".format(mask_size, offset) - expected_pos = (max(offset[0], 0), max(offset[1], 0)) - - overlap_pos = mask1.overlap(mask2, offset) - - self.assertEqual(overlap_pos, expected_pos, msg) - - # Ensure mask1/mask2 unchanged. 
- self.assertEqual(mask1.count(), mask_count, msg) - self.assertEqual(mask2.count(), mask_count, msg) - self.assertEqual(mask1.get_size(), mask_size, msg) - self.assertEqual(mask2.get_size(), mask_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap__invalid_mask_arg(self): - """Ensure overlap handles invalid mask arguments correctly.""" - size = (5, 3) - offset = (0, 0) - mask = pygame.mask.Mask(size) - invalid_mask = pygame.Surface(size) - - with self.assertRaises(TypeError): - overlap_pos = mask.overlap(invalid_mask, offset) - - def test_overlap__invalid_offset_arg(self): - """Ensure overlap handles invalid offset arguments correctly.""" - size = (2, 7) - offset = "(0, 0)" - mask1 = pygame.mask.Mask(size) - mask2 = pygame.mask.Mask(size) - - with self.assertRaises(TypeError): - overlap_pos = mask1.overlap(mask2, offset) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap_area(self): - """Ensure the overlap_area is correctly calculated. - - Testing the different combinations of full/empty masks: - (mask1-filled) 1 overlap_area 1 (mask2-filled) - (mask1-empty) 0 overlap_area 1 (mask2-filled) - (mask1-filled) 1 overlap_area 0 (mask2-empty) - (mask1-empty) 0 overlap_area 0 (mask2-empty) - """ - expected_size = width, height = (4, 4) - offset = (0, 0) - expected_default = 0 - expected_counts = {(True, True): width * height} - - for fill2 in (True, False): - mask2 = pygame.mask.Mask(expected_size, fill=fill2) - mask2_count = mask2.count() - - for fill1 in (True, False): - key = (fill1, fill2) - msg = "key={}".format(key) - mask1 = pygame.mask.Mask(expected_size, fill=fill1) - mask1_count = mask1.count() - expected_count = expected_counts.get(key, expected_default) - - overlap_count = mask1.overlap_area(mask2, offset) - - self.assertEqual(overlap_count, expected_count, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), expected_size, msg) - self.assertEqual(mask2.get_size(), expected_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap_area__offset(self): - """Ensure an offset overlap_area is correctly calculated.""" - mask1 = pygame.mask.Mask((65, 3), fill=True) - mask2 = pygame.mask.Mask((66, 4), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_count = overlap_rect.w * overlap_rect.h - - overlap_count = mask1.overlap_area(other=mask2, offset=offset) - - self.assertEqual(overlap_count, expected_count, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - def test_overlap_area__offset_boundary(self): - """Ensures overlap_area handles offsets and boundaries correctly.""" - mask1 = pygame.mask.Mask((11, 3), fill=True) - mask2 = pygame.mask.Mask((5, 7), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - expected_count = 0 - - # Check the 4 boundaries. 
- offsets = ( - (mask1_size[0], 0), # off right - (0, mask1_size[1]), # off bottom - (-mask2_size[0], 0), # off left - (0, -mask2_size[1]), - ) # off top - - for offset in offsets: - msg = "offset={}".format(offset) - - overlap_count = mask1.overlap_area(mask2, Vector2(offset)) - - self.assertEqual(overlap_count, expected_count, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap_area__bit_boundaries(self): - """Ensures overlap_area handles masks of different sizes correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(2, 4): - for width in range(2, 66): - mask_size = (width, height) - mask_count = width * height - mask1 = pygame.mask.Mask(mask_size, fill=True) - mask2 = pygame.mask.Mask(mask_size, fill=True) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # Testing masks offset from each other. - for offset in self.ORIGIN_OFFSETS: - msg = "size={}, offset={}".format(mask_size, offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_overlap_count = overlap_rect.w * overlap_rect.h - - overlap_count = mask1.overlap_area(mask2, offset) - - self.assertEqual(overlap_count, expected_overlap_count, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask_count, msg) - self.assertEqual(mask2.count(), mask_count, msg) - self.assertEqual(mask1.get_size(), mask_size, msg) - self.assertEqual(mask2.get_size(), mask_size, msg) - - def test_overlap_area__invalid_mask_arg(self): - """Ensure overlap_area handles invalid mask arguments correctly.""" - size = (3, 5) - offset = (0, 0) - mask = pygame.mask.Mask(size) - invalid_mask = pygame.Surface(size) - - with self.assertRaises(TypeError): - overlap_count = mask.overlap_area(invalid_mask, offset) - - def test_overlap_area__invalid_offset_arg(self): - """Ensure overlap_area handles invalid offset arguments correctly.""" - size = (7, 2) - offset = "(0, 0)" - mask1 = pygame.mask.Mask(size) - mask2 = pygame.mask.Mask(size) - - with self.assertRaises(TypeError): - overlap_count = mask1.overlap_area(mask2, offset) - - def test_overlap_mask(self): - """Ensure overlap_mask's mask has correct bits set. 
- - Testing the different combinations of full/empty masks: - (mask1-filled) 1 overlap_mask 1 (mask2-filled) - (mask1-empty) 0 overlap_mask 1 (mask2-filled) - (mask1-filled) 1 overlap_mask 0 (mask2-empty) - (mask1-empty) 0 overlap_mask 0 (mask2-empty) - """ - expected_size = (4, 4) - offset = (0, 0) - expected_default = pygame.mask.Mask(expected_size) - expected_masks = {(True, True): pygame.mask.Mask(expected_size, fill=True)} - - for fill2 in (True, False): - mask2 = pygame.mask.Mask(expected_size, fill=fill2) - mask2_count = mask2.count() - - for fill1 in (True, False): - key = (fill1, fill2) - msg = "key={}".format(key) - mask1 = pygame.mask.Mask(expected_size, fill=fill1) - mask1_count = mask1.count() - expected_mask = expected_masks.get(key, expected_default) - - overlap_mask = mask1.overlap_mask(other=mask2, offset=offset) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask, msg) - assertMaskEqual(self, overlap_mask, expected_mask, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), expected_size, msg) - self.assertEqual(mask2.get_size(), expected_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap_mask__bits_set(self): - """Ensure overlap_mask's mask has correct bits set.""" - mask1 = pygame.mask.Mask((50, 50), fill=True) - mask2 = pygame.mask.Mask((300, 10), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - mask3 = mask1.overlap_mask(mask2, (-1, 0)) - - for i in range(50): - for j in range(10): - self.assertEqual(mask3.get_at((i, j)), 1, "({}, {})".format(i, j)) - - for i in range(50): - for j in range(11, 50): - self.assertEqual(mask3.get_at((i, j)), 0, "({}, {})".format(i, j)) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count) - self.assertEqual(mask2.count(), mask2_count) - self.assertEqual(mask1.get_size(), mask1_size) - self.assertEqual(mask2.get_size(), mask2_size) - - def test_overlap_mask__offset(self): - """Ensure an offset overlap_mask's mask is correctly calculated.""" - mask1 = pygame.mask.Mask((65, 3), fill=True) - mask2 = pygame.mask.Mask((66, 4), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - expected_mask = pygame.Mask(mask1_size) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_mask.clear() - expected_mask.draw( - pygame.Mask(overlap_rect.size, fill=True), overlap_rect.topleft - ) - - overlap_mask = mask1.overlap_mask(mask2, offset) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask, msg) - assertMaskEqual(self, overlap_mask, expected_mask, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap_mask__specific_offsets(self): - """Ensure an offset overlap_mask's mask is correctly calculated. 
- - Testing the specific case of: - -both masks are wider than 32 bits - -a positive offset is used - -the mask calling overlap_mask() is wider than the mask passed in - """ - mask1 = pygame.mask.Mask((65, 5), fill=True) - mask2 = pygame.mask.Mask((33, 3), fill=True) - expected_mask = pygame.Mask(mask1.get_size()) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # This rect's corners are used to move rect2 around the inside of - # rect1. - corner_rect = rect1.inflate(-2, -2) - - for corner in ("topleft", "topright", "bottomright", "bottomleft"): - setattr(rect2, corner, getattr(corner_rect, corner)) - offset = rect2.topleft - msg = "offset={}".format(offset) - overlap_rect = rect1.clip(rect2) - expected_mask.clear() - expected_mask.draw( - pygame.Mask(overlap_rect.size, fill=True), overlap_rect.topleft - ) - - overlap_mask = mask1.overlap_mask(mask2, offset) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask, msg) - assertMaskEqual(self, overlap_mask, expected_mask, msg) - - def test_overlap_mask__offset_boundary(self): - """Ensures overlap_mask handles offsets and boundaries correctly.""" - mask1 = pygame.mask.Mask((9, 3), fill=True) - mask2 = pygame.mask.Mask((11, 5), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - expected_count = 0 - expected_size = mask1_size - - # Check the 4 boundaries. - offsets = ( - (mask1_size[0], 0), # off right - (0, mask1_size[1]), # off bottom - (-mask2_size[0], 0), # off left - (0, -mask2_size[1]), - ) # off top - - for offset in offsets: - msg = "offset={}".format(offset) - - overlap_mask = mask1.overlap_mask(mask2, offset) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask, msg) - self.assertEqual(overlap_mask.count(), expected_count, msg) - self.assertEqual(overlap_mask.get_size(), expected_size, msg) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_overlap_mask__bit_boundaries(self): - """Ensures overlap_mask handles masks of different sizes correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(2, 4): - for width in range(2, 66): - mask_size = (width, height) - mask_count = width * height - mask1 = pygame.mask.Mask(mask_size, fill=True) - mask2 = pygame.mask.Mask(mask_size, fill=True) - expected_mask = pygame.Mask(mask_size) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # Testing masks offset from each other. - for offset in self.ORIGIN_OFFSETS: - msg = "size={}, offset={}".format(mask_size, offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_mask.clear() - expected_mask.draw( - pygame.Mask(overlap_rect.size, fill=True), overlap_rect.topleft - ) - - overlap_mask = mask1.overlap_mask(mask2, offset) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask, msg) - assertMaskEqual(self, overlap_mask, expected_mask, msg) - - # Ensure mask1/mask2 unchanged. 
- self.assertEqual(mask1.count(), mask_count, msg) - self.assertEqual(mask2.count(), mask_count, msg) - self.assertEqual(mask1.get_size(), mask_size, msg) - self.assertEqual(mask2.get_size(), mask_size, msg) - - def test_overlap_mask__invalid_mask_arg(self): - """Ensure overlap_mask handles invalid mask arguments correctly.""" - size = (3, 2) - offset = (0, 0) - mask = pygame.mask.Mask(size) - invalid_mask = pygame.Surface(size) - - with self.assertRaises(TypeError): - overlap_mask = mask.overlap_mask(invalid_mask, offset) - - def test_overlap_mask__invalid_offset_arg(self): - """Ensure overlap_mask handles invalid offset arguments correctly.""" - size = (5, 2) - offset = "(0, 0)" - mask1 = pygame.mask.Mask(size) - mask2 = pygame.mask.Mask(size) - - with self.assertRaises(TypeError): - overlap_mask = mask1.overlap_mask(mask2, offset) - - def test_mask_access(self): - """do the set_at, and get_at parts work correctly?""" - m = pygame.Mask((10, 10)) - m.set_at((0, 0), 1) - self.assertEqual(m.get_at((0, 0)), 1) - m.set_at((9, 0), 1) - self.assertEqual(m.get_at((9, 0)), 1) - - # s = pygame.Surface((10,10)) - # s.set_at((1,0), (0, 0, 1, 255)) - # self.assertEqual(s.get_at((1,0)), (0, 0, 1, 255)) - # s.set_at((-1,0), (0, 0, 1, 255)) - - # out of bounds, should get IndexError - self.assertRaises(IndexError, lambda: m.get_at((-1, 0))) - self.assertRaises(IndexError, lambda: m.set_at((-1, 0), 1)) - self.assertRaises(IndexError, lambda: m.set_at((10, 0), 1)) - self.assertRaises(IndexError, lambda: m.set_at((0, 10), 1)) - - def test_fill(self): - """Ensure a mask can be filled.""" - width, height = 11, 23 - expected_count = width * height - expected_size = (width, height) - mask = pygame.mask.Mask(expected_size) - - mask.fill() - - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_fill__bit_boundaries(self): - """Ensures masks of different sizes are filled correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(1, 4): - for width in range(1, 66): - mask = pygame.mask.Mask((width, height)) - expected_count = width * height - - mask.fill() - - self.assertEqual( - mask.count(), expected_count, "size=({}, {})".format(width, height) - ) - - def test_clear(self): - """Ensure a mask can be cleared.""" - expected_count = 0 - expected_size = (13, 27) - mask = pygame.mask.Mask(expected_size, fill=True) - - mask.clear() - - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - def test_clear__bit_boundaries(self): - """Ensures masks of different sizes are cleared correctly. 
- - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - expected_count = 0 - - for height in range(1, 4): - for width in range(1, 66): - mask = pygame.mask.Mask((width, height), fill=True) - - mask.clear() - - self.assertEqual( - mask.count(), expected_count, "size=({}, {})".format(width, height) - ) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_invert(self): - """Ensure a mask can be inverted.""" - side = 73 - expected_size = (side, side) - mask1 = pygame.mask.Mask(expected_size) - mask2 = pygame.mask.Mask(expected_size, fill=True) - expected_count1 = side * side - expected_count2 = 0 - - for i in range(side): - expected_count1 -= 1 - expected_count2 += 1 - pos = (i, i) - mask1.set_at(pos) - mask2.set_at(pos, 0) - - mask1.invert() - mask2.invert() - - self.assertEqual(mask1.count(), expected_count1) - self.assertEqual(mask2.count(), expected_count2) - self.assertEqual(mask1.get_size(), expected_size) - self.assertEqual(mask2.get_size(), expected_size) - - for i in range(side): - pos = (i, i) - msg = "pos={}".format(pos) - - self.assertEqual(mask1.get_at(pos), 0, msg) - self.assertEqual(mask2.get_at(pos), 1, msg) - - def test_invert__full(self): - """Ensure a full mask can be inverted.""" - expected_count = 0 - expected_size = (43, 97) - mask = pygame.mask.Mask(expected_size, fill=True) - - mask.invert() - - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - def test_invert__empty(self): - """Ensure an empty mask can be inverted.""" - width, height = 43, 97 - expected_size = (width, height) - expected_count = width * height - mask = pygame.mask.Mask(expected_size) - - mask.invert() - - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_invert__bit_boundaries(self): - """Ensures masks of different sizes are inverted correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for fill in (True, False): - for height in range(1, 4): - for width in range(1, 66): - mask = pygame.mask.Mask((width, height), fill=fill) - expected_count = 0 if fill else width * height - - mask.invert() - - self.assertEqual( - mask.count(), - expected_count, - "fill={}, size=({}, {})".format(fill, width, height), - ) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_scale(self): - """Ensure a mask can be scaled.""" - width, height = 43, 61 - original_size = (width, height) - - for fill in (True, False): - original_mask = pygame.mask.Mask(original_size, fill=fill) - original_count = width * height if fill else 0 - - # Test a range of sizes. Also tests scaling to 'same' - # size when new_w, new_h = width, height - for new_w in range(width - 10, width + 10): - for new_h in range(height - 10, height + 10): - expected_size = (new_w, new_h) - expected_count = new_w * new_h if fill else 0 - msg = "size={}".format(expected_size) - - mask = original_mask.scale(scale=expected_size) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual(mask.get_size(), expected_size) - - # Ensure the original mask is unchanged. 
- self.assertEqual(original_mask.count(), original_count, msg) - self.assertEqual(original_mask.get_size(), original_size, msg) - - def test_scale__negative_size(self): - """Ensure scale handles negative sizes correctly.""" - mask = pygame.Mask((100, 100)) - - with self.assertRaises(ValueError): - mask.scale((-1, -1)) - - with self.assertRaises(ValueError): - mask.scale(Vector2(-1, 10)) - - with self.assertRaises(ValueError): - mask.scale((10, -1)) - - def test_draw(self): - """Ensure a mask can be drawn onto another mask. - - Testing the different combinations of full/empty masks: - (mask1-filled) 1 draw 1 (mask2-filled) - (mask1-empty) 0 draw 1 (mask2-filled) - (mask1-filled) 1 draw 0 (mask2-empty) - (mask1-empty) 0 draw 0 (mask2-empty) - """ - expected_size = (4, 4) - offset = (0, 0) - expected_default = pygame.mask.Mask(expected_size, fill=True) - expected_masks = {(False, False): pygame.mask.Mask(expected_size)} - - for fill2 in (True, False): - mask2 = pygame.mask.Mask(expected_size, fill=fill2) - mask2_count = mask2.count() - - for fill1 in (True, False): - key = (fill1, fill2) - msg = "key={}".format(key) - mask1 = pygame.mask.Mask(expected_size, fill=fill1) - expected_mask = expected_masks.get(key, expected_default) - - mask1.draw(mask2, offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - # Ensure mask2 unchanged. - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask2.get_size(), expected_size, msg) - - def test_draw__offset(self): - """Ensure an offset mask can be drawn onto another mask.""" - mask1 = pygame.mask.Mask((65, 3)) - mask2 = pygame.mask.Mask((66, 4), fill=True) - mask2_count = mask2.count() - mask2_size = mask2.get_size() - expected_mask = pygame.Mask(mask1.get_size()) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_mask.clear() - - # Normally draw() could be used to set these bits, but the draw() - # method is being tested here, so a loop is used instead. - for x in range(overlap_rect.left, overlap_rect.right): - for y in range(overlap_rect.top, overlap_rect.bottom): - expected_mask.set_at((x, y)) - mask1.clear() # Ensure it's empty for testing each offset. - - mask1.draw(other=mask2, offset=offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - # Ensure mask2 unchanged. - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - def test_draw__specific_offsets(self): - """Ensure an offset mask can be drawn onto another mask. - - Testing the specific case of: - -both masks are wider than 32 bits - -a positive offset is used - -the mask calling draw() is wider than the mask passed in - """ - mask1 = pygame.mask.Mask((65, 5)) - mask2 = pygame.mask.Mask((33, 3), fill=True) - expected_mask = pygame.Mask(mask1.get_size()) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # This rect's corners are used to move rect2 around the inside of - # rect1. 
- corner_rect = rect1.inflate(-2, -2) - - for corner in ("topleft", "topright", "bottomright", "bottomleft"): - setattr(rect2, corner, getattr(corner_rect, corner)) - offset = rect2.topleft - msg = "offset={}".format(offset) - overlap_rect = rect1.clip(rect2) - expected_mask.clear() - - # Normally draw() could be used to set these bits, but the draw() - # method is being tested here, so a loop is used instead. - for x in range(overlap_rect.left, overlap_rect.right): - for y in range(overlap_rect.top, overlap_rect.bottom): - expected_mask.set_at((x, y)) - mask1.clear() # Ensure it's empty for testing each offset. - - mask1.draw(mask2, offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - def test_draw__offset_boundary(self): - """Ensures draw handles offsets and boundaries correctly.""" - mask1 = pygame.mask.Mask((13, 5)) - mask2 = pygame.mask.Mask((7, 3), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - # Check the 4 boundaries. - offsets = ( - (mask1_size[0], 0), # off right - (0, mask1_size[1]), # off bottom - (-mask2_size[0], 0), # off left - (0, -mask2_size[1]), - ) # off top - - for offset in offsets: - msg = "offset={}".format(offset) - - mask1.draw(mask2, offset) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_draw__bit_boundaries(self): - """Ensures draw handles masks of different sizes correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(2, 4): - for width in range(2, 66): - mask_size = (width, height) - mask_count = width * height - mask1 = pygame.mask.Mask(mask_size) - mask2 = pygame.mask.Mask(mask_size, fill=True) - expected_mask = pygame.Mask(mask_size) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # Testing masks offset from each other. - for offset in self.ORIGIN_OFFSETS: - msg = "size={}, offset={}".format(mask_size, offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_mask.clear() - - # Normally draw() could be used to set these bits, but the - # draw() method is being tested here, so a loop is used - # instead. - for x in range(overlap_rect.left, overlap_rect.right): - for y in range(overlap_rect.top, overlap_rect.bottom): - expected_mask.set_at((x, y)) - mask1.clear() # Ensure it's empty for each test. - - mask1.draw(mask2, offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - # Ensure mask2 unchanged. 
- self.assertEqual(mask2.count(), mask_count, msg) - self.assertEqual(mask2.get_size(), mask_size, msg) - - def test_draw__invalid_mask_arg(self): - """Ensure draw handles invalid mask arguments correctly.""" - size = (7, 3) - offset = (0, 0) - mask = pygame.mask.Mask(size) - invalid_mask = pygame.Surface(size) - - with self.assertRaises(TypeError): - mask.draw(invalid_mask, offset) - - def test_draw__invalid_offset_arg(self): - """Ensure draw handles invalid offset arguments correctly.""" - size = (5, 7) - offset = "(0, 0)" - mask1 = pygame.mask.Mask(size) - mask2 = pygame.mask.Mask(size) - - with self.assertRaises(TypeError): - mask1.draw(mask2, offset) - - def test_erase(self): - """Ensure a mask can erase another mask. - - Testing the different combinations of full/empty masks: - (mask1-filled) 1 erase 1 (mask2-filled) - (mask1-empty) 0 erase 1 (mask2-filled) - (mask1-filled) 1 erase 0 (mask2-empty) - (mask1-empty) 0 erase 0 (mask2-empty) - """ - expected_size = (4, 4) - offset = (0, 0) - expected_default = pygame.mask.Mask(expected_size) - expected_masks = {(True, False): pygame.mask.Mask(expected_size, fill=True)} - - for fill2 in (True, False): - mask2 = pygame.mask.Mask(expected_size, fill=fill2) - mask2_count = mask2.count() - - for fill1 in (True, False): - key = (fill1, fill2) - msg = "key={}".format(key) - mask1 = pygame.mask.Mask(expected_size, fill=fill1) - expected_mask = expected_masks.get(key, expected_default) - - mask1.erase(mask2, offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - # Ensure mask2 unchanged. - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask2.get_size(), expected_size, msg) - - def test_erase__offset(self): - """Ensure an offset mask can erase another mask.""" - mask1 = pygame.mask.Mask((65, 3)) - mask2 = pygame.mask.Mask((66, 4), fill=True) - mask2_count = mask2.count() - mask2_size = mask2.get_size() - expected_mask = pygame.Mask(mask1.get_size()) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - for offset in self.ORIGIN_OFFSETS: - msg = "offset={}".format(offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_mask.fill() - - # Normally erase() could be used to clear these bits, but the - # erase() method is being tested here, so a loop is used instead. - for x in range(overlap_rect.left, overlap_rect.right): - for y in range(overlap_rect.top, overlap_rect.bottom): - expected_mask.set_at((x, y), 0) - mask1.fill() # Ensure it's filled for testing each offset. - - mask1.erase(other=mask2, offset=offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - # Ensure mask2 unchanged. - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - def test_erase__specific_offsets(self): - """Ensure an offset mask can erase another mask. - - Testing the specific case of: - -both masks are wider than 32 bits - -a positive offset is used - -the mask calling erase() is wider than the mask passed in - """ - mask1 = pygame.mask.Mask((65, 5)) - mask2 = pygame.mask.Mask((33, 3), fill=True) - expected_mask = pygame.Mask(mask1.get_size()) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # This rect's corners are used to move rect2 around the inside of - # rect1. 
- corner_rect = rect1.inflate(-2, -2) - - for corner in ("topleft", "topright", "bottomright", "bottomleft"): - setattr(rect2, corner, getattr(corner_rect, corner)) - offset = rect2.topleft - msg = "offset={}".format(offset) - overlap_rect = rect1.clip(rect2) - expected_mask.fill() - - # Normally erase() could be used to clear these bits, but the - # erase() method is being tested here, so a loop is used instead. - for x in range(overlap_rect.left, overlap_rect.right): - for y in range(overlap_rect.top, overlap_rect.bottom): - expected_mask.set_at((x, y), 0) - mask1.fill() # Ensure it's filled for testing each offset. - - mask1.erase(mask2, Vector2(offset)) - - assertMaskEqual(self, mask1, expected_mask, msg) - - def test_erase__offset_boundary(self): - """Ensures erase handles offsets and boundaries correctly.""" - mask1 = pygame.mask.Mask((7, 11), fill=True) - mask2 = pygame.mask.Mask((3, 13), fill=True) - mask1_count = mask1.count() - mask2_count = mask2.count() - mask1_size = mask1.get_size() - mask2_size = mask2.get_size() - - # Check the 4 boundaries. - offsets = ( - (mask1_size[0], 0), # off right - (0, mask1_size[1]), # off bottom - (-mask2_size[0], 0), # off left - (0, -mask2_size[1]), - ) # off top - - for offset in offsets: - msg = "offset={}".format(offset) - - mask1.erase(mask2, offset) - - # Ensure mask1/mask2 unchanged. - self.assertEqual(mask1.count(), mask1_count, msg) - self.assertEqual(mask2.count(), mask2_count, msg) - self.assertEqual(mask1.get_size(), mask1_size, msg) - self.assertEqual(mask2.get_size(), mask2_size, msg) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_erase__bit_boundaries(self): - """Ensures erase handles masks of different sizes correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for height in range(2, 4): - for width in range(2, 66): - mask_size = (width, height) - mask_count = width * height - mask1 = pygame.mask.Mask(mask_size) - mask2 = pygame.mask.Mask(mask_size, fill=True) - expected_mask = pygame.Mask(mask_size) - - # Using rects to help determine the overlapping area. - rect1 = mask1.get_rect() - rect2 = mask2.get_rect() - - # Testing masks offset from each other. - for offset in self.ORIGIN_OFFSETS: - msg = "size={}, offset={}".format(mask_size, offset) - rect2.topleft = offset - overlap_rect = rect1.clip(rect2) - expected_mask.fill() - - # Normally erase() could be used to clear these bits, but - # the erase() method is being tested here, so a loop is - # used instead. - for x in range(overlap_rect.left, overlap_rect.right): - for y in range(overlap_rect.top, overlap_rect.bottom): - expected_mask.set_at((x, y), 0) - mask1.fill() # Ensure it's filled for each test. - - mask1.erase(mask2, offset) - - assertMaskEqual(self, mask1, expected_mask, msg) - - # Ensure mask2 unchanged. 
- self.assertEqual(mask2.count(), mask_count, msg) - self.assertEqual(mask2.get_size(), mask_size, msg) - - def test_erase__invalid_mask_arg(self): - """Ensure erase handles invalid mask arguments correctly.""" - size = (3, 7) - offset = (0, 0) - mask = pygame.mask.Mask(size) - invalid_mask = pygame.Surface(size) - - with self.assertRaises(TypeError): - mask.erase(invalid_mask, offset) - - def test_erase__invalid_offset_arg(self): - """Ensure erase handles invalid offset arguments correctly.""" - size = (7, 5) - offset = "(0, 0)" - mask1 = pygame.mask.Mask(size) - mask2 = pygame.mask.Mask(size) - - with self.assertRaises(TypeError): - mask1.erase(mask2, offset) - - def test_count(self): - """Ensure a mask's set bits are correctly counted.""" - side = 67 - expected_size = (side, side) - expected_count = 0 - mask = pygame.mask.Mask(expected_size) - - for i in range(side): - expected_count += 1 - mask.set_at((i, i)) - - count = mask.count() - - self.assertEqual(count, expected_count) - self.assertEqual(mask.get_size(), expected_size) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_count__bit_boundaries(self): - """Ensures the set bits of different sized masks are counted correctly. - - Tests masks of different sizes, including: - -masks 31 to 33 bits wide (32 bit boundaries) - -masks 63 to 65 bits wide (64 bit boundaries) - """ - for fill in (True, False): - for height in range(1, 4): - for width in range(1, 66): - mask = pygame.mask.Mask((width, height), fill=fill) - expected_count = width * height if fill else 0 - - # Test toggling each bit. - for pos in ((x, y) for y in range(height) for x in range(width)): - if fill: - mask.set_at(pos, 0) - expected_count -= 1 - else: - mask.set_at(pos, 1) - expected_count += 1 - - count = mask.count() - - self.assertEqual( - count, - expected_count, - "fill={}, size=({}, {}), pos={}".format( - fill, width, height, pos - ), - ) - - def test_count__full_mask(self): - """Ensure a full mask's set bits are correctly counted.""" - width, height = 17, 97 - expected_size = (width, height) - expected_count = width * height - mask = pygame.mask.Mask(expected_size, fill=True) - - count = mask.count() - - self.assertEqual(count, expected_count) - self.assertEqual(mask.get_size(), expected_size) - - def test_count__empty_mask(self): - """Ensure an empty mask's set bits are correctly counted.""" - expected_count = 0 - expected_size = (13, 27) - mask = pygame.mask.Mask(expected_size) - - count = mask.count() - - self.assertEqual(count, expected_count) - self.assertEqual(mask.get_size(), expected_size) - - def test_centroid(self): - """Ensure a filled mask's centroid is correctly calculated.""" - mask = pygame.mask.Mask((5, 7), fill=True) - expected_centroid = mask.get_rect().center - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_centroid__empty_mask(self): - """Ensure an empty mask's centroid is correctly calculated.""" - expected_centroid = (0, 0) - expected_size = (101, 103) - mask = pygame.mask.Mask(expected_size) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - self.assertEqual(mask.get_size(), expected_size) - - def test_centroid__single_row(self): - """Ensure a mask's centroid is correctly calculated - when setting points along a single row.""" - width, height = (5, 7) - mask = pygame.mask.Mask((width, height)) - - for y in range(height): - mask.clear() # Clear for each row. 
- - for x in range(width): - mask.set_at((x, y)) - expected_centroid = (x // 2, y) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_centroid__two_rows(self): - """Ensure a mask's centroid is correctly calculated - when setting points along two rows.""" - width, height = (5, 7) - mask = pygame.mask.Mask((width, height)) - - # The first row is tested with each of the other rows. - for y in range(1, height): - mask.clear() # Clear for each set of rows. - - for x in range(width): - mask.set_at((x, 0)) - mask.set_at((x, y)) - expected_centroid = (x // 2, y // 2) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_centroid__single_column(self): - """Ensure a mask's centroid is correctly calculated - when setting points along a single column.""" - width, height = (5, 7) - mask = pygame.mask.Mask((width, height)) - - for x in range(width): - mask.clear() # Clear for each column. - - for y in range(height): - mask.set_at((x, y)) - expected_centroid = (x, y // 2) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_centroid__two_columns(self): - """Ensure a mask's centroid is correctly calculated - when setting points along two columns.""" - width, height = (5, 7) - mask = pygame.mask.Mask((width, height)) - - # The first column is tested with each of the other columns. - for x in range(1, width): - mask.clear() # Clear for each set of columns. - - for y in range(height): - mask.set_at((0, y)) - mask.set_at((x, y)) - expected_centroid = (x // 2, y // 2) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_centroid__all_corners(self): - """Ensure a mask's centroid is correctly calculated - when its corners are set.""" - mask = pygame.mask.Mask((5, 7)) - expected_centroid = mask.get_rect().center - - for corner in corners(mask): - mask.set_at(corner) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_centroid__two_corners(self): - """Ensure a mask's centroid is correctly calculated - when only two corners are set.""" - mask = pygame.mask.Mask((5, 7)) - mask_rect = mask.get_rect() - mask_corners = corners(mask) - - for i, corner1 in enumerate(mask_corners): - for corner2 in mask_corners[i + 1 :]: - mask.clear() # Clear for each pair of corners. 
- mask.set_at(corner1) - mask.set_at(corner2) - - if corner1[0] == corner2[0]: - expected_centroid = (corner1[0], abs(corner1[1] - corner2[1]) // 2) - elif corner1[1] == corner2[1]: - expected_centroid = (abs(corner1[0] - corner2[0]) // 2, corner1[1]) - else: - expected_centroid = mask_rect.center - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def todo_test_angle(self): - """Ensure a mask's orientation angle is correctly calculated.""" - self.fail() - - def test_angle__empty_mask(self): - """Ensure an empty mask's angle is correctly calculated.""" - expected_angle = 0.0 - expected_size = (107, 43) - mask = pygame.mask.Mask(expected_size) - - angle = mask.angle() - - self.assertIsInstance(angle, float) - self.assertAlmostEqual(angle, expected_angle) - self.assertEqual(mask.get_size(), expected_size) - - def test_drawing(self): - """Test fill, clear, invert, draw, erase""" - m = pygame.Mask((100, 100)) - self.assertEqual(m.count(), 0) - - m.fill() - self.assertEqual(m.count(), 10000) - - m2 = pygame.Mask((10, 10), fill=True) - m.erase(m2, (50, 50)) - self.assertEqual(m.count(), 9900) - - m.invert() - self.assertEqual(m.count(), 100) - - m.draw(m2, (0, 0)) - self.assertEqual(m.count(), 200) - - m.clear() - self.assertEqual(m.count(), 0) - - def test_outline(self): - """ """ - - m = pygame.Mask((20, 20)) - self.assertEqual(m.outline(), []) - - m.set_at((10, 10), 1) - self.assertEqual(m.outline(), [(10, 10)]) - - m.set_at((10, 12), 1) - self.assertEqual(m.outline(10), [(10, 10)]) - - m.set_at((11, 11), 1) - self.assertEqual( - m.outline(), [(10, 10), (11, 11), (10, 12), (11, 11), (10, 10)] - ) - self.assertEqual(m.outline(every=2), [(10, 10), (10, 12), (10, 10)]) - - # TODO: Test more corner case outlines. - - def test_convolve__size(self): - sizes = [(1, 1), (31, 31), (32, 32), (100, 100)] - for s1 in sizes: - m1 = pygame.Mask(s1) - for s2 in sizes: - m2 = pygame.Mask(s2) - o = m1.convolve(m2) - - self.assertIsInstance(o, pygame.mask.Mask) - - for i in (0, 1): - self.assertEqual( - o.get_size()[i], m1.get_size()[i] + m2.get_size()[i] - 1 - ) - - def test_convolve__point_identities(self): - """Convolving with a single point is the identity, while convolving a point with something flips it.""" - m = random_mask((100, 100)) - k = pygame.Mask((1, 1)) - k.set_at((0, 0)) - - convolve_mask = m.convolve(k) - - self.assertIsInstance(convolve_mask, pygame.mask.Mask) - assertMaskEqual(self, m, convolve_mask) - - convolve_mask = k.convolve(k.convolve(m)) - - self.assertIsInstance(convolve_mask, pygame.mask.Mask) - assertMaskEqual(self, m, convolve_mask) - - def test_convolve__with_output(self): - """checks that convolution modifies only the correct portion of the output""" - - m = random_mask((10, 10)) - k = pygame.Mask((2, 2)) - k.set_at((0, 0)) - - o = pygame.Mask((50, 50)) - test = pygame.Mask((50, 50)) - - m.convolve(k, o) - test.draw(m, (1, 1)) - - self.assertIsInstance(o, pygame.mask.Mask) - assertMaskEqual(self, o, test) - - o.clear() - test.clear() - - m.convolve(other=k, output=o, offset=Vector2(10, 10)) - test.draw(m, (11, 11)) - - self.assertIsInstance(o, pygame.mask.Mask) - assertMaskEqual(self, o, test) - - def test_convolve__out_of_range(self): - full = pygame.Mask((2, 2), fill=True) - # Tuple of points (out of range) and the expected count for each. 
- pts_data = (((0, 3), 0), ((0, 2), 3), ((-2, -2), 1), ((-3, -3), 0)) - - for pt, expected_count in pts_data: - convolve_mask = full.convolve(full, None, pt) - - self.assertIsInstance(convolve_mask, pygame.mask.Mask) - self.assertEqual(convolve_mask.count(), expected_count) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_convolve(self): - """Tests the definition of convolution""" - m1 = random_mask((100, 100)) - m2 = random_mask((100, 100)) - conv = m1.convolve(m2) - - self.assertIsInstance(conv, pygame.mask.Mask) - for i in range(conv.get_size()[0]): - for j in range(conv.get_size()[1]): - self.assertEqual( - conv.get_at((i, j)) == 0, m1.overlap(m2, (i - 99, j - 99)) is None - ) - - def _draw_component_pattern_box(self, mask, size, pos, inverse=False): - # Helper method to create/draw a 'box' pattern for testing. - # - # 111 - # 101 3x3 example pattern - # 111 - pattern = pygame.mask.Mask((size, size), fill=True) - pattern.set_at((size // 2, size // 2), 0) - - if inverse: - mask.erase(pattern, pos) - pattern.invert() - else: - mask.draw(pattern, pos) - - return pattern - - def _draw_component_pattern_x(self, mask, size, pos, inverse=False): - # Helper method to create/draw an 'X' pattern for testing. - # - # 101 - # 010 3x3 example pattern - # 101 - pattern = pygame.mask.Mask((size, size)) - - ymax = size - 1 - for y in range(size): - for x in range(size): - if x in [y, ymax - y]: - pattern.set_at((x, y)) - - if inverse: - mask.erase(pattern, pos) - pattern.invert() - else: - mask.draw(pattern, pos) - - return pattern - - def _draw_component_pattern_plus(self, mask, size, pos, inverse=False): - # Helper method to create/draw a '+' pattern for testing. - # - # 010 - # 111 3x3 example pattern - # 010 - pattern = pygame.mask.Mask((size, size)) - - xmid = ymid = size // 2 - for y in range(size): - for x in range(size): - if x == xmid or y == ymid: - pattern.set_at((x, y)) - - if inverse: - mask.erase(pattern, pos) - pattern.invert() - else: - mask.draw(pattern, pos) - - return pattern - - def test_connected_component(self): - """Ensure a mask's connected component is correctly calculated.""" - width, height = 41, 27 - expected_size = (width, height) - original_mask = pygame.mask.Mask(expected_size) - patterns = [] # Patterns and offsets. - - # Draw some connected patterns on the original mask. - offset = (0, 0) - pattern = self._draw_component_pattern_x(original_mask, 3, offset) - patterns.append((pattern, offset)) - - size = 4 - offset = (width - size, 0) - pattern = self._draw_component_pattern_plus(original_mask, size, offset) - patterns.append((pattern, offset)) - - # Make this one the largest connected component. - offset = (width // 2, height // 2) - pattern = self._draw_component_pattern_box(original_mask, 7, offset) - patterns.append((pattern, offset)) - - expected_pattern, expected_offset = patterns[-1] - expected_count = expected_pattern.count() - original_count = sum(p.count() for p, _ in patterns) - - mask = original_mask.connected_component() - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - self.assertEqual( - mask.overlap_area(expected_pattern, expected_offset), expected_count - ) - - # Ensure the original mask is unchanged. 
- self.assertEqual(original_mask.count(), original_count) - self.assertEqual(original_mask.get_size(), expected_size) - - for pattern, offset in patterns: - self.assertEqual( - original_mask.overlap_area(pattern, offset), pattern.count() - ) - - def test_connected_component__full_mask(self): - """Ensure a mask's connected component is correctly calculated - when the mask is full. - """ - expected_size = (23, 31) - original_mask = pygame.mask.Mask(expected_size, fill=True) - expected_count = original_mask.count() - - mask = original_mask.connected_component() - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - # Ensure the original mask is unchanged. - self.assertEqual(original_mask.count(), expected_count) - self.assertEqual(original_mask.get_size(), expected_size) - - def test_connected_component__empty_mask(self): - """Ensure a mask's connected component is correctly calculated - when the mask is empty. - """ - expected_size = (37, 43) - original_mask = pygame.mask.Mask(expected_size) - original_count = original_mask.count() - expected_count = 0 - - mask = original_mask.connected_component() - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - # Ensure the original mask is unchanged. - self.assertEqual(original_mask.count(), original_count) - self.assertEqual(original_mask.get_size(), expected_size) - - def test_connected_component__one_set_bit(self): - """Ensure a mask's connected component is correctly calculated - when the coordinate's bit is set with a connected component of 1 bit. - """ - width, height = 71, 67 - expected_size = (width, height) - original_mask = pygame.mask.Mask(expected_size, fill=True) - xset, yset = width // 2, height // 2 - set_pos = (xset, yset) - expected_offset = (xset - 1, yset - 1) - - # This isolates the bit at set_pos from all the other bits. - expected_pattern = self._draw_component_pattern_box( - original_mask, 3, expected_offset, inverse=True - ) - expected_count = 1 - original_count = original_mask.count() - - mask = original_mask.connected_component(set_pos) - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - self.assertEqual( - mask.overlap_area(expected_pattern, expected_offset), expected_count - ) - - # Ensure the original mask is unchanged. - self.assertEqual(original_mask.count(), original_count) - self.assertEqual(original_mask.get_size(), expected_size) - self.assertEqual( - original_mask.overlap_area(expected_pattern, expected_offset), - expected_count, - ) - - def test_connected_component__multi_set_bits(self): - """Ensure a mask's connected component is correctly calculated - when the coordinate's bit is set with a connected component of > 1 bit. - """ - expected_size = (113, 67) - original_mask = pygame.mask.Mask(expected_size) - p_width, p_height = 11, 13 - set_pos = xset, yset = 11, 21 - expected_offset = (xset - 1, yset - 1) - expected_pattern = pygame.mask.Mask((p_width, p_height), fill=True) - - # Make an unsymmetrical pattern. All the set bits need to be connected - # in the resulting pattern for this to work properly. 
- for y in range(3, p_height): - for x in range(1, p_width): - if x in [y, y - 3, p_width - 4]: - expected_pattern.set_at((x, y), 0) - - expected_count = expected_pattern.count() - original_mask.draw(expected_pattern, expected_offset) - - mask = original_mask.connected_component(set_pos) - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - self.assertEqual( - mask.overlap_area(expected_pattern, expected_offset), expected_count - ) - - # Ensure the original mask is unchanged. - self.assertEqual(original_mask.count(), expected_count) - self.assertEqual(original_mask.get_size(), expected_size) - self.assertEqual( - original_mask.overlap_area(expected_pattern, expected_offset), - expected_count, - ) - - def test_connected_component__unset_bit(self): - """Ensure a mask's connected component is correctly calculated - when the coordinate's bit is unset. - """ - width, height = 109, 101 - expected_size = (width, height) - original_mask = pygame.mask.Mask(expected_size, fill=True) - unset_pos = (width // 2, height // 2) - original_mask.set_at(unset_pos, 0) - original_count = original_mask.count() - expected_count = 0 - - mask = original_mask.connected_component(unset_pos) - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), expected_count) - self.assertEqual(mask.get_size(), expected_size) - - # Ensure the original mask is unchanged. - self.assertEqual(original_mask.count(), original_count) - self.assertEqual(original_mask.get_size(), expected_size) - self.assertEqual(original_mask.get_at(unset_pos), 0) - - def test_connected_component__out_of_bounds(self): - """Ensure connected_component() checks bounds.""" - width, height = 19, 11 - original_size = (width, height) - original_mask = pygame.mask.Mask(original_size, fill=True) - original_count = original_mask.count() - - for pos in ((0, -1), (-1, 0), (0, height + 1), (width + 1, 0)): - with self.assertRaises(IndexError): - mask = original_mask.connected_component(pos) - - # Ensure the original mask is unchanged. 
- self.assertEqual(original_mask.count(), original_count) - self.assertEqual(original_mask.get_size(), original_size) - - def test_connected_components(self): - """ """ - m = pygame.Mask((10, 10)) - - self.assertListEqual(m.connected_components(), []) - - comp = m.connected_component() - - self.assertEqual(m.count(), comp.count()) - - m.set_at((0, 0), 1) - m.set_at((1, 1), 1) - comp = m.connected_component() - comps = m.connected_components() - comps1 = m.connected_components(1) - comps2 = m.connected_components(2) - comps3 = m.connected_components(3) - - self.assertEqual(comp.count(), comps[0].count()) - self.assertEqual(comps1[0].count(), 2) - self.assertEqual(comps2[0].count(), 2) - self.assertListEqual(comps3, []) - - m.set_at((9, 9), 1) - comp = m.connected_component() - comp1 = m.connected_component((1, 1)) - comp2 = m.connected_component((2, 2)) - comps = m.connected_components() - comps1 = m.connected_components(1) - comps2 = m.connected_components(minimum=2) - comps3 = m.connected_components(3) - - self.assertEqual(comp.count(), 2) - self.assertEqual(comp1.count(), 2) - self.assertEqual(comp2.count(), 0) - self.assertEqual(len(comps), 2) - self.assertEqual(len(comps1), 2) - self.assertEqual(len(comps2), 1) - self.assertEqual(len(comps3), 0) - - for mask in comps: - self.assertIsInstance(mask, pygame.mask.Mask) - - def test_connected_components__negative_min_with_empty_mask(self): - """Ensures connected_components() properly handles negative min values - when the mask is empty. - - Negative and zero values for the min parameter (minimum number of bits - per connected component) equate to setting it to one. - """ - expected_comps = [] - mask_count = 0 - mask_size = (65, 13) - mask = pygame.mask.Mask(mask_size) - - connected_comps = mask.connected_components(-1) - - self.assertListEqual(connected_comps, expected_comps) - - # Ensure the original mask is unchanged. - self.assertEqual(mask.count(), mask_count) - self.assertEqual(mask.get_size(), mask_size) - - def test_connected_components__negative_min_with_full_mask(self): - """Ensures connected_components() properly handles negative min values - when the mask is full. - - Negative and zero values for the min parameter (minimum number of bits - per connected component) equate to setting it to one. - """ - mask_size = (64, 11) - mask = pygame.mask.Mask(mask_size, fill=True) - mask_count = mask.count() - expected_len = 1 - - connected_comps = mask.connected_components(-2) - - self.assertEqual(len(connected_comps), expected_len) - assertMaskEqual(self, connected_comps[0], mask) - - # Ensure the original mask is unchanged. - self.assertEqual(mask.count(), mask_count) - self.assertEqual(mask.get_size(), mask_size) - - def test_connected_components__negative_min_with_some_bits_set(self): - """Ensures connected_components() properly handles negative min values - when the mask has some bits set. - - Negative and zero values for the min parameter (minimum number of bits - per connected component) equate to setting it to one. - """ - mask_size = (64, 12) - mask = pygame.mask.Mask(mask_size) - expected_comps = {} - - # Set the corners and the center positions. A new expected component - # mask is created for each point. 
- for corner in corners(mask): - mask.set_at(corner) - - new_mask = pygame.mask.Mask(mask_size) - new_mask.set_at(corner) - expected_comps[corner] = new_mask - - center = (mask_size[0] // 2, mask_size[1] // 2) - mask.set_at(center) - - new_mask = pygame.mask.Mask(mask_size) - new_mask.set_at(center) - expected_comps[center] = new_mask - mask_count = mask.count() - - connected_comps = mask.connected_components(-3) - - self.assertEqual(len(connected_comps), len(expected_comps)) - - for comp in connected_comps: - # Since the masks in the connected component list can be in any - # order, loop the expected components to find its match. - found = False - - for pt in tuple(expected_comps.keys()): - if comp.get_at(pt): - found = True - assertMaskEqual(self, comp, expected_comps[pt]) - del expected_comps[pt] # Entry removed so it isn't reused. - break - - self.assertTrue(found, "missing component for pt={}".format(pt)) - - # Ensure the original mask is unchanged. - self.assertEqual(mask.count(), mask_count) - self.assertEqual(mask.get_size(), mask_size) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_get_bounding_rects(self): - """Ensures get_bounding_rects works correctly.""" - # Create masks with different set point groups. Each group of - # connected set points will be contained in its own bounding rect. - # Diagonal points are considered connected. - mask_data = [] # [((size), ((rect1_pts), ...)), ...] - - # Mask 1: - # |0123456789 - # -+---------- - # 0|1100000000 - # 1|1000000000 - # 2|0000000000 - # 3|1001000000 - # 4|0000000000 - # 5|0000000000 - # 6|0000000000 - # 7|0000000000 - # 8|0000000000 - # 9|0000000000 - mask_data.append( - ( - (10, 10), # size - # Points to set for the 3 bounding rects. - (((0, 0), (1, 0), (0, 1)), ((0, 3),), ((3, 3),)), # rect1 # rect2 - ) - ) # rect3 - - # Mask 2: - # |0123 - # -+---- - # 0|1100 - # 1|1111 - mask_data.append( - ( - (4, 2), # size - # Points to set for the 1 bounding rect. - (((0, 0), (1, 0), (0, 1), (1, 1), (2, 1), (3, 1)),), - ) - ) - - # Mask 3: - # |01234 - # -+----- - # 0|00100 - # 1|01110 - # 2|00100 - mask_data.append( - ( - (5, 3), # size - # Points to set for the 1 bounding rect. - (((2, 0), (1, 1), (2, 1), (3, 1), (2, 2)),), - ) - ) - - # Mask 4: - # |01234 - # -+----- - # 0|00010 - # 1|00100 - # 2|01000 - mask_data.append( - ( - (5, 3), # size - # Points to set for the 1 bounding rect. - (((3, 0), (2, 1), (1, 2)),), - ) - ) - - # Mask 5: - # |01234 - # -+----- - # 0|00011 - # 1|11111 - mask_data.append( - ( - (5, 2), # size - # Points to set for the 1 bounding rect. - (((3, 0), (4, 0), (0, 1), (1, 1), (2, 1), (3, 1)),), - ) - ) - - # Mask 6: - # |01234 - # -+----- - # 0|10001 - # 1|00100 - # 2|10001 - mask_data.append( - ( - (5, 3), # size - # Points to set for the 5 bounding rects. 
- ( - ((0, 0),), # rect1 - ((4, 0),), # rect2 - ((2, 1),), # rect3 - ((0, 2),), # rect4 - ((4, 2),), - ), - ) - ) # rect5 - - for size, rect_point_tuples in mask_data: - rects = [] - mask = pygame.Mask(size) - - for rect_points in rect_point_tuples: - rects.append(create_bounding_rect(rect_points)) - for pt in rect_points: - mask.set_at(pt) - - expected_rects = sorted(rects, key=tuple) - - rects = mask.get_bounding_rects() - - self.assertListEqual( - sorted(mask.get_bounding_rects(), key=tuple), - expected_rects, - "size={}".format(size), - ) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_to_surface(self): - """Ensures empty and full masks can be drawn onto surfaces.""" - expected_ref_count = 3 - size = (33, 65) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - test_fills = ((pygame.Color("white"), True), (pygame.Color("black"), False)) - - for expected_color, fill in test_fills: - surface.fill(surface_color) - mask = pygame.mask.Mask(size, fill=fill) - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__create_surface(self): - """Ensures empty and full masks can be drawn onto a created surface.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - size = (33, 65) - test_fills = ((pygame.Color("white"), True), (pygame.Color("black"), False)) - - for expected_color, fill in test_fills: - mask = pygame.mask.Mask(size, fill=fill) - - for use_arg in (True, False): - if use_arg: - to_surface = mask.to_surface(None) - else: - to_surface = mask.to_surface() - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__surface_param(self): - """Ensures to_surface accepts a surface arg/kwarg.""" - expected_ref_count = 4 - expected_color = pygame.Color("white") - surface_color = pygame.Color("red") - size = (5, 3) - mask = pygame.mask.Mask(size, fill=True) - surface = pygame.Surface(size) - kwargs = {"surface": surface} - - for use_kwargs in (True, False): - surface.fill(surface_color) - - if use_kwargs: - to_surface = mask.to_surface(**kwargs) - else: - to_surface = mask.to_surface(kwargs["surface"]) - - self.assertIs(to_surface, surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__setsurface_param(self): - """Ensures to_surface accepts a setsurface arg/kwarg.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - expected_color = pygame.Color("red") - size = (5, 3) - mask = pygame.mask.Mask(size, fill=True) - setsurface = pygame.Surface(size, expected_flag, expected_depth) - setsurface.fill(expected_color) - kwargs = {"setsurface": setsurface} - - for use_kwargs in (True, False): - if use_kwargs: - to_surface = mask.to_surface(**kwargs) - else: - to_surface = mask.to_surface(None, kwargs["setsurface"]) - - self.assertIsInstance(to_surface, pygame.Surface) - - if not 
IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetsurface_param(self): - """Ensures to_surface accepts a unsetsurface arg/kwarg.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - expected_color = pygame.Color("red") - size = (5, 3) - mask = pygame.mask.Mask(size) - unsetsurface = pygame.Surface(size, expected_flag, expected_depth) - unsetsurface.fill(expected_color) - kwargs = {"unsetsurface": unsetsurface} - - for use_kwargs in (True, False): - if use_kwargs: - to_surface = mask.to_surface(**kwargs) - else: - to_surface = mask.to_surface(None, None, kwargs["unsetsurface"]) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__setcolor_param(self): - """Ensures to_surface accepts a setcolor arg/kwarg.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - expected_color = pygame.Color("red") - size = (5, 3) - mask = pygame.mask.Mask(size, fill=True) - kwargs = {"setcolor": expected_color} - - for use_kwargs in (True, False): - if use_kwargs: - to_surface = mask.to_surface(**kwargs) - else: - to_surface = mask.to_surface(None, None, None, kwargs["setcolor"]) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__setcolor_default(self): - """Ensures the default setcolor is correct.""" - expected_color = pygame.Color("white") - size = (3, 7) - mask = pygame.mask.Mask(size, fill=True) - - to_surface = mask.to_surface( - surface=None, setsurface=None, unsetsurface=None, unsetcolor=None - ) - - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetcolor_param(self): - """Ensures to_surface accepts a unsetcolor arg/kwarg.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - expected_color = pygame.Color("red") - size = (5, 3) - mask = pygame.mask.Mask(size) - kwargs = {"unsetcolor": expected_color} - - for use_kwargs in (True, False): - if use_kwargs: - to_surface = mask.to_surface(**kwargs) - else: - to_surface = mask.to_surface( - None, None, None, None, kwargs["unsetcolor"] - ) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetcolor_default(self): - """Ensures the default unsetcolor is correct.""" - expected_color = pygame.Color("black") - size = (3, 7) - mask = 
pygame.mask.Mask(size) - - to_surface = mask.to_surface( - surface=None, setsurface=None, unsetsurface=None, setcolor=None - ) - - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__dest_param(self): - """Ensures to_surface accepts a dest arg/kwarg.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - default_surface_color = (0, 0, 0, 0) - default_unsetcolor = pygame.Color("black") - dest = (0, 0) - size = (5, 3) - mask = pygame.mask.Mask(size) - kwargs = {"dest": dest} - - for use_kwargs in (True, False): - if use_kwargs: - expected_color = default_unsetcolor - - to_surface = mask.to_surface(**kwargs) - else: - expected_color = default_surface_color - - to_surface = mask.to_surface( - None, None, None, None, None, kwargs["dest"] - ) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__dest_default(self): - """Ensures the default dest is correct.""" - expected_color = pygame.Color("white") - surface_color = pygame.Color("red") - - mask_size = (3, 2) - mask = pygame.mask.Mask(mask_size, fill=True) - mask_rect = mask.get_rect() - - # Make the surface bigger than the mask. - surf_size = (mask_size[0] + 2, mask_size[1] + 1) - surface = pygame.Surface(surf_size, SRCALPHA, 32) - surface.fill(surface_color) - - to_surface = mask.to_surface( - surface, setsurface=None, unsetsurface=None, unsetcolor=None - ) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), surf_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - @unittest.expectedFailure - def test_to_surface__area_param(self): - """Ensures to_surface accepts an area arg/kwarg.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - default_surface_color = (0, 0, 0, 0) - default_unsetcolor = pygame.Color("black") - size = (5, 3) - mask = pygame.mask.Mask(size) - kwargs = {"area": mask.get_rect()} - - for use_kwargs in (True, False): - if use_kwargs: - expected_color = default_unsetcolor - - to_surface = mask.to_surface(**kwargs) - else: - expected_color = default_surface_color - - to_surface = mask.to_surface( - None, None, None, None, None, (0, 0), kwargs["area"] - ) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__area_default(self): - """Ensures the default area is correct.""" - expected_color = pygame.Color("white") - surface_color = pygame.Color("red") - - mask_size = (3, 2) - mask = pygame.mask.Mask(mask_size, fill=True) - mask_rect = mask.get_rect() - - # Make the surface bigger than the mask. The default area is the full - # area of the mask. 
- surf_size = (mask_size[0] + 2, mask_size[1] + 1) - surface = pygame.Surface(surf_size, SRCALPHA, 32) - surface.fill(surface_color) - - to_surface = mask.to_surface( - surface, setsurface=None, unsetsurface=None, unsetcolor=None - ) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), surf_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__kwargs(self): - """Ensures to_surface accepts the correct kwargs.""" - expected_color = pygame.Color("white") - size = (5, 3) - mask = pygame.mask.Mask(size, fill=True) - surface = pygame.Surface(size) - surface_color = pygame.Color("red") - setsurface = surface.copy() - setsurface.fill(expected_color) - - test_data = ( - (None, None), # None entry allows loop to test all kwargs on first pass. - ("dest", (0, 0)), - ("unsetcolor", pygame.Color("yellow")), - ("setcolor", expected_color), - ("unsetsurface", surface.copy()), - ("setsurface", setsurface), - ("surface", surface), - ) - - kwargs = dict(test_data) - - for name, _ in test_data: - kwargs.pop(name) - surface.fill(surface_color) # Clear for each test. - - to_surface = mask.to_surface(**kwargs) - - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__kwargs_create_surface(self): - """Ensures to_surface accepts the correct kwargs - when creating a surface. - """ - expected_color = pygame.Color("black") - size = (5, 3) - mask = pygame.mask.Mask(size) - setsurface = pygame.Surface(size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - unsetsurface = setsurface.copy() - unsetsurface.fill(expected_color) - - test_data = ( - (None, None), # None entry allows loop to test all kwargs on first pass. - ("dest", (0, 0)), - ("unsetcolor", expected_color), - ("setcolor", pygame.Color("yellow")), - ("unsetsurface", unsetsurface), - ("setsurface", setsurface), - ("surface", None), - ) - kwargs = dict(test_data) - - for name, _ in test_data: - kwargs.pop(name) - - to_surface = mask.to_surface(**kwargs) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__kwargs_order_independent(self): - """Ensures to_surface kwargs are not order dependent.""" - expected_color = pygame.Color("blue") - size = (3, 2) - mask = pygame.mask.Mask(size, fill=True) - surface = pygame.Surface(size) - - to_surface = mask.to_surface( - dest=(0, 0), - setcolor=expected_color, - unsetcolor=None, - surface=surface, - unsetsurface=pygame.Surface(size), - setsurface=None, - ) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__args_invalid_types(self): - """Ensures to_surface detects invalid kwarg types.""" - size = (3, 2) - mask = pygame.mask.Mask(size, fill=True) - invalid_surf = pygame.Color("green") - invalid_color = pygame.Surface(size) - - with self.assertRaises(TypeError): - # Invalid dest. - mask.to_surface(None, None, None, None, None, (0,)) - - with self.assertRaises(TypeError): - # Invalid unsetcolor. - mask.to_surface(None, None, None, None, invalid_color) - - with self.assertRaises(TypeError): - # Invalid setcolor. 
- mask.to_surface(None, None, None, invalid_color, None) - - with self.assertRaises(TypeError): - # Invalid unsetsurface. - mask.to_surface(None, None, invalid_surf, None, None) - - with self.assertRaises(TypeError): - # Invalid setsurface. - mask.to_surface(None, invalid_surf, None, None, None) - - with self.assertRaises(TypeError): - # Invalid surface. - mask.to_surface(invalid_surf, None, None, None, None) - - def test_to_surface__kwargs_invalid_types(self): - """Ensures to_surface detects invalid kwarg types.""" - size = (3, 2) - mask = pygame.mask.Mask(size) - - valid_kwargs = { - "surface": pygame.Surface(size), - "setsurface": pygame.Surface(size), - "unsetsurface": pygame.Surface(size), - "setcolor": pygame.Color("green"), - "unsetcolor": pygame.Color("green"), - "dest": (0, 0), - } - - invalid_kwargs = { - "surface": (1, 2, 3, 4), - "setsurface": pygame.Color("green"), - "unsetsurface": ((1, 2), (2, 1)), - "setcolor": pygame.Mask((1, 2)), - "unsetcolor": pygame.Surface((2, 2)), - "dest": (0, 0, 0), - } - - kwarg_order = ( - "surface", - "setsurface", - "unsetsurface", - "setcolor", - "unsetcolor", - "dest", - ) - - for kwarg in kwarg_order: - kwargs = dict(valid_kwargs) - kwargs[kwarg] = invalid_kwargs[kwarg] - - with self.assertRaises(TypeError): - mask.to_surface(**kwargs) - - def test_to_surface__kwargs_invalid_name(self): - """Ensures to_surface detects invalid kwarg names.""" - mask = pygame.mask.Mask((3, 2)) - kwargs = {"setcolour": pygame.Color("red")} - - with self.assertRaises(TypeError): - mask.to_surface(**kwargs) - - def test_to_surface__args_and_kwargs(self): - """Ensures to_surface accepts a combination of args/kwargs""" - size = (5, 3) - - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("yellow") - unsetsurface_color = pygame.Color("blue") - setcolor = pygame.Color("green") - unsetcolor = pygame.Color("cyan") - - surface = pygame.Surface(size, SRCALPHA, 32) - setsurface = surface.copy() - unsetsurface = surface.copy() - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - mask = pygame.mask.Mask(size, fill=True) - expected_color = setsurface_color - - test_data = ( - (None, None), # None entry allows loop to test all kwargs on first pass. - ("surface", surface), - ("setsurface", setsurface), - ("unsetsurface", unsetsurface), - ("setcolor", setcolor), - ("unsetcolor", unsetcolor), - ("dest", (0, 0)), - ) - - args = [] - kwargs = dict(test_data) - - # Loop gradually moves the kwargs to args. 
- for name, value in test_data: - if name is not None: - args.append(value) - kwargs.pop(name) - - surface.fill(surface_color) - - to_surface = mask.to_surface(*args, **kwargs) - - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__valid_setcolor_formats(self): - """Ensures to_surface handles valid setcolor formats correctly.""" - size = (5, 3) - mask = pygame.mask.Mask(size, fill=True) - surface = pygame.Surface(size, SRCALPHA, 32) - expected_color = pygame.Color("green") - test_colors = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(expected_color), - expected_color, - "green", - "#00FF00FF", - "0x00FF00FF", - ) - - for setcolor in test_colors: - to_surface = mask.to_surface(setcolor=setcolor) - - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__valid_unsetcolor_formats(self): - """Ensures to_surface handles valid unsetcolor formats correctly.""" - size = (5, 3) - mask = pygame.mask.Mask(size) - surface = pygame.Surface(size, SRCALPHA, 32) - expected_color = pygame.Color("green") - test_colors = ( - (0, 255, 0), - (0, 255, 0, 255), - surface.map_rgb(expected_color), - expected_color, - "green", - "#00FF00FF", - "0x00FF00FF", - ) - - for unsetcolor in test_colors: - to_surface = mask.to_surface(unsetcolor=unsetcolor) - - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__invalid_setcolor_formats(self): - """Ensures to_surface handles invalid setcolor formats correctly.""" - mask = pygame.mask.Mask((5, 3)) - - for setcolor in ("green color", "#00FF00FF0", "0x00FF00FF0", (1, 2)): - with self.assertRaises(ValueError): - mask.to_surface(setcolor=setcolor) - - for setcolor in (pygame.Surface((1, 2)), pygame.Mask((2, 1)), 1.1): - with self.assertRaises(TypeError): - mask.to_surface(setcolor=setcolor) - - def test_to_surface__invalid_unsetcolor_formats(self): - """Ensures to_surface handles invalid unsetcolor formats correctly.""" - mask = pygame.mask.Mask((5, 3)) - - for unsetcolor in ("green color", "#00FF00FF0", "0x00FF00FF0", (1, 2)): - with self.assertRaises(ValueError): - mask.to_surface(unsetcolor=unsetcolor) - - for unsetcolor in (pygame.Surface((1, 2)), pygame.Mask((2, 1)), 1.1): - with self.assertRaises(TypeError): - mask.to_surface(unsetcolor=unsetcolor) - - def test_to_surface__valid_dest_formats(self): - """Ensures to_surface handles valid dest formats correctly.""" - expected_color = pygame.Color("white") - mask = pygame.mask.Mask((3, 5), fill=True) - dests = ( - (0, 0), - [0, 0], - Vector2(0, 0), - (0, 0, 100, 100), - pygame.Rect((0, 0), (10, 10)), - ) - - for dest in dests: - to_surface = mask.to_surface(dest=dest) - - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__invalid_dest_formats(self): - """Ensures to_surface handles invalid dest formats correctly.""" - mask = pygame.mask.Mask((3, 5)) - invalid_dests = ( - (0,), # Incorrect size. - (0, 0, 0), # Incorrect size. - set([0, 1]), # Incorrect type. - {0: 1}, # Incorrect type. - Rect, - ) # Incorrect type. 
- - for dest in invalid_dests: - with self.assertRaises(TypeError): - mask.to_surface(dest=dest) - - def test_to_surface__negative_sized_dest_rect(self): - """Ensures to_surface correctly handles negative sized dest rects.""" - expected_color = pygame.Color("white") - mask = pygame.mask.Mask((3, 5), fill=True) - dests = ( - pygame.Rect((0, 0), (10, -10)), - pygame.Rect((0, 0), (-10, 10)), - pygame.Rect((0, 0), (-10, -10)), - ) - - for dest in dests: - to_surface = mask.to_surface(dest=dest) - - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__zero_sized_dest_rect(self): - """Ensures to_surface correctly handles zero sized dest rects.""" - expected_color = pygame.Color("white") - mask = pygame.mask.Mask((3, 5), fill=True) - dests = ( - pygame.Rect((0, 0), (0, 10)), - pygame.Rect((0, 0), (10, 0)), - pygame.Rect((0, 0), (0, 0)), - ) - - for dest in dests: - to_surface = mask.to_surface(dest=dest) - - assertSurfaceFilled(self, to_surface, expected_color) - - @unittest.expectedFailure - def test_to_surface__valid_area_formats(self): - """Ensures to_surface handles valid area formats correctly.""" - size = (3, 5) - surface_color = pygame.Color("red") - expected_color = pygame.Color("white") - surface = pygame.Surface(size) - mask = pygame.mask.Mask(size, fill=True) - area_pos = (0, 0) - area_size = (2, 1) - areas = ( - (area_pos[0], area_pos[1], area_size[0], area_size[1]), - (area_pos, area_size), - (area_pos, list(area_size)), - (list(area_pos), area_size), - (list(area_pos), list(area_size)), - [area_pos[0], area_pos[1], area_size[0], area_size[1]], - [area_pos, area_size], - [area_pos, list(area_size)], - [list(area_pos), area_size], - [list(area_pos), list(area_size)], - pygame.Rect(area_pos, area_size), - ) - - for area in areas: - surface.fill(surface_color) - area_rect = pygame.Rect(area) - - to_surface = mask.to_surface(surface, area=area) - - assertSurfaceFilled(self, to_surface, expected_color, area_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, area_rect) - - @unittest.expectedFailure - def test_to_surface__invalid_area_formats(self): - """Ensures to_surface handles invalid area formats correctly.""" - mask = pygame.mask.Mask((3, 5)) - invalid_areas = ( - (0,), # Incorrect size. - (0, 0), # Incorrect size. - (0, 0, 1), # Incorrect size. - ((0, 0), (1,)), # Incorrect size. - ((0,), (1, 1)), # Incorrect size. - set([0, 1, 2, 3]), # Incorrect type. - {0: 1, 2: 3}, # Incorrect type. - Rect, # Incorrect type. - ) - - for area in invalid_areas: - with self.assertRaisesRegex(TypeError, "invalid area argument"): - unused_to_surface = mask.to_surface(area=area) - - @unittest.expectedFailure - def test_to_surface__negative_sized_area_rect(self): - """Ensures to_surface correctly handles negative sized area rects.""" - size = (3, 5) - surface_color = pygame.Color("red") - expected_color = pygame.Color("white") - surface = pygame.Surface(size) - mask = pygame.mask.Mask(size) - mask.set_at((0, 0)) - - # These rects should cause position (0, 0) of the mask to be drawn. 
- areas = ( - pygame.Rect((0, 1), (1, -1)), - pygame.Rect((1, 0), (-1, 1)), - pygame.Rect((1, 1), (-1, -1)), - ) - - for area in areas: - surface.fill(surface_color) - - to_surface = mask.to_surface(surface, area=area) - - assertSurfaceFilled(self, to_surface, expected_color, area) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, area) - - @unittest.expectedFailure - def test_to_surface__zero_sized_area_rect(self): - """Ensures to_surface correctly handles zero sized area rects.""" - size = (3, 5) - expected_color = pygame.Color("red") - surface = pygame.Surface(size) - mask = pygame.mask.Mask(size, fill=True) - - # Zero sized rect areas should cause none of the mask to be drawn. - areas = ( - pygame.Rect((0, 0), (0, 1)), - pygame.Rect((0, 0), (1, 0)), - pygame.Rect((0, 0), (0, 0)), - ) - - for area in areas: - surface.fill(expected_color) - - to_surface = mask.to_surface(surface, area=area) - - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__default_surface_with_param_combinations(self): - """Ensures to_surface works with a default surface value - and combinations of other parameters. - - This tests many different parameter combinations with full and empty - masks. - """ - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - size = (5, 3) - dest = (0, 0) - - default_surface_color = (0, 0, 0, 0) - setsurface_color = pygame.Color("yellow") - unsetsurface_color = pygame.Color("blue") - setcolor = pygame.Color("green") - unsetcolor = pygame.Color("cyan") - - setsurface = pygame.Surface(size, expected_flag, expected_depth) - unsetsurface = setsurface.copy() - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - kwargs = { - "setsurface": None, - "unsetsurface": None, - "setcolor": None, - "unsetcolor": None, - "dest": None, - } - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - - # Test different combinations of parameters. - for setsurface_param in (setsurface, None): - kwargs["setsurface"] = setsurface_param - - for unsetsurface_param in (unsetsurface, None): - kwargs["unsetsurface"] = unsetsurface_param - - for setcolor_param in (setcolor, None): - kwargs["setcolor"] = setcolor_param - - for unsetcolor_param in (unsetcolor, None): - kwargs["unsetcolor"] = unsetcolor_param - - for dest_param in (dest, None): - if dest_param is None: - kwargs.pop("dest", None) - else: - kwargs["dest"] = dest_param - - if fill: - if setsurface_param is not None: - expected_color = setsurface_color - elif setcolor_param is not None: - expected_color = setcolor - else: - expected_color = default_surface_color - else: - if unsetsurface_param is not None: - expected_color = unsetsurface_color - elif unsetcolor_param is not None: - expected_color = unsetcolor - else: - expected_color = default_surface_color - - to_surface = mask.to_surface(**kwargs) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual( - sys.getrefcount(to_surface), expected_ref_count - ) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual( - to_surface.get_bitsize(), expected_depth - ) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__surface_with_param_combinations(self): - """Ensures to_surface works with a surface value - and combinations of other parameters. - - This tests many different parameter combinations with full and empty - masks. 
- """ - expected_ref_count = 4 - expected_flag = SRCALPHA - expected_depth = 32 - size = (5, 3) - dest = (0, 0) - - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("yellow") - unsetsurface_color = pygame.Color("blue") - setcolor = pygame.Color("green") - unsetcolor = pygame.Color("cyan") - - surface = pygame.Surface(size, expected_flag, expected_depth) - setsurface = surface.copy() - unsetsurface = surface.copy() - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - kwargs = { - "surface": surface, - "setsurface": None, - "unsetsurface": None, - "setcolor": None, - "unsetcolor": None, - "dest": None, - } - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - - # Test different combinations of parameters. - for setsurface_param in (setsurface, None): - kwargs["setsurface"] = setsurface_param - - for unsetsurface_param in (unsetsurface, None): - kwargs["unsetsurface"] = unsetsurface_param - - for setcolor_param in (setcolor, None): - kwargs["setcolor"] = setcolor_param - - for unsetcolor_param in (unsetcolor, None): - kwargs["unsetcolor"] = unsetcolor_param - surface.fill(surface_color) # Clear for each test. - - for dest_param in (dest, None): - if dest_param is None: - kwargs.pop("dest", None) - else: - kwargs["dest"] = dest_param - - if fill: - if setsurface_param is not None: - expected_color = setsurface_color - elif setcolor_param is not None: - expected_color = setcolor - else: - expected_color = surface_color - else: - if unsetsurface_param is not None: - expected_color = unsetsurface_color - elif unsetcolor_param is not None: - expected_color = unsetcolor - else: - expected_color = surface_color - - to_surface = mask.to_surface(**kwargs) - - self.assertIs(to_surface, surface) - if not IS_PYPY: - self.assertEqual( - sys.getrefcount(to_surface), expected_ref_count - ) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual( - to_surface.get_bitsize(), expected_depth - ) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__set_and_unset_bits(self): - """Ensures that to_surface works correctly with with set/unset bits - when using the defaults for setcolor and unsetcolor. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - width, height = size = (10, 20) - mask = pygame.mask.Mask(size) - mask_rect = mask.get_rect() - - surface = pygame.Surface(size) - surface_color = pygame.Color("red") - - # Create a checkerboard pattern of set/unset bits. - for pos in ((x, y) for x in range(width) for y in range(x & 1, height, 2)): - mask.set_at(pos) - - # Test different dest values. - for dest in self.ORIGIN_OFFSETS: - mask_rect.topleft = dest - surface.fill(surface_color) - - to_surface = mask.to_surface(surface, dest=dest) - - to_surface.lock() # Lock for possible speed up. - for pos in ((x, y) for x in range(width) for y in range(height)): - mask_pos = (pos[0] - dest[0], pos[1] - dest[1]) - if not mask_rect.collidepoint(pos): - expected_color = surface_color - elif mask.get_at(mask_pos): - expected_color = default_setcolor - else: - expected_color = default_unsetcolor - - self.assertEqual(to_surface.get_at(pos), expected_color, (dest, pos)) - to_surface.unlock() - - def test_to_surface__set_and_unset_bits_with_setsurface_unsetsurface(self): - """Ensures that to_surface works correctly with with set/unset bits - when using setsurface and unsetsurface. 
- """ - width, height = size = (10, 20) - mask = pygame.mask.Mask(size) - mask_rect = mask.get_rect() - - surface = pygame.Surface(size) - surface_color = pygame.Color("red") - - setsurface = surface.copy() - setsurface_color = pygame.Color("green") - setsurface.fill(setsurface_color) - - unsetsurface = surface.copy() - unsetsurface_color = pygame.Color("blue") - unsetsurface.fill(unsetsurface_color) - - # Create a checkerboard pattern of set/unset bits. - for pos in ((x, y) for x in range(width) for y in range(x & 1, height, 2)): - mask.set_at(pos) - - # Test different dest values. - for dest in self.ORIGIN_OFFSETS: - mask_rect.topleft = dest - - # Tests the color parameters set to None and also as their - # default values. Should have no effect as they are not being - # used, but this exercises different to_surface() code. - for disable_color_params in (True, False): - surface.fill(surface_color) # Clear for each test. - - if disable_color_params: - to_surface = mask.to_surface( - surface, - dest=dest, - setsurface=setsurface, - unsetsurface=unsetsurface, - setcolor=None, - unsetcolor=None, - ) - else: - to_surface = mask.to_surface( - surface, - dest=dest, - setsurface=setsurface, - unsetsurface=unsetsurface, - ) - - to_surface.lock() # Lock for possible speed up. - - for pos in ((x, y) for x in range(width) for y in range(height)): - mask_pos = (pos[0] - dest[0], pos[1] - dest[1]) - - if not mask_rect.collidepoint(pos): - expected_color = surface_color - elif mask.get_at(mask_pos): - expected_color = setsurface_color - else: - expected_color = unsetsurface_color - - self.assertEqual(to_surface.get_at(pos), expected_color) - to_surface.unlock() - - def test_to_surface__surface_narrower_than_mask(self): - """Ensures that surfaces narrower than the mask work correctly. - - For this test the surface's width is less than the mask's width. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 20) - narrow_size = (6, 20) - - surface = pygame.Surface(narrow_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), narrow_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__setsurface_narrower_than_mask(self): - """Ensures that setsurfaces narrower than the mask work correctly. - - For this test the setsurface's width is less than the mask's width. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 20) - narrow_size = (6, 20) - - setsurface = pygame.Surface(narrow_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. 
- if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_setcolor, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_unsetcolor) - - def test_to_surface__unsetsurface_narrower_than_mask(self): - """Ensures that unsetsurfaces narrower than the mask work correctly. - - For this test the unsetsurface's width is less than the mask's width. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 20) - narrow_size = (6, 20) - - unsetsurface = pygame.Surface(narrow_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, default_setcolor) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_unsetcolor, unsetsurface_rect - ) - - def test_to_surface__setsurface_narrower_than_mask_and_colors_none(self): - """Ensures that setsurfaces narrower than the mask work correctly - when setcolor and unsetcolor are set to None. - - For this test the setsurface's width is less than the mask's width. - """ - default_surface_color = (0, 0, 0, 0) - mask_size = (10, 20) - narrow_size = (6, 20) - - setsurface = pygame.Surface(narrow_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface( - setsurface=setsurface, setcolor=None, unsetcolor=None - ) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_surface_color, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_surface_color) - - def test_to_surface__unsetsurface_narrower_than_mask_and_colors_none(self): - """Ensures that unsetsurfaces narrower than the mask work correctly - when setcolor and unsetcolor are set to None. - - For this test the unsetsurface's width is less than the mask's width. - """ - default_surface_color = (0, 0, 0, 0) - mask_size = (10, 20) - narrow_size = (6, 20) - - unsetsurface = pygame.Surface(narrow_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface( - unsetsurface=unsetsurface, setcolor=None, unsetcolor=None - ) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. 
- if fill: - assertSurfaceFilled(self, to_surface, default_surface_color) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_surface_color, unsetsurface_rect - ) - - def test_to_surface__surface_wider_than_mask(self): - """Ensures that surfaces wider than the mask work correctly. - - For this test the surface's width is greater than the mask's width. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (6, 15) - wide_size = (11, 15) - - surface = pygame.Surface(wide_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - mask_rect = mask.get_rect() - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), wide_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__setsurface_wider_than_mask(self): - """Ensures that setsurfaces wider than the mask work correctly. - - For this test the setsurface's width is greater than the mask's width. - """ - default_unsetcolor = pygame.Color("black") - mask_size = (6, 15) - wide_size = (11, 15) - - setsurface = pygame.Surface(wide_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - expected_color = setsurface_color if fill else default_unsetcolor - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetsurface_wider_than_mask(self): - """Ensures that unsetsurfaces wider than the mask work correctly. - - For this test the unsetsurface's width is greater than the mask's - width. - """ - default_setcolor = pygame.Color("white") - mask_size = (6, 15) - wide_size = (11, 15) - - unsetsurface = pygame.Surface(wide_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - expected_color = default_setcolor if fill else unsetsurface_color - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__surface_shorter_than_mask(self): - """Ensures that surfaces shorter than the mask work correctly. - - For this test the surface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 11) - short_size = (10, 6) - - surface = pygame.Surface(short_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - surface.fill(surface_color) # Clear for each test. 
- expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), short_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__setsurface_shorter_than_mask(self): - """Ensures that setsurfaces shorter than the mask work correctly. - - For this test the setsurface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 11) - short_size = (10, 6) - - setsurface = pygame.Surface(short_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_setcolor, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_unsetcolor) - - def test_to_surface__unsetsurface_shorter_than_mask(self): - """Ensures that unsetsurfaces shorter than the mask work correctly. - - For this test the unsetsurface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 11) - short_size = (10, 6) - - unsetsurface = pygame.Surface(short_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, default_setcolor) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_unsetcolor, unsetsurface_rect - ) - - def test_to_surface__setsurface_shorter_than_mask_and_colors_none(self): - """Ensures that setsurfaces shorter than the mask work correctly - when setcolor and unsetcolor are set to None. - - For this test the setsurface's height is less than the mask's height. - """ - default_surface_color = (0, 0, 0, 0) - mask_size = (10, 11) - short_size = (10, 6) - - setsurface = pygame.Surface(short_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface( - setsurface=setsurface, setcolor=None, unsetcolor=None - ) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. 
- if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_surface_color, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_surface_color) - - def test_to_surface__unsetsurface_shorter_than_mask_and_colors_none(self): - """Ensures that unsetsurfaces shorter than the mask work correctly - when setcolor and unsetcolor are set to None. - - For this test the unsetsurface's height is less than the mask's height. - """ - default_surface_color = (0, 0, 0, 0) - mask_size = (10, 11) - short_size = (10, 6) - - unsetsurface = pygame.Surface(short_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface( - unsetsurface=unsetsurface, setcolor=None, unsetcolor=None - ) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, default_surface_color) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_surface_color, unsetsurface_rect - ) - - def test_to_surface__surface_taller_than_mask(self): - """Ensures that surfaces taller than the mask work correctly. - - For this test the surface's height is greater than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 6) - tall_size = (10, 11) - - surface = pygame.Surface(tall_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - mask_rect = mask.get_rect() - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), tall_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__setsurface_taller_than_mask(self): - """Ensures that setsurfaces taller than the mask work correctly. - - For this test the setsurface's height is greater than the mask's - height. - """ - default_unsetcolor = pygame.Color("black") - mask_size = (10, 6) - tall_size = (10, 11) - - setsurface = pygame.Surface(tall_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - expected_color = setsurface_color if fill else default_unsetcolor - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetsurface_taller_than_mask(self): - """Ensures that unsetsurfaces taller than the mask work correctly. - - For this test the unsetsurface's height is greater than the mask's - height. 
- """ - default_setcolor = pygame.Color("white") - mask_size = (10, 6) - tall_size = (10, 11) - - unsetsurface = pygame.Surface(tall_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - expected_color = default_setcolor if fill else unsetsurface_color - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__surface_wider_and_taller_than_mask(self): - """Ensures that surfaces wider and taller than the mask work correctly. - - For this test the surface's width is greater than the mask's width and - the surface's height is greater than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (6, 8) - wide_tall_size = (11, 15) - - surface = pygame.Surface(wide_tall_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - mask_rect = mask.get_rect() - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), wide_tall_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__setsurface_wider_and_taller_than_mask(self): - """Ensures that setsurfaces wider and taller than the mask work - correctly. - - For this test the setsurface's width is greater than the mask's width - and the setsurface's height is greater than the mask's height. - """ - default_unsetcolor = pygame.Color("black") - mask_size = (6, 8) - wide_tall_size = (11, 15) - - setsurface = pygame.Surface(wide_tall_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - expected_color = setsurface_color if fill else default_unsetcolor - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetsurface_wider_and_taller_than_mask(self): - """Ensures that unsetsurfaces wider and taller than the mask work - correctly. - - For this test the unsetsurface's width is greater than the mask's width - and the unsetsurface's height is greater than the mask's height. 
- """ - default_setcolor = pygame.Color("white") - mask_size = (6, 8) - wide_tall_size = (11, 15) - - unsetsurface = pygame.Surface(wide_tall_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - expected_color = default_setcolor if fill else unsetsurface_color - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__surface_wider_and_shorter_than_mask(self): - """Ensures that surfaces wider and shorter than the mask work - correctly. - - For this test the surface's width is greater than the mask's width and - the surface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (7, 11) - wide_short_size = (13, 6) - - surface = pygame.Surface(wide_short_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - mask_rect = mask.get_rect() - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), wide_short_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__setsurface_wider_and_shorter_than_mask(self): - """Ensures that setsurfaces wider and shorter than the mask work - correctly. - - For this test the setsurface's width is greater than the mask's width - and the setsurface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (7, 11) - wide_short_size = (10, 6) - - setsurface = pygame.Surface(wide_short_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_setcolor, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_unsetcolor) - - def test_to_surface__unsetsurface_wider_and_shorter_than_mask(self): - """Ensures that unsetsurfaces wider and shorter than the mask work - correctly. - - For this test the unsetsurface's width is greater than the mask's width - and the unsetsurface's height is less than the mask's height. 
- """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (7, 11) - wide_short_size = (10, 6) - - unsetsurface = pygame.Surface(wide_short_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, default_setcolor) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_unsetcolor, unsetsurface_rect - ) - - def test_to_surface__surface_narrower_and_taller_than_mask(self): - """Ensures that surfaces narrower and taller than the mask work - correctly. - - For this test the surface's width is less than the mask's width and - the surface's height is greater than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 8) - narrow_tall_size = (6, 15) - - surface = pygame.Surface(narrow_tall_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - mask_rect = mask.get_rect() - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), narrow_tall_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__setsurface_narrower_and_taller_than_mask(self): - """Ensures that setsurfaces narrower and taller than the mask work - correctly. - - For this test the setsurface's width is less than the mask's width - and the setsurface's height is greater than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 8) - narrow_tall_size = (6, 15) - - setsurface = pygame.Surface(narrow_tall_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_setcolor, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_unsetcolor) - - def test_to_surface__unsetsurface_narrower_and_taller_than_mask(self): - """Ensures that unsetsurfaces narrower and taller than the mask work - correctly. - - For this test the unsetsurface's width is less than the mask's width - and the unsetsurface's height is greater than the mask's height. 
- """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 8) - narrow_tall_size = (6, 15) - - unsetsurface = pygame.Surface(narrow_tall_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, default_setcolor) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_unsetcolor, unsetsurface_rect - ) - - def test_to_surface__surface_narrower_and_shorter_than_mask(self): - """Ensures that surfaces narrower and shorter than the mask work - correctly. - - For this test the surface's width is less than the mask's width and - the surface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 18) - narrow_short_size = (6, 15) - - surface = pygame.Surface(narrow_short_size) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - mask_rect = mask.get_rect() - surface.fill(surface_color) # Clear for each test. - expected_color = default_setcolor if fill else default_unsetcolor - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), narrow_short_size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea(self, to_surface, surface_color, mask_rect) - - def test_to_surface__setsurface_narrower_and_shorter_than_mask(self): - """Ensures that setsurfaces narrower and shorter than the mask work - correctly. - - For this test the setsurface's width is less than the mask's width - and the setsurface's height is less than the mask's height. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 18) - narrow_short_size = (6, 15) - - setsurface = pygame.Surface(narrow_short_size, SRCALPHA, 32) - setsurface_color = pygame.Color("red") - setsurface.fill(setsurface_color) - setsurface_rect = setsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, setsurface_color, setsurface_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_setcolor, setsurface_rect - ) - else: - assertSurfaceFilled(self, to_surface, default_unsetcolor) - - def test_to_surface__unsetsurface_narrower_and_shorter_than_mask(self): - """Ensures that unsetsurfaces narrower and shorter than the mask work - correctly. - - For this test the unsetsurface's width is less than the mask's width - and the unsetsurface's height is less than the mask's height. 
- """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - mask_size = (10, 18) - narrow_short_size = (6, 15) - - unsetsurface = pygame.Surface(narrow_short_size, SRCALPHA, 32) - unsetsurface_color = pygame.Color("red") - unsetsurface.fill(unsetsurface_color) - unsetsurface_rect = unsetsurface.get_rect() - - for fill in (True, False): - mask = pygame.mask.Mask(mask_size, fill=fill) - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - # Different checks depending on if the mask was filled or not. - if fill: - assertSurfaceFilled(self, to_surface, default_setcolor) - else: - assertSurfaceFilled( - self, to_surface, unsetsurface_color, unsetsurface_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, default_unsetcolor, unsetsurface_rect - ) - - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_to_surface__all_surfaces_different_sizes_than_mask(self): - """Ensures that all the surface parameters can be of different sizes.""" - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - - mask_size = (10, 15) - surface_size = (11, 14) - setsurface_size = (9, 8) - unsetsurface_size = (12, 16) - - surface = pygame.Surface(surface_size) - setsurface = pygame.Surface(setsurface_size) - unsetsurface = pygame.Surface(unsetsurface_size) - - surface.fill(surface_color) - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - surface_rect = surface.get_rect() - setsurface_rect = setsurface.get_rect() - unsetsurface_rect = unsetsurface.get_rect() - - # Create a mask that is filled except for a rect in the center. - mask = pygame.mask.Mask(mask_size, fill=True) - mask_rect = mask.get_rect() - unfilled_rect = pygame.Rect((0, 0), (4, 5)) - unfilled_rect.center = mask_rect.center - - for pos in ( - (x, y) - for x in range(unfilled_rect.x, unfilled_rect.w) - for y in range(unfilled_rect.y, unfilled_rect.h) - ): - mask.set_at(pos, 0) - - to_surface = mask.to_surface(surface, setsurface, unsetsurface) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), surface_size) - - # Check each surface pixel for the correct color. - to_surface.lock() # Lock for possible speed up. - - for pos in ( - (x, y) for x in range(surface_rect.w) for y in range(surface_rect.h) - ): - if not mask_rect.collidepoint(pos): - expected_color = surface_color - elif mask.get_at(pos): - # Checking set bit colors. - if setsurface_rect.collidepoint(pos): - expected_color = setsurface_color - else: - expected_color = default_setcolor - else: - # Checking unset bit colors. 
- if unsetsurface_rect.collidepoint(pos): - expected_color = unsetsurface_color - else: - expected_color = default_unsetcolor - - self.assertEqual(to_surface.get_at(pos), expected_color) - - to_surface.unlock() - - def test_to_surface__dest_locations(self): - """Ensures dest values can be different locations on/off the surface.""" - SIDE = 7 - surface = pygame.Surface((SIDE, SIDE)) - surface_rect = surface.get_rect() - dest_rect = surface_rect.copy() - - surface_color = pygame.Color("red") - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - - directions = ( - ((s, 0) for s in range(-SIDE, SIDE + 1)), # left to right - ((0, s) for s in range(-SIDE, SIDE + 1)), # top to bottom - ((s, s) for s in range(-SIDE, SIDE + 1)), # topleft to bottomright diag - ((-s, s) for s in range(-SIDE, SIDE + 1)), # topright to bottomleft diag - ) - - for fill in (True, False): - mask = pygame.mask.Mask((SIDE, SIDE), fill=fill) - expected_color = default_setcolor if fill else default_unsetcolor - - for direction in directions: - for pos in direction: - dest_rect.topleft = pos - overlap_rect = dest_rect.clip(surface_rect) - surface.fill(surface_color) - - to_surface = mask.to_surface(surface, dest=dest_rect) - - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - @unittest.expectedFailure - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_to_surface__area_locations(self): - """Ensures area rects can be different locations on/off the mask.""" - SIDE = 7 - surface = pygame.Surface((SIDE, SIDE)) - - surface_color = pygame.Color("red") - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - - directions = ( - ((s, 0) for s in range(-SIDE, SIDE + 1)), # left to right - ((0, s) for s in range(-SIDE, SIDE + 1)), # top to bottom - ((s, s) for s in range(-SIDE, SIDE + 1)), # topleft to bottomright diag - ((-s, s) for s in range(-SIDE, SIDE + 1)), # topright to bottomleft diag - ) - - for fill in (True, False): - mask = pygame.mask.Mask((SIDE, SIDE), fill=fill) - mask_rect = mask.get_rect() - area_rect = mask_rect.copy() - expected_color = default_setcolor if fill else default_unsetcolor - - for direction in directions: - for pos in direction: - area_rect.topleft = pos - overlap_rect = area_rect.clip(mask_rect) - overlap_rect.topleft = (0, 0) - surface.fill(surface_color) - - to_surface = mask.to_surface(surface, area=area_rect) - - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - @unittest.expectedFailure - def test_to_surface__dest_and_area_locations(self): - """Ensures dest/area values can be different locations on/off the - surface/mask. - """ - SIDE = 5 - surface = pygame.Surface((SIDE, SIDE)) - surface_rect = surface.get_rect() - dest_rect = surface_rect.copy() - - surface_color = pygame.Color("red") - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - - dest_directions = ( - ((s, 0) for s in range(-SIDE, SIDE + 1)), # left to right - ((0, s) for s in range(-SIDE, SIDE + 1)), # top to bottom - ((s, s) for s in range(-SIDE, SIDE + 1)), # topleft to bottomright diag - ((-s, s) for s in range(-SIDE, SIDE + 1)), # topright to bottomleft diag - ) - - # Using only the topleft to bottomright diagonal to test the area (to - # reduce the number of loop iterations). 
- area_positions = list(dest_directions[2]) - - for fill in (True, False): - mask = pygame.mask.Mask((SIDE, SIDE), fill=fill) - mask_rect = mask.get_rect() - area_rect = mask_rect.copy() - expected_color = default_setcolor if fill else default_unsetcolor - - for dest_direction in dest_directions: - for dest_pos in dest_direction: - dest_rect.topleft = dest_pos - - for area_pos in area_positions: - area_rect.topleft = area_pos - area_overlap_rect = area_rect.clip(mask_rect) - area_overlap_rect.topleft = dest_rect.topleft - dest_overlap_rect = dest_rect.clip(area_overlap_rect) - - surface.fill(surface_color) - - to_surface = mask.to_surface( - surface, dest=dest_rect, area=area_rect - ) - - assertSurfaceFilled( - self, to_surface, expected_color, dest_overlap_rect - ) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, dest_overlap_rect - ) - - @unittest.expectedFailure - def test_to_surface__area_sizes(self): - """Ensures area rects can be different sizes.""" - SIDE = 7 - SIZES = ( - (0, 0), - (0, 1), - (1, 0), - (1, 1), - (SIDE - 1, SIDE - 1), - (SIDE - 1, SIDE), - (SIDE, SIDE - 1), - (SIDE, SIDE), - (SIDE + 1, SIDE), - (SIDE, SIDE + 1), - (SIDE + 1, SIDE + 1), - ) - - surface = pygame.Surface((SIDE, SIDE)) - surface_color = pygame.Color("red") - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - - for fill in (True, False): - mask = pygame.mask.Mask((SIDE, SIDE), fill=fill) - mask_rect = mask.get_rect() - expected_color = default_setcolor if fill else default_unsetcolor - - for size in SIZES: - area_rect = pygame.Rect((0, 0), size) - - for pos in self.ORIGIN_OFFSETS: - area_rect.topleft = pos - overlap_rect = area_rect.clip(mask_rect) - overlap_rect.topleft = (0, 0) - surface.fill(surface_color) - - to_surface = mask.to_surface(surface, area=area_rect) - - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - def test_to_surface__surface_color_alphas(self): - """Ensures the setsurface/unsetsurface color alpha values are respected.""" - size = (13, 17) - setsurface_color = pygame.Color("green") - setsurface_color.a = 53 - unsetsurface_color = pygame.Color("blue") - unsetsurface_color.a = 109 - - setsurface = pygame.Surface(size, flags=SRCALPHA, depth=32) - unsetsurface = pygame.Surface(size, flags=SRCALPHA, depth=32) - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - expected_color = setsurface_color if fill else unsetsurface_color - - to_surface = mask.to_surface( - setsurface=setsurface, unsetsurface=unsetsurface - ) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__color_alphas(self): - """Ensures the setcolor/unsetcolor alpha values are respected.""" - size = (13, 17) - setcolor = pygame.Color("green") - setcolor.a = 35 - unsetcolor = pygame.Color("blue") - unsetcolor.a = 213 - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - expected_color = setcolor if fill else unsetcolor - - to_surface = mask.to_surface(setcolor=setcolor, unsetcolor=unsetcolor) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__depths(self): - """Ensures to_surface works 
correctly with supported surface depths.""" - size = (13, 17) - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - - for depth in (8, 16, 24, 32): - surface = pygame.Surface(size, depth=depth) - setsurface = pygame.Surface(size, depth=depth) - unsetsurface = pygame.Surface(size, depth=depth) - - surface.fill(surface_color) - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - - # For non-32 bit depths, the actual color can be different from - # what was filled. - expected_color = ( - setsurface.get_at((0, 0)) if fill else unsetsurface.get_at((0, 0)) - ) - - to_surface = mask.to_surface(surface, setsurface, unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__different_depths(self): - """Ensures an exception is raised when surfaces have different depths.""" - size = (13, 17) - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - mask = pygame.mask.Mask(size) - - # Test different combinations of depths. - test_depths = ( - (8, 8, 16), # surface/setsurface/unsetsurface - (8, 8, 24), - (8, 8, 32), - (16, 16, 24), - (16, 16, 32), - (24, 16, 8), - (32, 16, 16), - (32, 32, 16), - (32, 24, 32), - ) - - for depths in test_depths: - surface = pygame.Surface(size, depth=depths[0]) - setsurface = pygame.Surface(size, depth=depths[1]) - unsetsurface = pygame.Surface(size, depth=depths[2]) - - surface.fill(surface_color) - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - with self.assertRaises(ValueError): - mask.to_surface(surface, setsurface, unsetsurface) - - def test_to_surface__different_depths_with_created_surfaces(self): - """Ensures an exception is raised when surfaces have different depths - than the created surface. - """ - size = (13, 17) - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - mask = pygame.mask.Mask(size) - - # Test different combinations of depths. The created surface always has - # a depth of 32. 
- test_depths = ( - (8, 8), # setsurface/unsetsurface - (16, 16), - (24, 24), - (24, 16), - (32, 8), - (32, 16), - (32, 24), - (16, 32), - ) - - for set_depth, unset_depth in test_depths: - setsurface = pygame.Surface(size, depth=set_depth) - unsetsurface = pygame.Surface(size, depth=unset_depth) - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - with self.assertRaises(ValueError): - mask.to_surface(setsurface=setsurface, unsetsurface=unsetsurface) - - def test_to_surface__same_srcalphas(self): - """Ensures to_surface works correctly when the SRCALPHA flag is set or not.""" - size = (13, 17) - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - - for depth in (16, 32): - for flags in (0, SRCALPHA): - surface = pygame.Surface(size, flags=flags, depth=depth) - setsurface = pygame.Surface(size, flags=flags, depth=depth) - unsetsurface = pygame.Surface(size, flags=flags, depth=depth) - - surface.fill(surface_color) - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - expected_color = setsurface_color if fill else unsetsurface_color - - to_surface = mask.to_surface(surface, setsurface, unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - if flags: - self.assertTrue(to_surface.get_flags() & flags) - - def test_to_surface__same_srcalphas_with_created_surfaces(self): - """Ensures to_surface works correctly when it creates a surface - and the SRCALPHA flag is set on both setsurface and unsetsurface. - """ - size = (13, 17) - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - # The created surface always has a depth of 32 and the SRCALPHA flag set. - expected_flags = SRCALPHA - - setsurface = pygame.Surface(size, flags=expected_flags, depth=32) - unsetsurface = pygame.Surface(size, flags=expected_flags, depth=32) - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - expected_color = setsurface_color if fill else unsetsurface_color - - to_surface = mask.to_surface( - setsurface=setsurface, unsetsurface=unsetsurface - ) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - self.assertTrue(to_surface.get_flags() & expected_flags) - - def test_to_surface__different_srcalphas(self): - """Ensures an exception is raised when surfaces have different SRCALPHA - flag settings. - """ - size = (13, 17) - surface_color = pygame.Color("red") - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - mask = pygame.mask.Mask(size) - - # Test different combinations of SRCALPHA flags. 
- test_flags = ( - (SRCALPHA, 0, 0), # surface/setsurface/unsetsurface - (SRCALPHA, SRCALPHA, 0), - (0, SRCALPHA, SRCALPHA), - (0, 0, SRCALPHA), - ) - - for depth in (16, 32): - for flags in test_flags: - surface = pygame.Surface(size, flags=flags[0], depth=depth) - setsurface = pygame.Surface(size, flags=flags[1], depth=depth) - unsetsurface = pygame.Surface(size, flags=flags[2], depth=depth) - - surface.fill(surface_color) - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - with self.assertRaises(ValueError): - mask.to_surface(surface, setsurface, unsetsurface) - - def test_to_surface__different_srcalphas_with_created_surfaces(self): - """Ensures an exception is raised when surfaces have different SRCALPHA - flag settings than the created surface. - """ - size = (13, 17) - setsurface_color = pygame.Color("green") - unsetsurface_color = pygame.Color("blue") - mask = pygame.mask.Mask(size) - - for depth in (16, 32): - # Test different combinations of SRCALPHA flags. The created - # surface always has the SRCALPHA flag set. - for flags in ((0, 0), (SRCALPHA, 0), (0, SRCALPHA)): - setsurface = pygame.Surface(size, flags=flags[0], depth=depth) - unsetsurface = pygame.Surface(size, flags=flags[1], depth=depth) - - setsurface.fill(setsurface_color) - unsetsurface.fill(unsetsurface_color) - - with self.assertRaises(ValueError): - mask.to_surface(setsurface=setsurface, unsetsurface=unsetsurface) - - def test_to_surface__dest_on_surface(self): - """Ensures dest values on the surface work correctly - when using the defaults for setcolor and unsetcolor. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - width, height = size = (5, 9) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - expected_color = default_setcolor if fill else default_unsetcolor - - # Test the dest parameter at different locations on the surface. - for dest in ((x, y) for y in range(height) for x in range(width)): - surface.fill(surface_color) # Clear for each test. - mask_rect.topleft = dest - - to_surface = mask.to_surface(surface, dest=dest) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, mask_rect - ) - - def test_to_surface__dest_on_surface_with_setsurface_unsetsurface(self): - """Ensures dest values on the surface work correctly - when using setsurface and unsetsurface. - """ - width, height = size = (5, 9) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - setsurface = surface.copy() - setsurface_color = pygame.Color("green") - setsurface.fill(setsurface_color) - - unsetsurface = surface.copy() - unsetsurface_color = pygame.Color("blue") - unsetsurface.fill(unsetsurface_color) - - # Using different kwargs to exercise different to_surface() code. - # Should not have any impact on the resulting drawn surfaces. 
- kwargs = { - "surface": surface, - "setsurface": setsurface, - "unsetsurface": unsetsurface, - "dest": None, - } - - color_kwargs = dict(kwargs) - color_kwargs.update((("setcolor", None), ("unsetcolor", None))) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - expected_color = setsurface_color if fill else unsetsurface_color - - # Test the dest parameter at different locations on the surface. - for dest in ((x, y) for y in range(height) for x in range(width)): - mask_rect.topleft = dest - - for use_color_params in (True, False): - surface.fill(surface_color) # Clear for each test. - - test_kwargs = color_kwargs if use_color_params else kwargs - test_kwargs["dest"] = dest - to_surface = mask.to_surface(**test_kwargs) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, mask_rect - ) - - def test_to_surface__dest_off_surface(self): - """Ensures dest values off the surface work correctly - when using the defaults for setcolor and unsetcolor. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - width, height = size = (5, 7) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - # Test different dests off the surface. - dests = [(-width, -height), (-width, 0), (0, -height)] - dests.extend(off_corners(surface.get_rect())) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - expected_color = default_setcolor if fill else default_unsetcolor - - for dest in dests: - surface.fill(surface_color) # Clear for each test. - mask_rect.topleft = dest - - to_surface = mask.to_surface(surface, dest=dest) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, mask_rect - ) - - def test_to_surface__dest_off_surface_with_setsurface_unsetsurface(self): - """Ensures dest values off the surface work correctly - when using setsurface and unsetsurface. - """ - width, height = size = (5, 7) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - setsurface = surface.copy() - setsurface_color = pygame.Color("green") - setsurface.fill(setsurface_color) - - unsetsurface = surface.copy() - unsetsurface_color = pygame.Color("blue") - unsetsurface.fill(unsetsurface_color) - - # Test different dests off the surface. - dests = [(-width, -height), (-width, 0), (0, -height)] - dests.extend(off_corners(surface.get_rect())) - - # Using different kwargs to exercise different to_surface() code. - # Should not have any impact on the resulting drawn surfaces. - kwargs = { - "surface": surface, - "setsurface": setsurface, - "unsetsurface": unsetsurface, - "dest": None, - } - - color_kwargs = dict(kwargs) - color_kwargs.update((("setcolor", None), ("unsetcolor", None))) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - expected_color = setsurface_color if fill else unsetsurface_color - - for dest in dests: - mask_rect.topleft = dest - - for use_color_params in (True, False): - surface.fill(surface_color) # Clear for each test. 
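A minimal sketch of the dest behaviour exercised by the tests above, assuming pygame 2.x; the names surf and mask are illustrative only. When dest places the mask partly (or wholly) off the target surface, only the overlapping region is drawn and the rest of the surface keeps its previous pixels.

import pygame

pygame.init()
surf = pygame.Surface((5, 7), pygame.SRCALPHA, 32)
surf.fill(pygame.Color("red"))
mask = pygame.mask.Mask((5, 7), fill=True)

mask.to_surface(surf, dest=(2, 3))                     # draw starting at (2, 3)
assert surf.get_at((0, 0)) == pygame.Color("red")      # outside the dest area: untouched
assert surf.get_at((2, 3)) == pygame.Color("white")    # inside: default setcolor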
- test_kwargs = color_kwargs if use_color_params else kwargs - test_kwargs["dest"] = dest - to_surface = mask.to_surface(**test_kwargs) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, mask_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, mask_rect - ) - - @unittest.expectedFailure - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_to_surface__area_on_mask(self): - """Ensures area values on the mask work correctly - when using the defaults for setcolor and unsetcolor. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - width, height = size = (5, 9) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - area_rect = mask_rect.copy() - expected_color = default_setcolor if fill else default_unsetcolor - - # Testing the area parameter at different locations on the mask. - for pos in ((x, y) for y in range(height) for x in range(width)): - surface.fill(surface_color) # Clear for each test. - area_rect.topleft = pos - overlap_rect = mask_rect.clip(area_rect) - overlap_rect.topleft = (0, 0) - - to_surface = mask.to_surface(surface, area=area_rect) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - @unittest.expectedFailure - def test_to_surface__area_on_mask_with_setsurface_unsetsurface(self): - """Ensures area values on the mask work correctly - when using setsurface and unsetsurface. - """ - width, height = size = (5, 9) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - setsurface = surface.copy() - setsurface_color = pygame.Color("green") - setsurface.fill(setsurface_color) - - unsetsurface = surface.copy() - unsetsurface_color = pygame.Color("blue") - unsetsurface.fill(unsetsurface_color) - - # Using the values in kwargs vs color_kwargs tests different to_surface - # code. Should not have any impact on the resulting drawn surfaces. - kwargs = { - "surface": surface, - "setsurface": setsurface, - "unsetsurface": unsetsurface, - "area": pygame.Rect((0, 0), size), - } - - color_kwargs = dict(kwargs) - color_kwargs.update((("setcolor", None), ("unsetcolor", None))) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - area_rect = mask_rect.copy() - expected_color = setsurface_color if fill else unsetsurface_color - - # Testing the area parameter at different locations on the mask. - for pos in ((x, y) for y in range(height) for x in range(width)): - area_rect.topleft = pos - overlap_rect = mask_rect.clip(area_rect) - overlap_rect.topleft = (0, 0) - - for use_color_params in (True, False): - surface.fill(surface_color) # Clear for each test. 
- test_kwargs = color_kwargs if use_color_params else kwargs - test_kwargs["area"].topleft = pos - overlap_rect = mask_rect.clip(test_kwargs["area"]) - overlap_rect.topleft = (0, 0) - - to_surface = mask.to_surface(**test_kwargs) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - @unittest.expectedFailure - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_to_surface__area_off_mask(self): - """Ensures area values off the mask work correctly - when using the defaults for setcolor and unsetcolor. - """ - default_setcolor = pygame.Color("white") - default_unsetcolor = pygame.Color("black") - width, height = size = (5, 7) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - # Testing positions off the mask. - positions = [(-width, -height), (-width, 0), (0, -height)] - positions.extend(off_corners(pygame.Rect((0, 0), (width, height)))) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - area_rect = mask_rect.copy() - expected_color = default_setcolor if fill else default_unsetcolor - - for pos in positions: - surface.fill(surface_color) # Clear for each test. - area_rect.topleft = pos - overlap_rect = mask_rect.clip(area_rect) - overlap_rect.topleft = (0, 0) - - to_surface = mask.to_surface(surface, area=area_rect) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - @unittest.expectedFailure - @unittest.skipIf(IS_PYPY, "Segfaults on pypy") - def test_to_surface__area_off_mask_with_setsurface_unsetsurface(self): - """Ensures area values off the mask work correctly - when using setsurface and unsetsurface. - """ - width, height = size = (5, 7) - surface = pygame.Surface(size, SRCALPHA, 32) - surface_color = pygame.Color("red") - - setsurface = surface.copy() - setsurface_color = pygame.Color("green") - setsurface.fill(setsurface_color) - - unsetsurface = surface.copy() - unsetsurface_color = pygame.Color("blue") - unsetsurface.fill(unsetsurface_color) - - # Testing positions off the mask. - positions = [(-width, -height), (-width, 0), (0, -height)] - positions.extend(off_corners(pygame.Rect((0, 0), (width, height)))) - - # Using the values in kwargs vs color_kwargs tests different to_surface - # code. Should not have any impact on the resulting drawn surfaces. - kwargs = { - "surface": surface, - "setsurface": setsurface, - "unsetsurface": unsetsurface, - "area": pygame.Rect((0, 0), size), - } - - color_kwargs = dict(kwargs) - color_kwargs.update((("setcolor", None), ("unsetcolor", None))) - - for fill in (True, False): - mask = pygame.mask.Mask(size, fill=fill) - mask_rect = mask.get_rect() - expected_color = setsurface_color if fill else unsetsurface_color - - for pos in positions: - for use_color_params in (True, False): - surface.fill(surface_color) # Clear for each test. 
- test_kwargs = color_kwargs if use_color_params else kwargs - test_kwargs["area"].topleft = pos - overlap_rect = mask_rect.clip(test_kwargs["area"]) - overlap_rect.topleft = (0, 0) - - to_surface = mask.to_surface(**test_kwargs) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color, overlap_rect) - assertSurfaceFilledIgnoreArea( - self, to_surface, surface_color, overlap_rect - ) - - def test_to_surface__surface_with_zero_size(self): - """Ensures zero sized surfaces are handled correctly.""" - expected_ref_count = 3 - size = (0, 0) - surface = pygame.Surface(size) - mask = pygame.mask.Mask((3, 4), fill=True) - - to_surface = mask.to_surface(surface) - - self.assertIs(to_surface, surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertEqual(to_surface.get_size(), size) - - def test_to_surface__setsurface_with_zero_size(self): - """Ensures zero sized setsurfaces are handled correctly.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - expected_color = pygame.Color("white") # Default setcolor. - mask_size = (2, 4) - mask = pygame.mask.Mask(mask_size, fill=True) - setsurface = pygame.Surface((0, 0), expected_flag, expected_depth) - - to_surface = mask.to_surface(setsurface=setsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_to_surface__unsetsurface_with_zero_size(self): - """Ensures zero sized unsetsurfaces are handled correctly.""" - expected_ref_count = 2 - expected_flag = SRCALPHA - expected_depth = 32 - expected_color = pygame.Color("black") # Default unsetcolor. 
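A minimal sketch of the default-surface behaviour these zero-size tests rely on, assuming pygame 2.x: when no target surface is supplied, to_surface() creates a 32-bit SRCALPHA surface the size of the mask and paints it with the default setcolor/unsetcolor.

import pygame

pygame.init()
mask = pygame.mask.Mask((4, 2), fill=True)

surf = mask.to_surface()                               # no surface given, so one is created
assert surf.get_size() == (4, 2)
assert surf.get_bitsize() == 32
assert surf.get_flags() & pygame.SRCALPHA
assert surf.get_at((0, 0)) == pygame.Color("white")    # set bits use the default setcolor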
- mask_size = (4, 2) - mask = pygame.mask.Mask(mask_size) - unsetsurface = pygame.Surface((0, 0), expected_flag, expected_depth) - - to_surface = mask.to_surface(unsetsurface=unsetsurface) - - self.assertIsInstance(to_surface, pygame.Surface) - if not IS_PYPY: - self.assertEqual(sys.getrefcount(to_surface), expected_ref_count) - self.assertTrue(to_surface.get_flags() & expected_flag) - self.assertEqual(to_surface.get_bitsize(), expected_depth) - self.assertEqual(to_surface.get_size(), mask_size) - assertSurfaceFilled(self, to_surface, expected_color) - - def test_zero_mask(self): - """Ensures masks can be created with zero sizes.""" - for size in ((100, 0), (0, 100), (0, 0)): - for fill in (True, False): - msg = "size={}, fill={}".format(size, fill) - - mask = pygame.mask.Mask(size, fill=fill) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), size, msg) - - def test_zero_mask_copy(self): - """Ensures copy correctly handles zero sized masks.""" - for expected_size in ((11, 0), (0, 11), (0, 0)): - mask = pygame.mask.Mask(expected_size) - - mask_copy = mask.copy() - - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - - def test_zero_mask_get_size(self): - """Ensures get_size correctly handles zero sized masks.""" - for expected_size in ((41, 0), (0, 40), (0, 0)): - mask = pygame.mask.Mask(expected_size) - - size = mask.get_size() - - self.assertEqual(size, expected_size) - - def test_zero_mask_get_rect(self): - """Ensures get_rect correctly handles zero sized masks.""" - for expected_size in ((4, 0), (0, 4), (0, 0)): - expected_rect = pygame.Rect((0, 0), expected_size) - mask = pygame.mask.Mask(expected_size) - - rect = mask.get_rect() - - self.assertEqual(rect, expected_rect) - - def test_zero_mask_get_at(self): - """Ensures get_at correctly handles zero sized masks.""" - for size in ((51, 0), (0, 50), (0, 0)): - mask = pygame.mask.Mask(size) - - with self.assertRaises(IndexError): - value = mask.get_at((0, 0)) - - def test_zero_mask_set_at(self): - """Ensures set_at correctly handles zero sized masks.""" - for size in ((31, 0), (0, 30), (0, 0)): - mask = pygame.mask.Mask(size) - - with self.assertRaises(IndexError): - mask.set_at((0, 0)) - - def test_zero_mask_overlap(self): - """Ensures overlap correctly handles zero sized masks. - - Tests combinations of sized and zero sized masks. - """ - offset = (0, 0) - - for size1, size2 in zero_size_pairs(51, 42): - msg = "size1={}, size2={}".format(size1, size2) - mask1 = pygame.mask.Mask(size1, fill=True) - mask2 = pygame.mask.Mask(size2, fill=True) - - overlap_pos = mask1.overlap(mask2, offset) - - self.assertIsNone(overlap_pos, msg) - - def test_zero_mask_overlap_area(self): - """Ensures overlap_area correctly handles zero sized masks. - - Tests combinations of sized and zero sized masks. - """ - offset = (0, 0) - expected_count = 0 - - for size1, size2 in zero_size_pairs(41, 52): - msg = "size1={}, size2={}".format(size1, size2) - mask1 = pygame.mask.Mask(size1, fill=True) - mask2 = pygame.mask.Mask(size2, fill=True) - - overlap_count = mask1.overlap_area(mask2, offset) - - self.assertEqual(overlap_count, expected_count, msg) - - def test_zero_mask_overlap_mask(self): - """Ensures overlap_mask correctly handles zero sized masks. - - Tests combinations of sized and zero sized masks. 
- """ - offset = (0, 0) - expected_count = 0 - - for size1, size2 in zero_size_pairs(43, 53): - msg = "size1={}, size2={}".format(size1, size2) - mask1 = pygame.mask.Mask(size1, fill=True) - mask2 = pygame.mask.Mask(size2, fill=True) - - overlap_mask = mask1.overlap_mask(mask2, offset) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask, msg) - self.assertEqual(overlap_mask.count(), expected_count, msg) - self.assertEqual(overlap_mask.get_size(), size1, msg) - - def test_zero_mask_fill(self): - """Ensures fill correctly handles zero sized masks.""" - expected_count = 0 - - for size in ((100, 0), (0, 100), (0, 0)): - mask = pygame.mask.Mask(size) - - mask.fill() - - self.assertEqual(mask.count(), expected_count, "size={}".format(size)) - - def test_zero_mask_clear(self): - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - mask = pygame.mask.Mask(size) - mask.clear() - self.assertEqual(mask.count(), 0) - - def test_zero_mask_flip(self): - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - mask = pygame.mask.Mask(size) - mask.invert() - self.assertEqual(mask.count(), 0) - - def test_zero_mask_scale(self): - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - mask = pygame.mask.Mask(size) - mask2 = mask.scale((2, 3)) - - self.assertIsInstance(mask2, pygame.mask.Mask) - self.assertEqual(mask2.get_size(), (2, 3)) - - def test_zero_mask_draw(self): - """Ensures draw correctly handles zero sized masks. - - Tests combinations of sized and zero sized masks. - """ - offset = (0, 0) - - for size1, size2 in zero_size_pairs(31, 37): - msg = "size1={}, size2={}".format(size1, size2) - mask1 = pygame.mask.Mask(size1, fill=True) - mask2 = pygame.mask.Mask(size2, fill=True) - expected_count = mask1.count() - - mask1.draw(mask2, offset) - - self.assertEqual(mask1.count(), expected_count, msg) - self.assertEqual(mask1.get_size(), size1, msg) - - def test_zero_mask_erase(self): - """Ensures erase correctly handles zero sized masks. - - Tests combinations of sized and zero sized masks. 
- """ - offset = (0, 0) - - for size1, size2 in zero_size_pairs(29, 23): - msg = "size1={}, size2={}".format(size1, size2) - mask1 = pygame.mask.Mask(size1, fill=True) - mask2 = pygame.mask.Mask(size2, fill=True) - expected_count = mask1.count() - - mask1.erase(mask2, offset) - - self.assertEqual(mask1.count(), expected_count, msg) - self.assertEqual(mask1.get_size(), size1, msg) - - def test_zero_mask_count(self): - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - mask = pygame.mask.Mask(size, fill=True) - self.assertEqual(mask.count(), 0) - - def test_zero_mask_centroid(self): - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - mask = pygame.mask.Mask(size) - self.assertEqual(mask.centroid(), (0, 0)) - - def test_zero_mask_angle(self): - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - mask = pygame.mask.Mask(size) - self.assertEqual(mask.angle(), 0.0) - - def test_zero_mask_outline(self): - """Ensures outline correctly handles zero sized masks.""" - expected_points = [] - - for size in ((61, 0), (0, 60), (0, 0)): - mask = pygame.mask.Mask(size) - - points = mask.outline() - - self.assertListEqual(points, expected_points, "size={}".format(size)) - - def test_zero_mask_outline__with_arg(self): - """Ensures outline correctly handles zero sized masks - when using the skip pixels argument.""" - expected_points = [] - - for size in ((66, 0), (0, 65), (0, 0)): - mask = pygame.mask.Mask(size) - - points = mask.outline(10) - - self.assertListEqual(points, expected_points, "size={}".format(size)) - - def test_zero_mask_convolve(self): - """Ensures convolve correctly handles zero sized masks. - - Tests the different combinations of sized and zero sized masks. - """ - for size1 in ((17, 13), (71, 0), (0, 70), (0, 0)): - mask1 = pygame.mask.Mask(size1, fill=True) - - for size2 in ((11, 7), (81, 0), (0, 60), (0, 0)): - msg = "sizes={}, {}".format(size1, size2) - mask2 = pygame.mask.Mask(size2, fill=True) - expected_size = ( - max(0, size1[0] + size2[0] - 1), - max(0, size1[1] + size2[1] - 1), - ) - - mask = mask1.convolve(mask2) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertIsNot(mask, mask2, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - - def test_zero_mask_convolve__with_output_mask(self): - """Ensures convolve correctly handles zero sized masks - when using an output mask argument. - - Tests the different combinations of sized and zero sized masks. 
- """ - for size1 in ((11, 17), (91, 0), (0, 90), (0, 0)): - mask1 = pygame.mask.Mask(size1, fill=True) - - for size2 in ((13, 11), (83, 0), (0, 62), (0, 0)): - mask2 = pygame.mask.Mask(size2, fill=True) - - for output_size in ((7, 5), (71, 0), (0, 70), (0, 0)): - msg = "sizes={}, {}, {}".format(size1, size2, output_size) - output_mask = pygame.mask.Mask(output_size) - - mask = mask1.convolve(mask2, output_mask) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertIs(mask, output_mask, msg) - self.assertEqual(mask.get_size(), output_size, msg) - - def test_zero_mask_connected_component(self): - """Ensures connected_component correctly handles zero sized masks.""" - expected_count = 0 - - for size in ((81, 0), (0, 80), (0, 0)): - msg = "size={}".format(size) - mask = pygame.mask.Mask(size) - - cc_mask = mask.connected_component() - - self.assertIsInstance(cc_mask, pygame.mask.Mask, msg) - self.assertEqual(cc_mask.get_size(), size) - self.assertEqual(cc_mask.count(), expected_count, msg) - - def test_zero_mask_connected_component__indexed(self): - """Ensures connected_component correctly handles zero sized masks - when using an index argument.""" - for size in ((91, 0), (0, 90), (0, 0)): - mask = pygame.mask.Mask(size) - - with self.assertRaises(IndexError): - cc_mask = mask.connected_component((0, 0)) - - def test_zero_mask_connected_components(self): - """Ensures connected_components correctly handles zero sized masks.""" - expected_cc_masks = [] - - for size in ((11, 0), (0, 10), (0, 0)): - mask = pygame.mask.Mask(size) - - cc_masks = mask.connected_components() - - self.assertListEqual(cc_masks, expected_cc_masks, "size={}".format(size)) - - def test_zero_mask_get_bounding_rects(self): - """Ensures get_bounding_rects correctly handles zero sized masks.""" - expected_bounding_rects = [] - - for size in ((21, 0), (0, 20), (0, 0)): - mask = pygame.mask.Mask(size) - - bounding_rects = mask.get_bounding_rects() - - self.assertListEqual( - bounding_rects, expected_bounding_rects, "size={}".format(size) - ) - - def test_zero_mask_to_surface(self): - """Ensures to_surface correctly handles zero sized masks and surfaces.""" - mask_color = pygame.Color("blue") - surf_color = pygame.Color("red") - - for surf_size in ((7, 3), (7, 0), (0, 7), (0, 0)): - surface = pygame.Surface(surf_size, SRCALPHA, 32) - surface.fill(surf_color) - - for mask_size in ((5, 0), (0, 5), (0, 0)): - mask = pygame.mask.Mask(mask_size, fill=True) - - to_surface = mask.to_surface(surface, setcolor=mask_color) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), surf_size) - - if 0 not in surf_size: - assertSurfaceFilled(self, to_surface, surf_color) - - def test_zero_mask_to_surface__create_surface(self): - """Ensures to_surface correctly handles zero sized masks and surfaces - when it has to create a default surface. 
- """ - mask_color = pygame.Color("blue") - - for mask_size in ((3, 0), (0, 3), (0, 0)): - mask = pygame.mask.Mask(mask_size, fill=True) - - to_surface = mask.to_surface(setcolor=mask_color) - - self.assertIsInstance(to_surface, pygame.Surface) - self.assertEqual(to_surface.get_size(), mask_size) - - -class SubMask(pygame.mask.Mask): - """Subclass of the Mask class to help test subclassing.""" - - def __init__(self, *args, **kwargs): - super(SubMask, self).__init__(*args, **kwargs) - self.test_attribute = True - - -class SubMaskCopy(SubMask): - """Subclass of the Mask class to help test copying subclasses.""" - - def copy(self): - mask_copy = super(SubMaskCopy, self).copy() - mask_copy.test_attribute = self.test_attribute - return mask_copy - - -class SubMaskDunderCopy(SubMask): - """Subclass of the Mask class to help test copying subclasses.""" - - def __copy__(self): - mask_copy = super(SubMaskDunderCopy, self).__copy__() - mask_copy.test_attribute = self.test_attribute - return mask_copy - - -class SubMaskCopyAndDunderCopy(SubMaskDunderCopy): - """Subclass of the Mask class to help test copying subclasses.""" - - def copy(self): - return super(SubMaskCopyAndDunderCopy, self).copy() - - -class MaskSubclassTest(unittest.TestCase): - """Test subclassed Masks.""" - - def test_subclass_mask(self): - """Ensures the Mask class can be subclassed.""" - mask = SubMask((5, 3), fill=True) - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertIsInstance(mask, SubMask) - self.assertTrue(mask.test_attribute) - - def test_subclass_copy(self): - """Ensures copy works for subclassed Masks.""" - mask = SubMask((65, 2), fill=True) - - # Test both the copy() and __copy__() methods. - for mask_copy in (mask.copy(), copy.copy(mask)): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsInstance(mask_copy, SubMask) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - # No subclass attributes because copy()/__copy__() not overridden. - self.assertFalse(hasattr(mask_copy, "test_attribute")) - - def test_subclass_copy__override_copy(self): - """Ensures copy works for subclassed Masks overriding copy.""" - mask = SubMaskCopy((65, 2), fill=True) - - # Test both the copy() and __copy__() methods. - for i, mask_copy in enumerate((mask.copy(), copy.copy(mask))): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsInstance(mask_copy, SubMaskCopy) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - - if 1 == i: - # No subclass attributes because __copy__() not overridden. - self.assertFalse(hasattr(mask_copy, "test_attribute")) - else: - self.assertTrue(mask_copy.test_attribute) - - def test_subclass_copy__override_dunder_copy(self): - """Ensures copy works for subclassed Masks overriding __copy__.""" - mask = SubMaskDunderCopy((65, 2), fill=True) - - # Test both the copy() and __copy__() methods. - for mask_copy in (mask.copy(), copy.copy(mask)): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsInstance(mask_copy, SubMaskDunderCopy) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - # Calls to copy() eventually call __copy__() internally so the - # attributes will be copied. - self.assertTrue(mask_copy.test_attribute) - - def test_subclass_copy__override_both_copy_methods(self): - """Ensures copy works for subclassed Masks overriding copy/__copy__.""" - mask = SubMaskCopyAndDunderCopy((65, 2), fill=True) - - # Test both the copy() and __copy__() methods. 
- for mask_copy in (mask.copy(), copy.copy(mask)): - self.assertIsInstance(mask_copy, pygame.mask.Mask) - self.assertIsInstance(mask_copy, SubMaskCopyAndDunderCopy) - self.assertIsNot(mask_copy, mask) - assertMaskEqual(self, mask_copy, mask) - self.assertTrue(mask_copy.test_attribute) - - def test_subclass_get_size(self): - """Ensures get_size works for subclassed Masks.""" - expected_size = (2, 3) - mask = SubMask(expected_size) - - size = mask.get_size() - - self.assertEqual(size, expected_size) - - def test_subclass_mask_get_rect(self): - """Ensures get_rect works for subclassed Masks.""" - expected_rect = pygame.Rect((0, 0), (65, 33)) - mask = SubMask(expected_rect.size, fill=True) - - rect = mask.get_rect() - - self.assertEqual(rect, expected_rect) - - def test_subclass_get_at(self): - """Ensures get_at works for subclassed Masks.""" - expected_bit = 1 - mask = SubMask((3, 2), fill=True) - - bit = mask.get_at((0, 0)) - - self.assertEqual(bit, expected_bit) - - def test_subclass_set_at(self): - """Ensures set_at works for subclassed Masks.""" - expected_bit = 1 - expected_count = 1 - pos = (0, 0) - mask = SubMask(fill=False, size=(4, 2)) - - mask.set_at(pos) - - self.assertEqual(mask.get_at(pos), expected_bit) - self.assertEqual(mask.count(), expected_count) - - def test_subclass_overlap(self): - """Ensures overlap works for subclassed Masks.""" - expected_pos = (0, 0) - mask_size = (2, 3) - masks = (pygame.mask.Mask(fill=True, size=mask_size), SubMask(mask_size, True)) - arg_masks = ( - pygame.mask.Mask(fill=True, size=mask_size), - SubMask(mask_size, True), - ) - - # Test different combinations of subclassed and non-subclassed Masks. - for mask in masks: - for arg_mask in arg_masks: - overlap_pos = mask.overlap(arg_mask, (0, 0)) - - self.assertEqual(overlap_pos, expected_pos) - - def test_subclass_overlap_area(self): - """Ensures overlap_area works for subclassed Masks.""" - mask_size = (3, 2) - expected_count = mask_size[0] * mask_size[1] - masks = (pygame.mask.Mask(fill=True, size=mask_size), SubMask(mask_size, True)) - arg_masks = ( - pygame.mask.Mask(fill=True, size=mask_size), - SubMask(mask_size, True), - ) - - # Test different combinations of subclassed and non-subclassed Masks. - for mask in masks: - for arg_mask in arg_masks: - overlap_count = mask.overlap_area(arg_mask, (0, 0)) - - self.assertEqual(overlap_count, expected_count) - - def test_subclass_overlap_mask(self): - """Ensures overlap_mask works for subclassed Masks.""" - expected_size = (4, 5) - expected_count = expected_size[0] * expected_size[1] - masks = ( - pygame.mask.Mask(fill=True, size=expected_size), - SubMask(expected_size, True), - ) - arg_masks = ( - pygame.mask.Mask(fill=True, size=expected_size), - SubMask(expected_size, True), - ) - - # Test different combinations of subclassed and non-subclassed Masks. 
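A minimal sketch of the overlap family used above, assuming pygame 2.x (mask names are illustrative): overlap() returns the first overlapping set bit or None, overlap_area() counts overlapping set bits, and overlap_mask() returns a new plain Mask sized like the calling mask.

import pygame

pygame.init()
a = pygame.mask.Mask((4, 5), fill=True)
b = pygame.mask.Mask((4, 5), fill=True)

assert a.overlap(b, (0, 0)) == (0, 0)          # first overlapping point (or None)
assert a.overlap_area(b, (0, 0)) == 4 * 5      # number of overlapping set bits
inter = a.overlap_mask(b, (0, 0))              # plain Mask, even when called on a subclass
assert inter.get_size() == (4, 5)
assert inter.count() == 4 * 5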
- for mask in masks: - for arg_mask in arg_masks: - overlap_mask = mask.overlap_mask(arg_mask, (0, 0)) - - self.assertIsInstance(overlap_mask, pygame.mask.Mask) - self.assertNotIsInstance(overlap_mask, SubMask) - self.assertEqual(overlap_mask.count(), expected_count) - self.assertEqual(overlap_mask.get_size(), expected_size) - - def test_subclass_fill(self): - """Ensures fill works for subclassed Masks.""" - mask_size = (2, 4) - expected_count = mask_size[0] * mask_size[1] - mask = SubMask(fill=False, size=mask_size) - - mask.fill() - - self.assertEqual(mask.count(), expected_count) - - def test_subclass_clear(self): - """Ensures clear works for subclassed Masks.""" - mask_size = (4, 3) - expected_count = 0 - mask = SubMask(mask_size, True) - - mask.clear() - - self.assertEqual(mask.count(), expected_count) - - def test_subclass_invert(self): - """Ensures invert works for subclassed Masks.""" - mask_size = (1, 4) - expected_count = mask_size[0] * mask_size[1] - mask = SubMask(fill=False, size=mask_size) - - mask.invert() - - self.assertEqual(mask.count(), expected_count) - - def test_subclass_scale(self): - """Ensures scale works for subclassed Masks.""" - expected_size = (5, 2) - mask = SubMask((1, 4)) - - scaled_mask = mask.scale(expected_size) - - self.assertIsInstance(scaled_mask, pygame.mask.Mask) - self.assertNotIsInstance(scaled_mask, SubMask) - self.assertEqual(scaled_mask.get_size(), expected_size) - - def test_subclass_draw(self): - """Ensures draw works for subclassed Masks.""" - mask_size = (5, 4) - expected_count = mask_size[0] * mask_size[1] - arg_masks = ( - pygame.mask.Mask(fill=True, size=mask_size), - SubMask(mask_size, True), - ) - - # Test different combinations of subclassed and non-subclassed Masks. - for mask in (pygame.mask.Mask(mask_size), SubMask(mask_size)): - for arg_mask in arg_masks: - mask.clear() # Clear for each test. - - mask.draw(arg_mask, (0, 0)) - - self.assertEqual(mask.count(), expected_count) - - def test_subclass_erase(self): - """Ensures erase works for subclassed Masks.""" - mask_size = (3, 4) - expected_count = 0 - masks = (pygame.mask.Mask(mask_size, True), SubMask(mask_size, True)) - arg_masks = (pygame.mask.Mask(mask_size, True), SubMask(mask_size, True)) - - # Test different combinations of subclassed and non-subclassed Masks. - for mask in masks: - for arg_mask in arg_masks: - mask.fill() # Fill for each test. 
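A minimal sketch of draw()/erase(), which the surrounding tests exercise for subclasses, assuming pygame 2.x: draw() ORs another mask's set bits into the calling mask and erase() clears them again.

import pygame

pygame.init()
target = pygame.mask.Mask((3, 4))              # starts empty
stamp = pygame.mask.Mask((3, 4), fill=True)    # fully set

target.draw(stamp, (0, 0))                     # set every bit that is set in stamp
assert target.count() == 3 * 4

target.erase(stamp, (0, 0))                    # clear every bit that is set in stamp
assert target.count() == 0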
- - mask.erase(arg_mask, (0, 0)) - - self.assertEqual(mask.count(), expected_count) - - def test_subclass_count(self): - """Ensures count works for subclassed Masks.""" - mask_size = (5, 2) - expected_count = mask_size[0] * mask_size[1] - 1 - mask = SubMask(fill=True, size=mask_size) - mask.set_at((1, 1), 0) - - count = mask.count() - - self.assertEqual(count, expected_count) - - def test_subclass_centroid(self): - """Ensures centroid works for subclassed Masks.""" - expected_centroid = (0, 0) - mask_size = (3, 2) - mask = SubMask((3, 2)) - - centroid = mask.centroid() - - self.assertEqual(centroid, expected_centroid) - - def test_subclass_angle(self): - """Ensures angle works for subclassed Masks.""" - expected_angle = 0.0 - mask = SubMask(size=(5, 4)) - - angle = mask.angle() - - self.assertAlmostEqual(angle, expected_angle) - - def test_subclass_outline(self): - """Ensures outline works for subclassed Masks.""" - expected_outline = [] - mask = SubMask((3, 4)) - - outline = mask.outline() - - self.assertListEqual(outline, expected_outline) - - def test_subclass_convolve(self): - """Ensures convolve works for subclassed Masks.""" - width, height = 7, 5 - mask_size = (width, height) - expected_count = 0 - expected_size = (max(0, width * 2 - 1), max(0, height * 2 - 1)) - - arg_masks = (pygame.mask.Mask(mask_size), SubMask(mask_size)) - output_masks = (pygame.mask.Mask(mask_size), SubMask(mask_size)) - - # Test different combinations of subclassed and non-subclassed Masks. - for mask in (pygame.mask.Mask(mask_size), SubMask(mask_size)): - for arg_mask in arg_masks: - convolve_mask = mask.convolve(arg_mask) - - self.assertIsInstance(convolve_mask, pygame.mask.Mask) - self.assertNotIsInstance(convolve_mask, SubMask) - self.assertEqual(convolve_mask.count(), expected_count) - self.assertEqual(convolve_mask.get_size(), expected_size) - - # Test subclassed masks for the output_mask as well. 
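A minimal sketch of the convolve() size rules the convolve tests depend on, assuming pygame 2.x: without an output mask the result is a new plain Mask of size (w1 + w2 - 1, h1 + h2 - 1); with an output mask, that mask is drawn into and returned with its own size unchanged.

import pygame

pygame.init()
a = pygame.mask.Mask((7, 5))
b = pygame.mask.Mask((7, 5))

c = a.convolve(b)                              # new mask, size (7 + 7 - 1, 5 + 5 - 1)
assert c.get_size() == (13, 9)
assert c.count() == 0                          # both inputs are empty

out = pygame.mask.Mask((7, 5))
assert a.convolve(b, out) is out               # the supplied output mask is returned
assert out.get_size() == (7, 5)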
- for output_mask in output_masks: - convolve_mask = mask.convolve(arg_mask, output_mask) - - self.assertIsInstance(convolve_mask, pygame.mask.Mask) - self.assertEqual(convolve_mask.count(), expected_count) - self.assertEqual(convolve_mask.get_size(), mask_size) - - if isinstance(output_mask, SubMask): - self.assertIsInstance(convolve_mask, SubMask) - else: - self.assertNotIsInstance(convolve_mask, SubMask) - - def test_subclass_connected_component(self): - """Ensures connected_component works for subclassed Masks.""" - expected_count = 0 - expected_size = (3, 4) - mask = SubMask(expected_size) - - cc_mask = mask.connected_component() - - self.assertIsInstance(cc_mask, pygame.mask.Mask) - self.assertNotIsInstance(cc_mask, SubMask) - self.assertEqual(cc_mask.count(), expected_count) - self.assertEqual(cc_mask.get_size(), expected_size) - - def test_subclass_connected_components(self): - """Ensures connected_components works for subclassed Masks.""" - expected_ccs = [] - mask = SubMask((5, 4)) - - ccs = mask.connected_components() - - self.assertListEqual(ccs, expected_ccs) - - def test_subclass_get_bounding_rects(self): - """Ensures get_bounding_rects works for subclassed Masks.""" - expected_bounding_rects = [] - mask = SubMask((3, 2)) - - bounding_rects = mask.get_bounding_rects() - - self.assertListEqual(bounding_rects, expected_bounding_rects) - - def test_subclass_to_surface(self): - """Ensures to_surface works for subclassed Masks.""" - expected_color = pygame.Color("blue") - size = (5, 3) - mask = SubMask(size, fill=True) - surface = pygame.Surface(size, SRCALPHA, 32) - surface.fill(pygame.Color("red")) - - to_surface = mask.to_surface(surface, setcolor=expected_color) - - self.assertIs(to_surface, surface) - self.assertEqual(to_surface.get_size(), size) - assertSurfaceFilled(self, to_surface, expected_color) - - -@unittest.skipIf(IS_PYPY, "pypy has lots of mask failures") # TODO -class MaskModuleTest(unittest.TestCase): - def test_from_surface(self): - """Ensures from_surface creates a mask with the correct bits set. - - This test checks the masks created by the from_surface function using - 16 and 32 bit surfaces. Each alpha value (0-255) is tested against - several different threshold values. - Note: On 16 bit surface the requested alpha value can differ from what - is actually set. This test uses the value read from the surface. - """ - threshold_count = 256 - surface_color = [55, 155, 255, 0] - expected_size = (11, 9) - all_set_count = expected_size[0] * expected_size[1] - none_set_count = 0 - - for depth in (16, 32): - surface = pygame.Surface(expected_size, SRCALPHA, depth) - - for alpha in range(threshold_count): - surface_color[3] = alpha - surface.fill(surface_color) - - if depth < 32: - # On surfaces with depths < 32 the requested alpha can be - # different than what gets set. Use the value read from the - # surface. - alpha = surface.get_at((0, 0))[3] - - # Test the mask created at threshold values low, high and - # around alpha. 
- threshold_test_values = set( - [-1, 0, alpha - 1, alpha, alpha + 1, 255, 256] - ) - - for threshold in threshold_test_values: - msg = "depth={}, alpha={}, threshold={}".format( - depth, alpha, threshold - ) - - if alpha > threshold: - expected_count = all_set_count - else: - expected_count = none_set_count - - mask = pygame.mask.from_surface( - surface=surface, threshold=threshold - ) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - self.assertEqual(mask.count(), expected_count, msg) - - def test_from_surface__different_alphas_32bit(self): - """Ensures from_surface creates a mask with the correct bits set - when pixels have different alpha values (32 bits surfaces). - - This test checks the masks created by the from_surface function using - a 32 bit surface. The surface is created with each pixel having a - different alpha value (0-255). This surface is tested over a range - of threshold values (0-255). - """ - offset = (0, 0) - threshold_count = 256 - surface_color = [10, 20, 30, 0] - expected_size = (threshold_count, 1) - expected_mask = pygame.Mask(expected_size, fill=True) - surface = pygame.Surface(expected_size, SRCALPHA, 32) - - # Give each pixel a different alpha. - surface.lock() # Lock for possible speed up. - for a in range(threshold_count): - surface_color[3] = a - surface.set_at((a, 0), surface_color) - surface.unlock() - - # Test the mask created for each different alpha threshold. - for threshold in range(threshold_count): - msg = "threshold={}".format(threshold) - expected_mask.set_at((threshold, 0), 0) - expected_count = expected_mask.count() - - mask = pygame.mask.from_surface(surface, threshold) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual( - mask.overlap_area(expected_mask, offset), expected_count, msg - ) - - def test_from_surface__different_alphas_16bit(self): - """Ensures from_surface creates a mask with the correct bits set - when pixels have different alpha values (16 bit surfaces). - - This test checks the masks created by the from_surface function using - a 16 bit surface. Each pixel of the surface is set with a different - alpha value (0-255), but since this is a 16 bit surface the requested - alpha value can differ from what is actually set. The resulting surface - will have groups of alpha values which complicates the test as the - alpha groups will all be set/unset at a given threshold. The setup - calculates these groups and an expected mask for each. This test data - is then used to test each alpha grouping over a range of threshold - values. - """ - threshold_count = 256 - surface_color = [110, 120, 130, 0] - expected_size = (threshold_count, 1) - surface = pygame.Surface(expected_size, SRCALPHA, 16) - - # Give each pixel a different alpha. - surface.lock() # Lock for possible speed up. - for a in range(threshold_count): - surface_color[3] = a - surface.set_at((a, 0), surface_color) - surface.unlock() - - alpha_thresholds = OrderedDict() - special_thresholds = set() - - # Create the threshold ranges and identify any thresholds that need - # special handling. - for threshold in range(threshold_count): - # On surfaces with depths < 32 the requested alpha can be different - # than what gets set. Use the value read from the surface. 
- alpha = surface.get_at((threshold, 0))[3] - - if alpha not in alpha_thresholds: - alpha_thresholds[alpha] = [threshold] - else: - alpha_thresholds[alpha].append(threshold) - - if threshold < alpha: - special_thresholds.add(threshold) - - # Use each threshold group to create an expected mask. - test_data = [] # [(from_threshold, to_threshold, expected_mask), ...] - offset = (0, 0) - erase_mask = pygame.Mask(expected_size) - exp_mask = pygame.Mask(expected_size, fill=True) - - for thresholds in alpha_thresholds.values(): - for threshold in thresholds: - if threshold in special_thresholds: - # Any special thresholds just reuse previous exp_mask. - test_data.append((threshold, threshold + 1, exp_mask)) - else: - to_threshold = thresholds[-1] + 1 - - # Make the expected mask by erasing the unset bits. - for thres in range(to_threshold): - erase_mask.set_at((thres, 0), 1) - - exp_mask = pygame.Mask(expected_size, fill=True) - exp_mask.erase(erase_mask, offset) - test_data.append((threshold, to_threshold, exp_mask)) - break - - # All the setup is done. Now test the masks created over the threshold - # ranges. - for from_threshold, to_threshold, expected_mask in test_data: - expected_count = expected_mask.count() - - for threshold in range(from_threshold, to_threshold): - msg = "threshold={}".format(threshold) - - mask = pygame.mask.from_surface(surface, threshold) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual( - mask.overlap_area(expected_mask, offset), expected_count, msg - ) - - def test_from_surface__with_colorkey_mask_cleared(self): - """Ensures from_surface creates a mask with the correct bits set - when the surface uses a colorkey. - - The surface is filled with the colorkey color so the resulting masks - are expected to have no bits set. - """ - colorkeys = ((0, 0, 0), (1, 2, 3), (50, 100, 200), (255, 255, 255)) - expected_size = (7, 11) - expected_count = 0 - - for depth in (8, 16, 24, 32): - msg = "depth={}".format(depth) - surface = pygame.Surface(expected_size, 0, depth) - - for colorkey in colorkeys: - surface.set_colorkey(colorkey) - # With some depths (i.e. 8 and 16) the actual colorkey can be - # different than what was requested via the set. - surface.fill(surface.get_colorkey()) - - mask = pygame.mask.from_surface(surface) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - self.assertEqual(mask.count(), expected_count, msg) - - def test_from_surface__with_colorkey_mask_filled(self): - """Ensures from_surface creates a mask with the correct bits set - when the surface uses a colorkey. - - The surface is filled with a color that is not the colorkey color so - the resulting masks are expected to have all bits set. 
- """ - colorkeys = ((0, 0, 0), (1, 2, 3), (10, 100, 200), (255, 255, 255)) - surface_color = (50, 100, 200) - expected_size = (11, 7) - expected_count = expected_size[0] * expected_size[1] - - for depth in (8, 16, 24, 32): - msg = "depth={}".format(depth) - surface = pygame.Surface(expected_size, 0, depth) - surface.fill(surface_color) - - for colorkey in colorkeys: - surface.set_colorkey(colorkey) - - mask = pygame.mask.from_surface(surface) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - self.assertEqual(mask.count(), expected_count, msg) - - def test_from_surface__with_colorkey_mask_pattern(self): - """Ensures from_surface creates a mask with the correct bits set - when the surface uses a colorkey. - - The surface is filled with alternating pixels of colorkey and - non-colorkey colors, so the resulting masks are expected to have - alternating bits set. - """ - - def alternate(func, set_value, unset_value, width, height): - # Helper function to set alternating values. - setbit = False - for pos in ((x, y) for x in range(width) for y in range(height)): - func(pos, set_value if setbit else unset_value) - setbit = not setbit - - surface_color = (5, 10, 20) - colorkey = (50, 60, 70) - expected_size = (11, 2) - expected_mask = pygame.mask.Mask(expected_size) - alternate(expected_mask.set_at, 1, 0, *expected_size) - expected_count = expected_mask.count() - offset = (0, 0) - - for depth in (8, 16, 24, 32): - msg = "depth={}".format(depth) - surface = pygame.Surface(expected_size, 0, depth) - # Fill the surface with alternating colors. - alternate(surface.set_at, surface_color, colorkey, *expected_size) - surface.set_colorkey(colorkey) - - mask = pygame.mask.from_surface(surface) - - self.assertIsInstance(mask, pygame.mask.Mask, msg) - self.assertEqual(mask.get_size(), expected_size, msg) - self.assertEqual(mask.count(), expected_count, msg) - self.assertEqual( - mask.overlap_area(expected_mask, offset), expected_count, msg - ) - - def test_from_threshold(self): - """Does mask.from_threshold() work correctly?""" - - a = [16, 24, 32] - - for i in a: - surf = pygame.surface.Surface((70, 70), 0, i) - surf.fill((100, 50, 200), (20, 20, 20, 20)) - mask = pygame.mask.from_threshold( - surf, (100, 50, 200, 255), (10, 10, 10, 255) - ) - - rects = mask.get_bounding_rects() - - self.assertEqual(mask.count(), 400) - self.assertEqual(mask.get_bounding_rects(), [pygame.Rect((20, 20, 20, 20))]) - - for i in a: - surf = pygame.surface.Surface((70, 70), 0, i) - surf2 = pygame.surface.Surface((70, 70), 0, i) - surf.fill((100, 100, 100)) - surf2.fill((150, 150, 150)) - surf2.fill((100, 100, 100), (40, 40, 10, 10)) - mask = pygame.mask.from_threshold( - surface=surf, - color=(0, 0, 0, 0), - threshold=(10, 10, 10, 255), - othersurface=surf2, - ) - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), 100) - self.assertEqual(mask.get_bounding_rects(), [pygame.Rect((40, 40, 10, 10))]) - - def test_zero_size_from_surface(self): - """Ensures from_surface can create masks from zero sized surfaces.""" - for size in ((100, 0), (0, 100), (0, 0)): - mask = pygame.mask.from_surface(pygame.Surface(size)) - - self.assertIsInstance(mask, pygame.mask.MaskType, "size={}".format(size)) - self.assertEqual(mask.get_size(), size) - - def test_zero_size_from_threshold(self): - a = [16, 24, 32] - sizes = ((100, 0), (0, 100), (0, 0)) - - for size in sizes: - for i in a: - surf = pygame.surface.Surface(size, 0, i) - surf.fill((100, 50, 200), (20, 
20, 20, 20)) - mask = pygame.mask.from_threshold( - surf, (100, 50, 200, 255), (10, 10, 10, 255) - ) - - self.assertEqual(mask.count(), 0) - - rects = mask.get_bounding_rects() - self.assertEqual(rects, []) - - for i in a: - surf = pygame.surface.Surface(size, 0, i) - surf2 = pygame.surface.Surface(size, 0, i) - surf.fill((100, 100, 100)) - surf2.fill((150, 150, 150)) - surf2.fill((100, 100, 100), (40, 40, 10, 10)) - mask = pygame.mask.from_threshold( - surf, (0, 0, 0, 0), (10, 10, 10, 255), surf2 - ) - - self.assertIsInstance(mask, pygame.mask.Mask) - self.assertEqual(mask.count(), 0) - - rects = mask.get_bounding_rects() - self.assertEqual(rects, []) - - def test_buffer_interface(self): - size = (1000, 100) - pixels_set = ((0, 1), (100, 10), (173, 90)) - pixels_unset = ((0, 0), (101, 10), (173, 91)) - - mask = pygame.Mask(size) - for point in pixels_set: - mask.set_at(point, 1) - - view = memoryview(mask) - intwidth = 8 * view.strides[1] - - for point in pixels_set: - x, y = point - col = x // intwidth - self.assertEqual( - (view[col, y] >> (x % intwidth)) & 1, - 1, - "the pixel at {} is not set to 1".format(point), - ) - - for point in pixels_unset: - x, y = point - col = x // intwidth - self.assertEqual( - (view[col, y] >> (x % intwidth)) & 1, - 0, - "the pixel at {} is not set to 0".format(point), - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/math_test.py b/venv/Lib/site-packages/pygame/tests/math_test.py deleted file mode 100644 index 1d8cd63..0000000 --- a/venv/Lib/site-packages/pygame/tests/math_test.py +++ /dev/null @@ -1,2326 +0,0 @@ -# -*- coding: utf-8 -*- -import sys -import unittest -import math -import platform - -import pygame.math -from pygame.math import Vector2, Vector3 - -IS_PYPY = "PyPy" == platform.python_implementation() - - -class Vector2TypeTest(unittest.TestCase): - def setUp(self): - self.zeroVec = Vector2() - self.e1 = Vector2(1, 0) - self.e2 = Vector2(0, 1) - self.t1 = (1.2, 3.4) - self.l1 = list(self.t1) - self.v1 = Vector2(self.t1) - self.t2 = (5.6, 7.8) - self.l2 = list(self.t2) - self.v2 = Vector2(self.t2) - self.s1 = 5.6 - self.s2 = 7.8 - - def testConstructionDefault(self): - v = Vector2() - self.assertEqual(v.x, 0.0) - self.assertEqual(v.y, 0.0) - - def testConstructionScalar(self): - v = Vector2(1) - self.assertEqual(v.x, 1.0) - self.assertEqual(v.y, 1.0) - - def testConstructionScalarKeywords(self): - v = Vector2(x=1) - self.assertEqual(v.x, 1.0) - self.assertEqual(v.y, 1.0) - - def testConstructionKeywords(self): - v = Vector2(x=1, y=2) - self.assertEqual(v.x, 1.0) - self.assertEqual(v.y, 2.0) - - def testConstructionXY(self): - v = Vector2(1.2, 3.4) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - - def testConstructionTuple(self): - v = Vector2((1.2, 3.4)) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - - def testConstructionList(self): - v = Vector2([1.2, 3.4]) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - - def testConstructionVector2(self): - v = Vector2(Vector2(1.2, 3.4)) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - - def testAttributeAccess(self): - tmp = self.v1.x - self.assertEqual(tmp, self.v1.x) - self.assertEqual(tmp, self.v1[0]) - tmp = self.v1.y - self.assertEqual(tmp, self.v1.y) - self.assertEqual(tmp, self.v1[1]) - self.v1.x = 3.141 - self.assertEqual(self.v1.x, 3.141) - self.v1.y = 3.141 - self.assertEqual(self.v1.y, 3.141) - - def assign_nonfloat(): - v = Vector2() - v.x = "spam" - - self.assertRaises(TypeError, 
assign_nonfloat) - - def testCopy(self): - v_copy0 = Vector2(2004.0, 2022.0) - v_copy1 = v_copy0.copy() - self.assertEqual(v_copy0.x, v_copy1.x) - self.assertEqual(v_copy0.y, v_copy1.y) - - def testSequence(self): - v = Vector2(1.2, 3.4) - Vector2()[:] - self.assertEqual(len(v), 2) - self.assertEqual(v[0], 1.2) - self.assertEqual(v[1], 3.4) - self.assertRaises(IndexError, lambda: v[2]) - self.assertEqual(v[-1], 3.4) - self.assertEqual(v[-2], 1.2) - self.assertRaises(IndexError, lambda: v[-3]) - self.assertEqual(v[:], [1.2, 3.4]) - self.assertEqual(v[1:], [3.4]) - self.assertEqual(v[:1], [1.2]) - self.assertEqual(list(v), [1.2, 3.4]) - self.assertEqual(tuple(v), (1.2, 3.4)) - v[0] = 5.6 - v[1] = 7.8 - self.assertEqual(v.x, 5.6) - self.assertEqual(v.y, 7.8) - v[:] = [9.1, 11.12] - self.assertEqual(v.x, 9.1) - self.assertEqual(v.y, 11.12) - - def overpopulate(): - v = Vector2() - v[:] = [1, 2, 3] - - self.assertRaises(ValueError, overpopulate) - - def underpopulate(): - v = Vector2() - v[:] = [1] - - self.assertRaises(ValueError, underpopulate) - - def assign_nonfloat(): - v = Vector2() - v[0] = "spam" - - self.assertRaises(TypeError, assign_nonfloat) - - def testExtendedSlicing(self): - # deletion - def delSlice(vec, start=None, stop=None, step=None): - if start is not None and stop is not None and step is not None: - del vec[start:stop:step] - elif start is not None and stop is None and step is not None: - del vec[start::step] - elif start is None and stop is None and step is not None: - del vec[::step] - - v = Vector2(self.v1) - self.assertRaises(TypeError, delSlice, v, None, None, 2) - self.assertRaises(TypeError, delSlice, v, 1, None, 2) - self.assertRaises(TypeError, delSlice, v, 1, 2, 1) - - # assignment - v = Vector2(self.v1) - v[::2] = [-1] - self.assertEqual(v, [-1, self.v1.y]) - v = Vector2(self.v1) - v[::-2] = [10] - self.assertEqual(v, [self.v1.x, 10]) - v = Vector2(self.v1) - v[::-1] = v - self.assertEqual(v, [self.v1.y, self.v1.x]) - a = Vector2(self.v1) - b = Vector2(self.v1) - c = Vector2(self.v1) - a[1:2] = [2.2] - b[slice(1, 2)] = [2.2] - c[1:2:] = (2.2,) - self.assertEqual(a, b) - self.assertEqual(a, c) - self.assertEqual(type(a), type(self.v1)) - self.assertEqual(type(b), type(self.v1)) - self.assertEqual(type(c), type(self.v1)) - - def testAdd(self): - v3 = self.v1 + self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x + self.v2.x) - self.assertEqual(v3.y, self.v1.y + self.v2.y) - v3 = self.v1 + self.t2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x + self.t2[0]) - self.assertEqual(v3.y, self.v1.y + self.t2[1]) - v3 = self.v1 + self.l2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x + self.l2[0]) - self.assertEqual(v3.y, self.v1.y + self.l2[1]) - v3 = self.t1 + self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.t1[0] + self.v2.x) - self.assertEqual(v3.y, self.t1[1] + self.v2.y) - v3 = self.l1 + self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.l1[0] + self.v2.x) - self.assertEqual(v3.y, self.l1[1] + self.v2.y) - - def testSub(self): - v3 = self.v1 - self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x - self.v2.x) - self.assertEqual(v3.y, self.v1.y - self.v2.y) - v3 = self.v1 - self.t2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x - self.t2[0]) - self.assertEqual(v3.y, self.v1.y - self.t2[1]) - v3 = self.v1 - 
self.l2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x - self.l2[0]) - self.assertEqual(v3.y, self.v1.y - self.l2[1]) - v3 = self.t1 - self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.t1[0] - self.v2.x) - self.assertEqual(v3.y, self.t1[1] - self.v2.y) - v3 = self.l1 - self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.l1[0] - self.v2.x) - self.assertEqual(v3.y, self.l1[1] - self.v2.y) - - def testScalarMultiplication(self): - v = self.s1 * self.v1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, self.s1 * self.v1.x) - self.assertEqual(v.y, self.s1 * self.v1.y) - v = self.v1 * self.s2 - self.assertEqual(v.x, self.v1.x * self.s2) - self.assertEqual(v.y, self.v1.y * self.s2) - - def testScalarDivision(self): - v = self.v1 / self.s1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertAlmostEqual(v.x, self.v1.x / self.s1) - self.assertAlmostEqual(v.y, self.v1.y / self.s1) - v = self.v1 // self.s2 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, self.v1.x // self.s2) - self.assertEqual(v.y, self.v1.y // self.s2) - - def testBool(self): - self.assertEqual(bool(self.zeroVec), False) - self.assertEqual(bool(self.v1), True) - self.assertTrue(not self.zeroVec) - self.assertTrue(self.v1) - - def testUnary(self): - v = +self.v1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, self.v1.x) - self.assertEqual(v.y, self.v1.y) - self.assertNotEqual(id(v), id(self.v1)) - v = -self.v1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, -self.v1.x) - self.assertEqual(v.y, -self.v1.y) - self.assertNotEqual(id(v), id(self.v1)) - - def testCompare(self): - int_vec = Vector2(3, -2) - flt_vec = Vector2(3.0, -2.0) - zero_vec = Vector2(0, 0) - self.assertEqual(int_vec == flt_vec, True) - self.assertEqual(int_vec != flt_vec, False) - self.assertEqual(int_vec != zero_vec, True) - self.assertEqual(flt_vec == zero_vec, False) - self.assertEqual(int_vec == (3, -2), True) - self.assertEqual(int_vec != (3, -2), False) - self.assertEqual(int_vec != [0, 0], True) - self.assertEqual(int_vec == [0, 0], False) - self.assertEqual(int_vec != 5, True) - self.assertEqual(int_vec == 5, False) - self.assertEqual(int_vec != [3, -2, 0], True) - self.assertEqual(int_vec == [3, -2, 0], False) - - def testStr(self): - v = Vector2(1.2, 3.4) - self.assertEqual(str(v), "[1.2, 3.4]") - - def testRepr(self): - v = Vector2(1.2, 3.4) - self.assertEqual(v.__repr__(), "<Vector2(1.2, 3.4)>") - self.assertEqual(v, Vector2(v.__repr__())) - - def testIter(self): - it = self.v1.__iter__() - next_ = it.__next__ - self.assertEqual(next_(), self.v1[0]) - self.assertEqual(next_(), self.v1[1]) - self.assertRaises(StopIteration, lambda: next_()) - it1 = self.v1.__iter__() - it2 = self.v1.__iter__() - self.assertNotEqual(id(it1), id(it2)) - self.assertEqual(id(it1), id(it1.__iter__())) - self.assertEqual(list(it1), list(it2)) - self.assertEqual(list(self.v1.__iter__()), self.l1) - idx = 0 - for val in self.v1: - self.assertEqual(val, self.v1[idx]) - idx += 1 - - def test_rotate(self): - v1 = Vector2(1, 0) - v2 = v1.rotate(90) - v3 = v1.rotate(90 + 360) - self.assertEqual(v1.x, 1) - self.assertEqual(v1.y, 0) - self.assertEqual(v2.x, 0) - self.assertEqual(v2.y, 1) - self.assertEqual(v3.x, v2.x) - self.assertEqual(v3.y, v2.y) - v1 = Vector2(-1, -1) - v2 = v1.rotate(-90) - self.assertEqual(v2.x, -1) - self.assertEqual(v2.y, 1) - v2 = v1.rotate(360) - self.assertEqual(v1.x, v2.x)
- self.assertEqual(v1.y, v2.y) - v2 = v1.rotate(0) - self.assertEqual(v1.x, v2.x) - self.assertEqual(v1.y, v2.y) - # issue 214 - self.assertEqual(Vector2(0, 1).rotate(359.99999999), Vector2(0, 1)) - - def test_rotate_rad(self): - tests = ( - ((1, 0), math.pi), - ((1, 0), math.pi / 2), - ((1, 0), -math.pi / 2), - ((1, 0), math.pi / 4), - ) - for initialVec, radians in tests: - self.assertEqual( - Vector2(initialVec).rotate_rad(radians), - (math.cos(radians), math.sin(radians)), - ) - - def test_rotate_ip(self): - v = Vector2(1, 0) - self.assertEqual(v.rotate_ip(90), None) - self.assertEqual(v.x, 0) - self.assertEqual(v.y, 1) - v = Vector2(-1, -1) - v.rotate_ip(-90) - self.assertEqual(v.x, -1) - self.assertEqual(v.y, 1) - - def test_rotate_rad_ip(self): - tests = ( - ((1, 0), math.pi), - ((1, 0), math.pi / 2), - ((1, 0), -math.pi / 2), - ((1, 0), math.pi / 4), - ) - for initialVec, radians in tests: - vec = Vector2(initialVec) - vec.rotate_rad_ip(radians) - self.assertEqual(vec, (math.cos(radians), math.sin(radians))) - - def test_normalize(self): - v = self.v1.normalize() - # length is 1 - self.assertAlmostEqual(v.x * v.x + v.y * v.y, 1.0) - # v1 is unchanged - self.assertEqual(self.v1.x, self.l1[0]) - self.assertEqual(self.v1.y, self.l1[1]) - # v2 is parallel to v1 - self.assertAlmostEqual(self.v1.x * v.y - self.v1.y * v.x, 0.0) - self.assertRaises(ValueError, lambda: self.zeroVec.normalize()) - - def test_normalize_ip(self): - v = +self.v1 - # v has length != 1 before normalizing - self.assertNotEqual(v.x * v.x + v.y * v.y, 1.0) - # inplace operations should return None - self.assertEqual(v.normalize_ip(), None) - # length is 1 - self.assertAlmostEqual(v.x * v.x + v.y * v.y, 1.0) - # v2 is parallel to v1 - self.assertAlmostEqual(self.v1.x * v.y - self.v1.y * v.x, 0.0) - self.assertRaises(ValueError, lambda: self.zeroVec.normalize_ip()) - - def test_is_normalized(self): - self.assertEqual(self.v1.is_normalized(), False) - v = self.v1.normalize() - self.assertEqual(v.is_normalized(), True) - self.assertEqual(self.e2.is_normalized(), True) - self.assertEqual(self.zeroVec.is_normalized(), False) - - def test_cross(self): - self.assertEqual( - self.v1.cross(self.v2), self.v1.x * self.v2.y - self.v1.y * self.v2.x - ) - self.assertEqual( - self.v1.cross(self.l2), self.v1.x * self.l2[1] - self.v1.y * self.l2[0] - ) - self.assertEqual( - self.v1.cross(self.t2), self.v1.x * self.t2[1] - self.v1.y * self.t2[0] - ) - self.assertEqual(self.v1.cross(self.v2), -self.v2.cross(self.v1)) - self.assertEqual(self.v1.cross(self.v1), 0) - - def test_dot(self): - self.assertAlmostEqual( - self.v1.dot(self.v2), self.v1.x * self.v2.x + self.v1.y * self.v2.y - ) - self.assertAlmostEqual( - self.v1.dot(self.l2), self.v1.x * self.l2[0] + self.v1.y * self.l2[1] - ) - self.assertAlmostEqual( - self.v1.dot(self.t2), self.v1.x * self.t2[0] + self.v1.y * self.t2[1] - ) - self.assertEqual(self.v1.dot(self.v2), self.v2.dot(self.v1)) - self.assertEqual(self.v1.dot(self.v2), self.v1 * self.v2) - - def test_angle_to(self): - self.assertEqual( - self.v1.rotate(self.v1.angle_to(self.v2)).normalize(), self.v2.normalize() - ) - self.assertEqual(Vector2(1, 1).angle_to((-1, 1)), 90) - self.assertEqual(Vector2(1, 0).angle_to((0, -1)), -90) - self.assertEqual(Vector2(1, 0).angle_to((-1, 1)), 135) - self.assertEqual(abs(Vector2(1, 0).angle_to((-1, 0))), 180) - - def test_scale_to_length(self): - v = Vector2(1, 1) - v.scale_to_length(2.5) - self.assertEqual(v, Vector2(2.5, 2.5) / math.sqrt(2)) - self.assertRaises(ValueError, lambda: 
self.zeroVec.scale_to_length(1)) - self.assertEqual(v.scale_to_length(0), None) - self.assertEqual(v, self.zeroVec) - - def test_length(self): - self.assertEqual(Vector2(3, 4).length(), 5) - self.assertEqual(Vector2(-3, 4).length(), 5) - self.assertEqual(self.zeroVec.length(), 0) - - def test_length_squared(self): - self.assertEqual(Vector2(3, 4).length_squared(), 25) - self.assertEqual(Vector2(-3, 4).length_squared(), 25) - self.assertEqual(self.zeroVec.length_squared(), 0) - - def test_reflect(self): - v = Vector2(1, -1) - n = Vector2(0, 1) - self.assertEqual(v.reflect(n), Vector2(1, 1)) - self.assertEqual(v.reflect(3 * n), v.reflect(n)) - self.assertEqual(v.reflect(-v), -v) - self.assertRaises(ValueError, lambda: v.reflect(self.zeroVec)) - - def test_reflect_ip(self): - v1 = Vector2(1, -1) - v2 = Vector2(v1) - n = Vector2(0, 1) - self.assertEqual(v2.reflect_ip(n), None) - self.assertEqual(v2, Vector2(1, 1)) - v2 = Vector2(v1) - v2.reflect_ip(3 * n) - self.assertEqual(v2, v1.reflect(n)) - v2 = Vector2(v1) - v2.reflect_ip(-v1) - self.assertEqual(v2, -v1) - self.assertRaises(ValueError, lambda: v2.reflect_ip(Vector2())) - - def test_distance_to(self): - diff = self.v1 - self.v2 - self.assertEqual(self.e1.distance_to(self.e2), math.sqrt(2)) - self.assertAlmostEqual( - self.v1.distance_to(self.v2), math.sqrt(diff.x * diff.x + diff.y * diff.y) - ) - self.assertEqual(self.v1.distance_to(self.v1), 0) - self.assertEqual(self.v1.distance_to(self.v2), self.v2.distance_to(self.v1)) - - def test_distance_squared_to(self): - diff = self.v1 - self.v2 - self.assertEqual(self.e1.distance_squared_to(self.e2), 2) - self.assertAlmostEqual( - self.v1.distance_squared_to(self.v2), diff.x * diff.x + diff.y * diff.y - ) - self.assertEqual(self.v1.distance_squared_to(self.v1), 0) - self.assertEqual( - self.v1.distance_squared_to(self.v2), self.v2.distance_squared_to(self.v1) - ) - - def test_update(self): - v = Vector2(3, 4) - v.update(0) - self.assertEqual(v, Vector2((0, 0))) - v.update(5, 1) - self.assertEqual(v, Vector2(5, 1)) - v.update((4, 1)) - self.assertNotEqual(v, Vector2((5, 1))) - - def test_swizzle(self): - self.assertEqual(self.v1.yx, (self.v1.y, self.v1.x)) - self.assertEqual( - self.v1.xxyyxy, - (self.v1.x, self.v1.x, self.v1.y, self.v1.y, self.v1.x, self.v1.y), - ) - self.v1.xy = self.t2 - self.assertEqual(self.v1, self.t2) - self.v1.yx = self.t2 - self.assertEqual(self.v1, (self.t2[1], self.t2[0])) - self.assertEqual(type(self.v1), Vector2) - - def invalidSwizzleX(): - Vector2().xx = (1, 2) - - def invalidSwizzleY(): - Vector2().yy = (1, 2) - - self.assertRaises(AttributeError, invalidSwizzleX) - self.assertRaises(AttributeError, invalidSwizzleY) - - def invalidAssignment(): - Vector2().xy = 3 - - self.assertRaises(TypeError, invalidAssignment) - - def unicodeAttribute(): - getattr(Vector2(), "ä") - - self.assertRaises(AttributeError, unicodeAttribute) - - def test_swizzle_return_types(self): - self.assertEqual(type(self.v1.x), float) - self.assertEqual(type(self.v1.xy), Vector2) - self.assertEqual(type(self.v1.xyx), Vector3) - # but we don't have vector4 or above... so tuple. 
- self.assertEqual(type(self.v1.xyxy), tuple) - self.assertEqual(type(self.v1.xyxyx), tuple) - - def test_elementwise(self): - # behaviour for "elementwise op scalar" - self.assertEqual( - self.v1.elementwise() + self.s1, (self.v1.x + self.s1, self.v1.y + self.s1) - ) - self.assertEqual( - self.v1.elementwise() - self.s1, (self.v1.x - self.s1, self.v1.y - self.s1) - ) - self.assertEqual( - self.v1.elementwise() * self.s2, (self.v1.x * self.s2, self.v1.y * self.s2) - ) - self.assertEqual( - self.v1.elementwise() / self.s2, (self.v1.x / self.s2, self.v1.y / self.s2) - ) - self.assertEqual( - self.v1.elementwise() // self.s1, - (self.v1.x // self.s1, self.v1.y // self.s1), - ) - self.assertEqual( - self.v1.elementwise() ** self.s1, - (self.v1.x ** self.s1, self.v1.y ** self.s1), - ) - self.assertEqual( - self.v1.elementwise() % self.s1, (self.v1.x % self.s1, self.v1.y % self.s1) - ) - self.assertEqual( - self.v1.elementwise() > self.s1, self.v1.x > self.s1 and self.v1.y > self.s1 - ) - self.assertEqual( - self.v1.elementwise() < self.s1, self.v1.x < self.s1 and self.v1.y < self.s1 - ) - self.assertEqual( - self.v1.elementwise() == self.s1, - self.v1.x == self.s1 and self.v1.y == self.s1, - ) - self.assertEqual( - self.v1.elementwise() != self.s1, - self.v1.x != self.s1 and self.v1.y != self.s1, - ) - self.assertEqual( - self.v1.elementwise() >= self.s1, - self.v1.x >= self.s1 and self.v1.y >= self.s1, - ) - self.assertEqual( - self.v1.elementwise() <= self.s1, - self.v1.x <= self.s1 and self.v1.y <= self.s1, - ) - self.assertEqual( - self.v1.elementwise() != self.s1, - self.v1.x != self.s1 and self.v1.y != self.s1, - ) - # behaviour for "scalar op elementwise" - self.assertEqual(5 + self.v1.elementwise(), Vector2(5, 5) + self.v1) - self.assertEqual(3.5 - self.v1.elementwise(), Vector2(3.5, 3.5) - self.v1) - self.assertEqual(7.5 * self.v1.elementwise(), 7.5 * self.v1) - self.assertEqual( - -3.5 / self.v1.elementwise(), (-3.5 / self.v1.x, -3.5 / self.v1.y) - ) - self.assertEqual( - -3.5 // self.v1.elementwise(), (-3.5 // self.v1.x, -3.5 // self.v1.y) - ) - self.assertEqual( - -(3.5 ** self.v1.elementwise()), (-(3.5 ** self.v1.x), -(3.5 ** self.v1.y)) - ) - self.assertEqual(3 % self.v1.elementwise(), (3 % self.v1.x, 3 % self.v1.y)) - self.assertEqual(2 < self.v1.elementwise(), 2 < self.v1.x and 2 < self.v1.y) - self.assertEqual(2 > self.v1.elementwise(), 2 > self.v1.x and 2 > self.v1.y) - self.assertEqual(1 == self.v1.elementwise(), 1 == self.v1.x and 1 == self.v1.y) - self.assertEqual(1 != self.v1.elementwise(), 1 != self.v1.x and 1 != self.v1.y) - self.assertEqual(2 <= self.v1.elementwise(), 2 <= self.v1.x and 2 <= self.v1.y) - self.assertEqual( - -7 >= self.v1.elementwise(), -7 >= self.v1.x and -7 >= self.v1.y - ) - self.assertEqual( - -7 != self.v1.elementwise(), -7 != self.v1.x and -7 != self.v1.y - ) - - # behaviour for "elementwise op vector" - self.assertEqual(type(self.v1.elementwise() * self.v2), type(self.v1)) - self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2) - self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2) - self.assertEqual(self.v1.elementwise() - self.v2, self.v1 - self.v2) - self.assertEqual( - self.v1.elementwise() * self.v2, - (self.v1.x * self.v2.x, self.v1.y * self.v2.y), - ) - self.assertEqual( - self.v1.elementwise() / self.v2, - (self.v1.x / self.v2.x, self.v1.y / self.v2.y), - ) - self.assertEqual( - self.v1.elementwise() // self.v2, - (self.v1.x // self.v2.x, self.v1.y // self.v2.y), - ) - self.assertEqual( - 
self.v1.elementwise() ** self.v2, - (self.v1.x ** self.v2.x, self.v1.y ** self.v2.y), - ) - self.assertEqual( - self.v1.elementwise() % self.v2, - (self.v1.x % self.v2.x, self.v1.y % self.v2.y), - ) - self.assertEqual( - self.v1.elementwise() > self.v2, - self.v1.x > self.v2.x and self.v1.y > self.v2.y, - ) - self.assertEqual( - self.v1.elementwise() < self.v2, - self.v1.x < self.v2.x and self.v1.y < self.v2.y, - ) - self.assertEqual( - self.v1.elementwise() >= self.v2, - self.v1.x >= self.v2.x and self.v1.y >= self.v2.y, - ) - self.assertEqual( - self.v1.elementwise() <= self.v2, - self.v1.x <= self.v2.x and self.v1.y <= self.v2.y, - ) - self.assertEqual( - self.v1.elementwise() == self.v2, - self.v1.x == self.v2.x and self.v1.y == self.v2.y, - ) - self.assertEqual( - self.v1.elementwise() != self.v2, - self.v1.x != self.v2.x and self.v1.y != self.v2.y, - ) - # behaviour for "vector op elementwise" - self.assertEqual(self.v2 + self.v1.elementwise(), self.v2 + self.v1) - self.assertEqual(self.v2 - self.v1.elementwise(), self.v2 - self.v1) - self.assertEqual( - self.v2 * self.v1.elementwise(), - (self.v2.x * self.v1.x, self.v2.y * self.v1.y), - ) - self.assertEqual( - self.v2 / self.v1.elementwise(), - (self.v2.x / self.v1.x, self.v2.y / self.v1.y), - ) - self.assertEqual( - self.v2 // self.v1.elementwise(), - (self.v2.x // self.v1.x, self.v2.y // self.v1.y), - ) - self.assertEqual( - self.v2 ** self.v1.elementwise(), - (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y), - ) - self.assertEqual( - self.v2 % self.v1.elementwise(), - (self.v2.x % self.v1.x, self.v2.y % self.v1.y), - ) - self.assertEqual( - self.v2 < self.v1.elementwise(), - self.v2.x < self.v1.x and self.v2.y < self.v1.y, - ) - self.assertEqual( - self.v2 > self.v1.elementwise(), - self.v2.x > self.v1.x and self.v2.y > self.v1.y, - ) - self.assertEqual( - self.v2 <= self.v1.elementwise(), - self.v2.x <= self.v1.x and self.v2.y <= self.v1.y, - ) - self.assertEqual( - self.v2 >= self.v1.elementwise(), - self.v2.x >= self.v1.x and self.v2.y >= self.v1.y, - ) - self.assertEqual( - self.v2 == self.v1.elementwise(), - self.v2.x == self.v1.x and self.v2.y == self.v1.y, - ) - self.assertEqual( - self.v2 != self.v1.elementwise(), - self.v2.x != self.v1.x and self.v2.y != self.v1.y, - ) - - # behaviour for "elementwise op elementwise" - self.assertEqual( - self.v2.elementwise() + self.v1.elementwise(), self.v2 + self.v1 - ) - self.assertEqual( - self.v2.elementwise() - self.v1.elementwise(), self.v2 - self.v1 - ) - self.assertEqual( - self.v2.elementwise() * self.v1.elementwise(), - (self.v2.x * self.v1.x, self.v2.y * self.v1.y), - ) - self.assertEqual( - self.v2.elementwise() / self.v1.elementwise(), - (self.v2.x / self.v1.x, self.v2.y / self.v1.y), - ) - self.assertEqual( - self.v2.elementwise() // self.v1.elementwise(), - (self.v2.x // self.v1.x, self.v2.y // self.v1.y), - ) - self.assertEqual( - self.v2.elementwise() ** self.v1.elementwise(), - (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y), - ) - self.assertEqual( - self.v2.elementwise() % self.v1.elementwise(), - (self.v2.x % self.v1.x, self.v2.y % self.v1.y), - ) - self.assertEqual( - self.v2.elementwise() < self.v1.elementwise(), - self.v2.x < self.v1.x and self.v2.y < self.v1.y, - ) - self.assertEqual( - self.v2.elementwise() > self.v1.elementwise(), - self.v2.x > self.v1.x and self.v2.y > self.v1.y, - ) - self.assertEqual( - self.v2.elementwise() <= self.v1.elementwise(), - self.v2.x <= self.v1.x and self.v2.y <= self.v1.y, - ) - self.assertEqual( - self.v2.elementwise() >= 
self.v1.elementwise(), - self.v2.x >= self.v1.x and self.v2.y >= self.v1.y, - ) - self.assertEqual( - self.v2.elementwise() == self.v1.elementwise(), - self.v2.x == self.v1.x and self.v2.y == self.v1.y, - ) - self.assertEqual( - self.v2.elementwise() != self.v1.elementwise(), - self.v2.x != self.v1.x and self.v2.y != self.v1.y, - ) - - # other behaviour - self.assertEqual(abs(self.v1.elementwise()), (abs(self.v1.x), abs(self.v1.y))) - self.assertEqual(-self.v1.elementwise(), -self.v1) - self.assertEqual(+self.v1.elementwise(), +self.v1) - self.assertEqual(bool(self.v1.elementwise()), bool(self.v1)) - self.assertEqual(bool(Vector2().elementwise()), bool(Vector2())) - self.assertEqual(self.zeroVec.elementwise() ** 0, (1, 1)) - self.assertRaises(ValueError, lambda: pow(Vector2(-1, 0).elementwise(), 1.2)) - self.assertRaises(ZeroDivisionError, lambda: self.zeroVec.elementwise() ** -1) - - def test_elementwise(self): - v1 = self.v1 - v2 = self.v2 - s1 = self.s1 - s2 = self.s2 - # behaviour for "elementwise op scalar" - self.assertEqual(v1.elementwise() + s1, (v1.x + s1, v1.y + s1)) - self.assertEqual(v1.elementwise() - s1, (v1.x - s1, v1.y - s1)) - self.assertEqual(v1.elementwise() * s2, (v1.x * s2, v1.y * s2)) - self.assertEqual(v1.elementwise() / s2, (v1.x / s2, v1.y / s2)) - self.assertEqual(v1.elementwise() // s1, (v1.x // s1, v1.y // s1)) - self.assertEqual(v1.elementwise() ** s1, (v1.x ** s1, v1.y ** s1)) - self.assertEqual(v1.elementwise() % s1, (v1.x % s1, v1.y % s1)) - self.assertEqual(v1.elementwise() > s1, v1.x > s1 and v1.y > s1) - self.assertEqual(v1.elementwise() < s1, v1.x < s1 and v1.y < s1) - self.assertEqual(v1.elementwise() == s1, v1.x == s1 and v1.y == s1) - self.assertEqual(v1.elementwise() != s1, s1 not in [v1.x, v1.y]) - self.assertEqual(v1.elementwise() >= s1, v1.x >= s1 and v1.y >= s1) - self.assertEqual(v1.elementwise() <= s1, v1.x <= s1 and v1.y <= s1) - self.assertEqual(v1.elementwise() != s1, s1 not in [v1.x, v1.y]) - # behaviour for "scalar op elementwise" - self.assertEqual(s1 + v1.elementwise(), (s1 + v1.x, s1 + v1.y)) - self.assertEqual(s1 - v1.elementwise(), (s1 - v1.x, s1 - v1.y)) - self.assertEqual(s1 * v1.elementwise(), (s1 * v1.x, s1 * v1.y)) - self.assertEqual(s1 / v1.elementwise(), (s1 / v1.x, s1 / v1.y)) - self.assertEqual(s1 // v1.elementwise(), (s1 // v1.x, s1 // v1.y)) - self.assertEqual(s1 ** v1.elementwise(), (s1 ** v1.x, s1 ** v1.y)) - self.assertEqual(s1 % v1.elementwise(), (s1 % v1.x, s1 % v1.y)) - self.assertEqual(s1 < v1.elementwise(), s1 < v1.x and s1 < v1.y) - self.assertEqual(s1 > v1.elementwise(), s1 > v1.x and s1 > v1.y) - self.assertEqual(s1 == v1.elementwise(), s1 == v1.x and s1 == v1.y) - self.assertEqual(s1 != v1.elementwise(), s1 not in [v1.x, v1.y]) - self.assertEqual(s1 <= v1.elementwise(), s1 <= v1.x and s1 <= v1.y) - self.assertEqual(s1 >= v1.elementwise(), s1 >= v1.x and s1 >= v1.y) - self.assertEqual(s1 != v1.elementwise(), s1 not in [v1.x, v1.y]) - - # behaviour for "elementwise op vector" - self.assertEqual(type(v1.elementwise() * v2), type(v1)) - self.assertEqual(v1.elementwise() + v2, v1 + v2) - self.assertEqual(v1.elementwise() - v2, v1 - v2) - self.assertEqual(v1.elementwise() * v2, (v1.x * v2.x, v1.y * v2.y)) - self.assertEqual(v1.elementwise() / v2, (v1.x / v2.x, v1.y / v2.y)) - self.assertEqual(v1.elementwise() // v2, (v1.x // v2.x, v1.y // v2.y)) - self.assertEqual(v1.elementwise() ** v2, (v1.x ** v2.x, v1.y ** v2.y)) - self.assertEqual(v1.elementwise() % v2, (v1.x % v2.x, v1.y % v2.y)) - 
self.assertEqual(v1.elementwise() > v2, v1.x > v2.x and v1.y > v2.y) - self.assertEqual(v1.elementwise() < v2, v1.x < v2.x and v1.y < v2.y) - self.assertEqual(v1.elementwise() >= v2, v1.x >= v2.x and v1.y >= v2.y) - self.assertEqual(v1.elementwise() <= v2, v1.x <= v2.x and v1.y <= v2.y) - self.assertEqual(v1.elementwise() == v2, v1.x == v2.x and v1.y == v2.y) - self.assertEqual(v1.elementwise() != v2, v1.x != v2.x and v1.y != v2.y) - # behaviour for "vector op elementwise" - self.assertEqual(v2 + v1.elementwise(), v2 + v1) - self.assertEqual(v2 - v1.elementwise(), v2 - v1) - self.assertEqual(v2 * v1.elementwise(), (v2.x * v1.x, v2.y * v1.y)) - self.assertEqual(v2 / v1.elementwise(), (v2.x / v1.x, v2.y / v1.y)) - self.assertEqual(v2 // v1.elementwise(), (v2.x // v1.x, v2.y // v1.y)) - self.assertEqual(v2 ** v1.elementwise(), (v2.x ** v1.x, v2.y ** v1.y)) - self.assertEqual(v2 % v1.elementwise(), (v2.x % v1.x, v2.y % v1.y)) - self.assertEqual(v2 < v1.elementwise(), v2.x < v1.x and v2.y < v1.y) - self.assertEqual(v2 > v1.elementwise(), v2.x > v1.x and v2.y > v1.y) - self.assertEqual(v2 <= v1.elementwise(), v2.x <= v1.x and v2.y <= v1.y) - self.assertEqual(v2 >= v1.elementwise(), v2.x >= v1.x and v2.y >= v1.y) - self.assertEqual(v2 == v1.elementwise(), v2.x == v1.x and v2.y == v1.y) - self.assertEqual(v2 != v1.elementwise(), v2.x != v1.x and v2.y != v1.y) - - # behaviour for "elementwise op elementwise" - self.assertEqual(v2.elementwise() + v1.elementwise(), v2 + v1) - self.assertEqual(v2.elementwise() - v1.elementwise(), v2 - v1) - self.assertEqual( - v2.elementwise() * v1.elementwise(), (v2.x * v1.x, v2.y * v1.y) - ) - self.assertEqual( - v2.elementwise() / v1.elementwise(), (v2.x / v1.x, v2.y / v1.y) - ) - self.assertEqual( - v2.elementwise() // v1.elementwise(), (v2.x // v1.x, v2.y // v1.y) - ) - self.assertEqual( - v2.elementwise() ** v1.elementwise(), (v2.x ** v1.x, v2.y ** v1.y) - ) - self.assertEqual( - v2.elementwise() % v1.elementwise(), (v2.x % v1.x, v2.y % v1.y) - ) - self.assertEqual( - v2.elementwise() < v1.elementwise(), v2.x < v1.x and v2.y < v1.y - ) - self.assertEqual( - v2.elementwise() > v1.elementwise(), v2.x > v1.x and v2.y > v1.y - ) - self.assertEqual( - v2.elementwise() <= v1.elementwise(), v2.x <= v1.x and v2.y <= v1.y - ) - self.assertEqual( - v2.elementwise() >= v1.elementwise(), v2.x >= v1.x and v2.y >= v1.y - ) - self.assertEqual( - v2.elementwise() == v1.elementwise(), v2.x == v1.x and v2.y == v1.y - ) - self.assertEqual( - v2.elementwise() != v1.elementwise(), v2.x != v1.x and v2.y != v1.y - ) - - # other behaviour - self.assertEqual(abs(v1.elementwise()), (abs(v1.x), abs(v1.y))) - self.assertEqual(-v1.elementwise(), -v1) - self.assertEqual(+v1.elementwise(), +v1) - self.assertEqual(bool(v1.elementwise()), bool(v1)) - self.assertEqual(bool(Vector2().elementwise()), bool(Vector2())) - self.assertEqual(self.zeroVec.elementwise() ** 0, (1, 1)) - self.assertRaises(ValueError, lambda: pow(Vector2(-1, 0).elementwise(), 1.2)) - self.assertRaises(ZeroDivisionError, lambda: self.zeroVec.elementwise() ** -1) - self.assertRaises(ZeroDivisionError, lambda: self.zeroVec.elementwise() ** -1) - self.assertRaises(ZeroDivisionError, lambda: Vector2(1, 1).elementwise() / 0) - self.assertRaises(ZeroDivisionError, lambda: Vector2(1, 1).elementwise() // 0) - self.assertRaises(ZeroDivisionError, lambda: Vector2(1, 1).elementwise() % 0) - self.assertRaises( - ZeroDivisionError, lambda: Vector2(1, 1).elementwise() / self.zeroVec - ) - self.assertRaises( - ZeroDivisionError, lambda: 
Vector2(1, 1).elementwise() // self.zeroVec - ) - self.assertRaises( - ZeroDivisionError, lambda: Vector2(1, 1).elementwise() % self.zeroVec - ) - self.assertRaises(ZeroDivisionError, lambda: 2 / self.zeroVec.elementwise()) - self.assertRaises(ZeroDivisionError, lambda: 2 // self.zeroVec.elementwise()) - self.assertRaises(ZeroDivisionError, lambda: 2 % self.zeroVec.elementwise()) - - def test_slerp(self): - self.assertRaises(ValueError, lambda: self.zeroVec.slerp(self.v1, 0.5)) - self.assertRaises(ValueError, lambda: self.v1.slerp(self.zeroVec, 0.5)) - self.assertRaises(ValueError, lambda: self.zeroVec.slerp(self.zeroVec, 0.5)) - v1 = Vector2(1, 0) - v2 = Vector2(0, 1) - steps = 10 - angle_step = v1.angle_to(v2) / steps - for i, u in ((i, v1.slerp(v2, i / float(steps))) for i in range(steps + 1)): - self.assertAlmostEqual(u.length(), 1) - self.assertAlmostEqual(v1.angle_to(u), i * angle_step) - self.assertEqual(u, v2) - - v1 = Vector2(100, 0) - v2 = Vector2(0, 10) - radial_factor = v2.length() / v1.length() - for i, u in ((i, v1.slerp(v2, -i / float(steps))) for i in range(steps + 1)): - self.assertAlmostEqual( - u.length(), - (v2.length() - v1.length()) * (float(i) / steps) + v1.length(), - ) - self.assertEqual(u, v2) - self.assertEqual(v1.slerp(v1, 0.5), v1) - self.assertEqual(v2.slerp(v2, 0.5), v2) - self.assertRaises(ValueError, lambda: v1.slerp(-v1, 0.5)) - - def test_lerp(self): - v1 = Vector2(0, 0) - v2 = Vector2(10, 10) - self.assertEqual(v1.lerp(v2, 0.5), (5, 5)) - self.assertRaises(ValueError, lambda: v1.lerp(v2, 2.5)) - - v1 = Vector2(-10, -5) - v2 = Vector2(10, 10) - self.assertEqual(v1.lerp(v2, 0.5), (0, 2.5)) - - def test_polar(self): - v = Vector2() - v.from_polar(self.v1.as_polar()) - self.assertEqual(self.v1, v) - self.assertEqual(self.e1.as_polar(), (1, 0)) - self.assertEqual(self.e2.as_polar(), (1, 90)) - self.assertEqual((2 * self.e2).as_polar(), (2, 90)) - self.assertRaises(TypeError, lambda: v.from_polar((None, None))) - self.assertRaises(TypeError, lambda: v.from_polar("ab")) - self.assertRaises(TypeError, lambda: v.from_polar((None, 1))) - self.assertRaises(TypeError, lambda: v.from_polar((1, 2, 3))) - self.assertRaises(TypeError, lambda: v.from_polar((1,))) - self.assertRaises(TypeError, lambda: v.from_polar(1, 2)) - v.from_polar((0.5, 90)) - self.assertEqual(v, 0.5 * self.e2) - v.from_polar((1, 0)) - self.assertEqual(v, self.e1) - - def test_subclass_operation(self): - class Vector(pygame.math.Vector2): - pass - - vec = Vector() - - vec_a = Vector(2, 0) - vec_b = Vector(0, 1) - - vec_a + vec_b - vec_a *= 2 - - def test_project_v2_onto_x_axis(self): - """Project onto x-axis, e.g. get the component pointing in the x-axis direction.""" - # arrange - v = Vector2(2, 2) - x_axis = Vector2(10, 0) - - # act - actual = v.project(x_axis) - - # assert - self.assertEqual(v.x, actual.x) - self.assertEqual(0, actual.y) - - def test_project_v2_onto_y_axis(self): - """Project onto y-axis, e.g. 
get the component pointing in the y-axis direction.""" - # arrange - v = Vector2(2, 2) - y_axis = Vector2(0, 100) - - # act - actual = v.project(y_axis) - - # assert - self.assertEqual(0, actual.x) - self.assertEqual(v.y, actual.y) - - def test_project_v2_onto_other(self): - """Project onto other vector.""" - # arrange - v = Vector2(2, 3) - other = Vector2(3, 5) - - # act - actual = v.project(other) - - # assert - expected = v.dot(other) / other.dot(other) * other - self.assertEqual(expected.x, actual.x) - self.assertEqual(expected.y, actual.y) - - def test_project_v2_raises_if_other_has_zero_length(self): - """Check if exception is raise when projected on vector has zero length.""" - # arrange - v = Vector2(2, 3) - other = Vector2(0, 0) - - # act / assert - self.assertRaises(ValueError, v.project, other) - - def test_project_v2_onto_other_as_tuple(self): - """Project onto other tuple as vector.""" - # arrange - v = Vector2(2, 3) - other = Vector2(3, 5) - - # act - actual = v.project(tuple(other)) - - # assert - expected = v.dot(other) / other.dot(other) * other - self.assertEqual(expected.x, actual.x) - self.assertEqual(expected.y, actual.y) - - def test_project_v2_onto_other_as_list(self): - """Project onto other list as vector.""" - # arrange - v = Vector2(2, 3) - other = Vector2(3, 5) - - # act - actual = v.project(list(other)) - - # assert - expected = v.dot(other) / other.dot(other) * other - self.assertEqual(expected.x, actual.x) - self.assertEqual(expected.y, actual.y) - - def test_project_v2_raises_if_other_has_zero_length(self): - """Check if exception is raise when projected on vector has zero length.""" - # arrange - v = Vector2(2, 3) - other = Vector2(0, 0) - - # act / assert - self.assertRaises(ValueError, v.project, other) - - def test_project_v2_raises_if_other_is_not_iterable(self): - """Check if exception is raise when projected on vector is not iterable.""" - # arrange - v = Vector2(2, 3) - other = 10 - - # act / assert - self.assertRaises(TypeError, v.project, other) - - -class Vector3TypeTest(unittest.TestCase): - def setUp(self): - self.zeroVec = Vector3() - self.e1 = Vector3(1, 0, 0) - self.e2 = Vector3(0, 1, 0) - self.e3 = Vector3(0, 0, 1) - self.t1 = (1.2, 3.4, 9.6) - self.l1 = list(self.t1) - self.v1 = Vector3(self.t1) - self.t2 = (5.6, 7.8, 2.1) - self.l2 = list(self.t2) - self.v2 = Vector3(self.t2) - self.s1 = 5.6 - self.s2 = 7.8 - - def testConstructionDefault(self): - v = Vector3() - self.assertEqual(v.x, 0.0) - self.assertEqual(v.y, 0.0) - self.assertEqual(v.z, 0.0) - - def testConstructionXYZ(self): - v = Vector3(1.2, 3.4, 9.6) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - self.assertEqual(v.z, 9.6) - - def testConstructionTuple(self): - v = Vector3((1.2, 3.4, 9.6)) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - self.assertEqual(v.z, 9.6) - - def testConstructionList(self): - v = Vector3([1.2, 3.4, -9.6]) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - self.assertEqual(v.z, -9.6) - - def testConstructionVector3(self): - v = Vector3(Vector3(1.2, 3.4, -9.6)) - self.assertEqual(v.x, 1.2) - self.assertEqual(v.y, 3.4) - self.assertEqual(v.z, -9.6) - - def testConstructionScalar(self): - v = Vector3(1) - self.assertEqual(v.x, 1.0) - self.assertEqual(v.y, 1.0) - self.assertEqual(v.z, 1.0) - - def testConstructionScalarKeywords(self): - v = Vector3(x=1) - self.assertEqual(v.x, 1.0) - self.assertEqual(v.y, 1.0) - self.assertEqual(v.z, 1.0) - - def testConstructionKeywords(self): - v = Vector3(x=1, y=2, z=3) - 
self.assertEqual(v.x, 1.0) - self.assertEqual(v.y, 2.0) - self.assertEqual(v.z, 3.0) - - def testConstructionMissing(self): - def assign_missing_value(): - v = Vector3(1, 2) - - self.assertRaises(ValueError, assign_missing_value) - - def assign_missing_value(): - v = Vector3(x=1, y=2) - - self.assertRaises(ValueError, assign_missing_value) - - def testAttributeAccess(self): - tmp = self.v1.x - self.assertEqual(tmp, self.v1.x) - self.assertEqual(tmp, self.v1[0]) - tmp = self.v1.y - self.assertEqual(tmp, self.v1.y) - self.assertEqual(tmp, self.v1[1]) - tmp = self.v1.z - self.assertEqual(tmp, self.v1.z) - self.assertEqual(tmp, self.v1[2]) - self.v1.x = 3.141 - self.assertEqual(self.v1.x, 3.141) - self.v1.y = 3.141 - self.assertEqual(self.v1.y, 3.141) - self.v1.z = 3.141 - self.assertEqual(self.v1.z, 3.141) - - def assign_nonfloat(): - v = Vector2() - v.x = "spam" - - self.assertRaises(TypeError, assign_nonfloat) - - def testSequence(self): - v = Vector3(1.2, 3.4, -9.6) - self.assertEqual(len(v), 3) - self.assertEqual(v[0], 1.2) - self.assertEqual(v[1], 3.4) - self.assertEqual(v[2], -9.6) - self.assertRaises(IndexError, lambda: v[3]) - self.assertEqual(v[-1], -9.6) - self.assertEqual(v[-2], 3.4) - self.assertEqual(v[-3], 1.2) - self.assertRaises(IndexError, lambda: v[-4]) - self.assertEqual(v[:], [1.2, 3.4, -9.6]) - self.assertEqual(v[1:], [3.4, -9.6]) - self.assertEqual(v[:1], [1.2]) - self.assertEqual(v[:-1], [1.2, 3.4]) - self.assertEqual(v[1:2], [3.4]) - self.assertEqual(list(v), [1.2, 3.4, -9.6]) - self.assertEqual(tuple(v), (1.2, 3.4, -9.6)) - v[0] = 5.6 - v[1] = 7.8 - v[2] = -2.1 - self.assertEqual(v.x, 5.6) - self.assertEqual(v.y, 7.8) - self.assertEqual(v.z, -2.1) - v[:] = [9.1, 11.12, -13.41] - self.assertEqual(v.x, 9.1) - self.assertEqual(v.y, 11.12) - self.assertEqual(v.z, -13.41) - - def overpopulate(): - v = Vector3() - v[:] = [1, 2, 3, 4] - - self.assertRaises(ValueError, overpopulate) - - def underpopulate(): - v = Vector3() - v[:] = [1] - - self.assertRaises(ValueError, underpopulate) - - def assign_nonfloat(): - v = Vector2() - v[0] = "spam" - - self.assertRaises(TypeError, assign_nonfloat) - - def testExtendedSlicing(self): - # deletion - def delSlice(vec, start=None, stop=None, step=None): - if start is not None and stop is not None and step is not None: - del vec[start:stop:step] - elif start is not None and stop is None and step is not None: - del vec[start::step] - elif start is None and stop is None and step is not None: - del vec[::step] - - v = Vector3(self.v1) - self.assertRaises(TypeError, delSlice, v, None, None, 2) - self.assertRaises(TypeError, delSlice, v, 1, None, 2) - self.assertRaises(TypeError, delSlice, v, 1, 2, 1) - - # assignment - v = Vector3(self.v1) - v[::2] = [-1.1, -2.2] - self.assertEqual(v, [-1.1, self.v1.y, -2.2]) - v = Vector3(self.v1) - v[::-2] = [10, 20] - self.assertEqual(v, [20, self.v1.y, 10]) - v = Vector3(self.v1) - v[::-1] = v - self.assertEqual(v, [self.v1.z, self.v1.y, self.v1.x]) - a = Vector3(self.v1) - b = Vector3(self.v1) - c = Vector3(self.v1) - a[1:2] = [2.2] - b[slice(1, 2)] = [2.2] - c[1:2:] = (2.2,) - self.assertEqual(a, b) - self.assertEqual(a, c) - self.assertEqual(type(a), type(self.v1)) - self.assertEqual(type(b), type(self.v1)) - self.assertEqual(type(c), type(self.v1)) - - def testAdd(self): - v3 = self.v1 + self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x + self.v2.x) - self.assertEqual(v3.y, self.v1.y + self.v2.y) - self.assertEqual(v3.z, self.v1.z + self.v2.z) - v3 = self.v1 + 
self.t2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x + self.t2[0]) - self.assertEqual(v3.y, self.v1.y + self.t2[1]) - self.assertEqual(v3.z, self.v1.z + self.t2[2]) - v3 = self.v1 + self.l2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x + self.l2[0]) - self.assertEqual(v3.y, self.v1.y + self.l2[1]) - self.assertEqual(v3.z, self.v1.z + self.l2[2]) - v3 = self.t1 + self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.t1[0] + self.v2.x) - self.assertEqual(v3.y, self.t1[1] + self.v2.y) - self.assertEqual(v3.z, self.t1[2] + self.v2.z) - v3 = self.l1 + self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.l1[0] + self.v2.x) - self.assertEqual(v3.y, self.l1[1] + self.v2.y) - self.assertEqual(v3.z, self.l1[2] + self.v2.z) - - def testSub(self): - v3 = self.v1 - self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x - self.v2.x) - self.assertEqual(v3.y, self.v1.y - self.v2.y) - self.assertEqual(v3.z, self.v1.z - self.v2.z) - v3 = self.v1 - self.t2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x - self.t2[0]) - self.assertEqual(v3.y, self.v1.y - self.t2[1]) - self.assertEqual(v3.z, self.v1.z - self.t2[2]) - v3 = self.v1 - self.l2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.v1.x - self.l2[0]) - self.assertEqual(v3.y, self.v1.y - self.l2[1]) - self.assertEqual(v3.z, self.v1.z - self.l2[2]) - v3 = self.t1 - self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.t1[0] - self.v2.x) - self.assertEqual(v3.y, self.t1[1] - self.v2.y) - self.assertEqual(v3.z, self.t1[2] - self.v2.z) - v3 = self.l1 - self.v2 - self.assertTrue(isinstance(v3, type(self.v1))) - self.assertEqual(v3.x, self.l1[0] - self.v2.x) - self.assertEqual(v3.y, self.l1[1] - self.v2.y) - self.assertEqual(v3.z, self.l1[2] - self.v2.z) - - def testScalarMultiplication(self): - v = self.s1 * self.v1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, self.s1 * self.v1.x) - self.assertEqual(v.y, self.s1 * self.v1.y) - self.assertEqual(v.z, self.s1 * self.v1.z) - v = self.v1 * self.s2 - self.assertEqual(v.x, self.v1.x * self.s2) - self.assertEqual(v.y, self.v1.y * self.s2) - self.assertEqual(v.z, self.v1.z * self.s2) - - def testScalarDivision(self): - v = self.v1 / self.s1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertAlmostEqual(v.x, self.v1.x / self.s1) - self.assertAlmostEqual(v.y, self.v1.y / self.s1) - self.assertAlmostEqual(v.z, self.v1.z / self.s1) - v = self.v1 // self.s2 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, self.v1.x // self.s2) - self.assertEqual(v.y, self.v1.y // self.s2) - self.assertEqual(v.z, self.v1.z // self.s2) - - def testBool(self): - self.assertEqual(bool(self.zeroVec), False) - self.assertEqual(bool(self.v1), True) - self.assertTrue(not self.zeroVec) - self.assertTrue(self.v1) - - def testUnary(self): - v = +self.v1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, self.v1.x) - self.assertEqual(v.y, self.v1.y) - self.assertEqual(v.z, self.v1.z) - self.assertNotEqual(id(v), id(self.v1)) - v = -self.v1 - self.assertTrue(isinstance(v, type(self.v1))) - self.assertEqual(v.x, -self.v1.x) - self.assertEqual(v.y, -self.v1.y) - self.assertEqual(v.z, -self.v1.z) - self.assertNotEqual(id(v), id(self.v1)) - - def testCompare(self): - int_vec = Vector3(3, -2, 13) - 
flt_vec = Vector3(3.0, -2.0, 13.0) - zero_vec = Vector3(0, 0, 0) - self.assertEqual(int_vec == flt_vec, True) - self.assertEqual(int_vec != flt_vec, False) - self.assertEqual(int_vec != zero_vec, True) - self.assertEqual(flt_vec == zero_vec, False) - self.assertEqual(int_vec == (3, -2, 13), True) - self.assertEqual(int_vec != (3, -2, 13), False) - self.assertEqual(int_vec != [0, 0], True) - self.assertEqual(int_vec == [0, 0], False) - self.assertEqual(int_vec != 5, True) - self.assertEqual(int_vec == 5, False) - self.assertEqual(int_vec != [3, -2, 0, 1], True) - self.assertEqual(int_vec == [3, -2, 0, 1], False) - - def testStr(self): - v = Vector3(1.2, 3.4, 5.6) - self.assertEqual(str(v), "[1.2, 3.4, 5.6]") - - def testRepr(self): - v = Vector3(1.2, 3.4, -9.6) - self.assertEqual(v.__repr__(), "<Vector3(1.2, 3.4, -9.6)>") - self.assertEqual(v, Vector3(v.__repr__())) - - def testIter(self): - it = self.v1.__iter__() - next_ = it.__next__ - self.assertEqual(next_(), self.v1[0]) - self.assertEqual(next_(), self.v1[1]) - self.assertEqual(next_(), self.v1[2]) - self.assertRaises(StopIteration, lambda: next_()) - it1 = self.v1.__iter__() - it2 = self.v1.__iter__() - self.assertNotEqual(id(it1), id(it2)) - self.assertEqual(id(it1), id(it1.__iter__())) - self.assertEqual(list(it1), list(it2)) - self.assertEqual(list(self.v1.__iter__()), self.l1) - idx = 0 - for val in self.v1: - self.assertEqual(val, self.v1[idx]) - idx += 1 - - def test_rotate(self): - v1 = Vector3(1, 0, 0) - axis = Vector3(0, 1, 0) - v2 = v1.rotate(90, axis) - v3 = v1.rotate(90 + 360, axis) - self.assertEqual(v1.x, 1) - self.assertEqual(v1.y, 0) - self.assertEqual(v1.z, 0) - self.assertEqual(v2.x, 0) - self.assertEqual(v2.y, 0) - self.assertEqual(v2.z, -1) - self.assertEqual(v3.x, v2.x) - self.assertEqual(v3.y, v2.y) - self.assertEqual(v3.z, v2.z) - v1 = Vector3(-1, -1, -1) - v2 = v1.rotate(-90, axis) - self.assertEqual(v2.x, 1) - self.assertEqual(v2.y, -1) - self.assertEqual(v2.z, -1) - v2 = v1.rotate(360, axis) - self.assertEqual(v1.x, v2.x) - self.assertEqual(v1.y, v2.y) - self.assertEqual(v1.z, v2.z) - v2 = v1.rotate(0, axis) - self.assertEqual(v1.x, v2.x) - self.assertEqual(v1.y, v2.y) - self.assertEqual(v1.z, v2.z) - # issue 214 - self.assertEqual( - Vector3(0, 1, 0).rotate(359.9999999, Vector3(0, 0, 1)), Vector3(0, 1, 0) - ) - - def test_rotate_rad(self): - axis = Vector3(0, 0, 1) - tests = ( - ((1, 0, 0), math.pi), - ((1, 0, 0), math.pi / 2), - ((1, 0, 0), -math.pi / 2), - ((1, 0, 0), math.pi / 4), - ) - for initialVec, radians in tests: - vec = Vector3(initialVec).rotate_rad(radians, axis) - self.assertEqual(vec, (math.cos(radians), math.sin(radians), 0)) - - def test_rotate_ip(self): - v = Vector3(1, 0, 0) - axis = Vector3(0, 1, 0) - self.assertEqual(v.rotate_ip(90, axis), None) - self.assertEqual(v.x, 0) - self.assertEqual(v.y, 0) - self.assertEqual(v.z, -1) - v = Vector3(-1, -1, 1) - v.rotate_ip(-90, axis) - self.assertEqual(v.x, -1) - self.assertEqual(v.y, -1) - self.assertEqual(v.z, -1) - - def test_rotate_rad_ip(self): - axis = Vector3(0, 0, 1) - tests = ( - ((1, 0, 0), math.pi), - ((1, 0, 0), math.pi / 2), - ((1, 0, 0), -math.pi / 2), - ((1, 0, 0), math.pi / 4), - ) - for initialVec, radians in tests: - vec = Vector3(initialVec) - vec.rotate_rad_ip(radians, axis) - self.assertEqual(vec, (math.cos(radians), math.sin(radians), 0)) - - def test_rotate_x(self): - v1 = Vector3(1, 0, 0) - v2 = v1.rotate_x(90) - v3 = v1.rotate_x(90 + 360) - self.assertEqual(v1.x, 1) - self.assertEqual(v1.y, 0) - self.assertEqual(v1.z, 0) - self.assertEqual(v2.x, 
1) - self.assertEqual(v2.y, 0) - self.assertEqual(v2.z, 0) - self.assertEqual(v3.x, v2.x) - self.assertEqual(v3.y, v2.y) - self.assertEqual(v3.z, v2.z) - v1 = Vector3(-1, -1, -1) - v2 = v1.rotate_x(-90) - self.assertEqual(v2.x, -1) - self.assertAlmostEqual(v2.y, -1) - self.assertAlmostEqual(v2.z, 1) - v2 = v1.rotate_x(360) - self.assertAlmostEqual(v1.x, v2.x) - self.assertAlmostEqual(v1.y, v2.y) - self.assertAlmostEqual(v1.z, v2.z) - v2 = v1.rotate_x(0) - self.assertEqual(v1.x, v2.x) - self.assertAlmostEqual(v1.y, v2.y) - self.assertAlmostEqual(v1.z, v2.z) - - def test_rotate_x_rad(self): - vec = Vector3(0, 1, 0) - result = vec.rotate_x_rad(math.pi / 2) - self.assertEqual(result, (0, 0, 1)) - - def test_rotate_x_ip(self): - v = Vector3(1, 0, 0) - self.assertEqual(v.rotate_x_ip(90), None) - self.assertEqual(v.x, 1) - self.assertEqual(v.y, 0) - self.assertEqual(v.z, 0) - v = Vector3(-1, -1, 1) - v.rotate_x_ip(-90) - self.assertEqual(v.x, -1) - self.assertAlmostEqual(v.y, 1) - self.assertAlmostEqual(v.z, 1) - - def test_rotate_x_rad_ip(self): - vec = Vector3(0, 1, 0) - vec.rotate_x_rad_ip(math.pi / 2) - self.assertEqual(vec, (0, 0, 1)) - - def test_rotate_y(self): - v1 = Vector3(1, 0, 0) - v2 = v1.rotate_y(90) - v3 = v1.rotate_y(90 + 360) - self.assertEqual(v1.x, 1) - self.assertEqual(v1.y, 0) - self.assertEqual(v1.z, 0) - self.assertAlmostEqual(v2.x, 0) - self.assertEqual(v2.y, 0) - self.assertAlmostEqual(v2.z, -1) - self.assertAlmostEqual(v3.x, v2.x) - self.assertEqual(v3.y, v2.y) - self.assertAlmostEqual(v3.z, v2.z) - v1 = Vector3(-1, -1, -1) - v2 = v1.rotate_y(-90) - self.assertAlmostEqual(v2.x, 1) - self.assertEqual(v2.y, -1) - self.assertAlmostEqual(v2.z, -1) - v2 = v1.rotate_y(360) - self.assertAlmostEqual(v1.x, v2.x) - self.assertEqual(v1.y, v2.y) - self.assertAlmostEqual(v1.z, v2.z) - v2 = v1.rotate_y(0) - self.assertEqual(v1.x, v2.x) - self.assertEqual(v1.y, v2.y) - self.assertEqual(v1.z, v2.z) - - def test_rotate_y_rad(self): - vec = Vector3(1, 0, 0) - result = vec.rotate_y_rad(math.pi / 2) - self.assertEqual(result, (0, 0, -1)) - - def test_rotate_y_ip(self): - v = Vector3(1, 0, 0) - self.assertEqual(v.rotate_y_ip(90), None) - self.assertAlmostEqual(v.x, 0) - self.assertEqual(v.y, 0) - self.assertAlmostEqual(v.z, -1) - v = Vector3(-1, -1, 1) - v.rotate_y_ip(-90) - self.assertAlmostEqual(v.x, -1) - self.assertEqual(v.y, -1) - self.assertAlmostEqual(v.z, -1) - - def test_rotate_y_rad_ip(self): - vec = Vector3(1, 0, 0) - vec.rotate_y_rad_ip(math.pi / 2) - self.assertEqual(vec, (0, 0, -1)) - - def test_rotate_z(self): - v1 = Vector3(1, 0, 0) - v2 = v1.rotate_z(90) - v3 = v1.rotate_z(90 + 360) - self.assertEqual(v1.x, 1) - self.assertEqual(v1.y, 0) - self.assertEqual(v1.z, 0) - self.assertAlmostEqual(v2.x, 0) - self.assertAlmostEqual(v2.y, 1) - self.assertEqual(v2.z, 0) - self.assertAlmostEqual(v3.x, v2.x) - self.assertAlmostEqual(v3.y, v2.y) - self.assertEqual(v3.z, v2.z) - v1 = Vector3(-1, -1, -1) - v2 = v1.rotate_z(-90) - self.assertAlmostEqual(v2.x, -1) - self.assertAlmostEqual(v2.y, 1) - self.assertEqual(v2.z, -1) - v2 = v1.rotate_z(360) - self.assertAlmostEqual(v1.x, v2.x) - self.assertAlmostEqual(v1.y, v2.y) - self.assertEqual(v1.z, v2.z) - v2 = v1.rotate_z(0) - self.assertAlmostEqual(v1.x, v2.x) - self.assertAlmostEqual(v1.y, v2.y) - self.assertEqual(v1.z, v2.z) - - def test_rotate_z_rad(self): - vec = Vector3(1, 0, 0) - result = vec.rotate_z_rad(math.pi / 2) - self.assertEqual(result, (0, 1, 0)) - - def test_rotate_z_ip(self): - v = Vector3(1, 0, 0) - 
self.assertEqual(v.rotate_z_ip(90), None) - self.assertAlmostEqual(v.x, 0) - self.assertAlmostEqual(v.y, 1) - self.assertEqual(v.z, 0) - v = Vector3(-1, -1, 1) - v.rotate_z_ip(-90) - self.assertAlmostEqual(v.x, -1) - self.assertAlmostEqual(v.y, 1) - self.assertEqual(v.z, 1) - - def test_rotate_z_rad_ip(self): - vec = Vector3(1, 0, 0) - vec.rotate_z_rad_ip(math.pi / 2) - self.assertEqual(vec, (0, 1, 0)) - - def test_normalize(self): - v = self.v1.normalize() - # length is 1 - self.assertAlmostEqual(v.x * v.x + v.y * v.y + v.z * v.z, 1.0) - # v1 is unchanged - self.assertEqual(self.v1.x, self.l1[0]) - self.assertEqual(self.v1.y, self.l1[1]) - self.assertEqual(self.v1.z, self.l1[2]) - # v2 is parallel to v1 (tested via cross product) - cross = ( - (self.v1.y * v.z - self.v1.z * v.y) ** 2 - + (self.v1.z * v.x - self.v1.x * v.z) ** 2 - + (self.v1.x * v.y - self.v1.y * v.x) ** 2 - ) - self.assertAlmostEqual(cross, 0.0) - self.assertRaises(ValueError, lambda: self.zeroVec.normalize()) - - def test_normalize_ip(self): - v = +self.v1 - # v has length != 1 before normalizing - self.assertNotEqual(v.x * v.x + v.y * v.y + v.z * v.z, 1.0) - # inplace operations should return None - self.assertEqual(v.normalize_ip(), None) - # length is 1 - self.assertAlmostEqual(v.x * v.x + v.y * v.y + v.z * v.z, 1.0) - # v2 is parallel to v1 (tested via cross product) - cross = ( - (self.v1.y * v.z - self.v1.z * v.y) ** 2 - + (self.v1.z * v.x - self.v1.x * v.z) ** 2 - + (self.v1.x * v.y - self.v1.y * v.x) ** 2 - ) - self.assertAlmostEqual(cross, 0.0) - self.assertRaises(ValueError, lambda: self.zeroVec.normalize_ip()) - - def test_is_normalized(self): - self.assertEqual(self.v1.is_normalized(), False) - v = self.v1.normalize() - self.assertEqual(v.is_normalized(), True) - self.assertEqual(self.e2.is_normalized(), True) - self.assertEqual(self.zeroVec.is_normalized(), False) - - def test_cross(self): - def cross(a, b): - return Vector3( - a[1] * b[2] - a[2] * b[1], - a[2] * b[0] - a[0] * b[2], - a[0] * b[1] - a[1] * b[0], - ) - - self.assertEqual(self.v1.cross(self.v2), cross(self.v1, self.v2)) - self.assertEqual(self.v1.cross(self.l2), cross(self.v1, self.l2)) - self.assertEqual(self.v1.cross(self.t2), cross(self.v1, self.t2)) - self.assertEqual(self.v1.cross(self.v2), -self.v2.cross(self.v1)) - self.assertEqual(self.v1.cross(self.v1), self.zeroVec) - - def test_dot(self): - self.assertAlmostEqual( - self.v1.dot(self.v2), - self.v1.x * self.v2.x + self.v1.y * self.v2.y + self.v1.z * self.v2.z, - ) - self.assertAlmostEqual( - self.v1.dot(self.l2), - self.v1.x * self.l2[0] + self.v1.y * self.l2[1] + self.v1.z * self.l2[2], - ) - self.assertAlmostEqual( - self.v1.dot(self.t2), - self.v1.x * self.t2[0] + self.v1.y * self.t2[1] + self.v1.z * self.t2[2], - ) - self.assertAlmostEqual(self.v1.dot(self.v2), self.v2.dot(self.v1)) - self.assertAlmostEqual(self.v1.dot(self.v2), self.v1 * self.v2) - - def test_angle_to(self): - self.assertEqual(Vector3(1, 1, 0).angle_to((-1, 1, 0)), 90) - self.assertEqual(Vector3(1, 0, 0).angle_to((0, 0, -1)), 90) - self.assertEqual(Vector3(1, 0, 0).angle_to((-1, 0, 1)), 135) - self.assertEqual(abs(Vector3(1, 0, 1).angle_to((-1, 0, -1))), 180) - # if we rotate v1 by the angle_to v2 around their cross product - # we should look in the same direction - self.assertEqual( - self.v1.rotate( - self.v1.angle_to(self.v2), self.v1.cross(self.v2) - ).normalize(), - self.v2.normalize(), - ) - - def test_scale_to_length(self): - v = Vector3(1, 1, 1) - v.scale_to_length(2.5) - self.assertEqual(v, Vector3(2.5, 
2.5, 2.5) / math.sqrt(3)) - self.assertRaises(ValueError, lambda: self.zeroVec.scale_to_length(1)) - self.assertEqual(v.scale_to_length(0), None) - self.assertEqual(v, self.zeroVec) - - def test_length(self): - self.assertEqual(Vector3(3, 4, 5).length(), math.sqrt(3 * 3 + 4 * 4 + 5 * 5)) - self.assertEqual(Vector3(-3, 4, 5).length(), math.sqrt(-3 * -3 + 4 * 4 + 5 * 5)) - self.assertEqual(self.zeroVec.length(), 0) - - def test_length_squared(self): - self.assertEqual(Vector3(3, 4, 5).length_squared(), 3 * 3 + 4 * 4 + 5 * 5) - self.assertEqual(Vector3(-3, 4, 5).length_squared(), -3 * -3 + 4 * 4 + 5 * 5) - self.assertEqual(self.zeroVec.length_squared(), 0) - - def test_reflect(self): - v = Vector3(1, -1, 1) - n = Vector3(0, 1, 0) - self.assertEqual(v.reflect(n), Vector3(1, 1, 1)) - self.assertEqual(v.reflect(3 * n), v.reflect(n)) - self.assertEqual(v.reflect(-v), -v) - self.assertRaises(ValueError, lambda: v.reflect(self.zeroVec)) - - def test_reflect_ip(self): - v1 = Vector3(1, -1, 1) - v2 = Vector3(v1) - n = Vector3(0, 1, 0) - self.assertEqual(v2.reflect_ip(n), None) - self.assertEqual(v2, Vector3(1, 1, 1)) - v2 = Vector3(v1) - v2.reflect_ip(3 * n) - self.assertEqual(v2, v1.reflect(n)) - v2 = Vector3(v1) - v2.reflect_ip(-v1) - self.assertEqual(v2, -v1) - self.assertRaises(ValueError, lambda: v2.reflect_ip(self.zeroVec)) - - def test_distance_to(self): - diff = self.v1 - self.v2 - self.assertEqual(self.e1.distance_to(self.e2), math.sqrt(2)) - self.assertEqual( - self.v1.distance_to(self.v2), - math.sqrt(diff.x * diff.x + diff.y * diff.y + diff.z * diff.z), - ) - self.assertEqual(self.v1.distance_to(self.v1), 0) - self.assertEqual(self.v1.distance_to(self.v2), self.v2.distance_to(self.v1)) - - def test_distance_squared_to(self): - diff = self.v1 - self.v2 - self.assertEqual(self.e1.distance_squared_to(self.e2), 2) - self.assertAlmostEqual( - self.v1.distance_squared_to(self.v2), - diff.x * diff.x + diff.y * diff.y + diff.z * diff.z, - ) - self.assertEqual(self.v1.distance_squared_to(self.v1), 0) - self.assertEqual( - self.v1.distance_squared_to(self.v2), self.v2.distance_squared_to(self.v1) - ) - - def test_swizzle(self): - self.assertEqual(self.v1.yxz, (self.v1.y, self.v1.x, self.v1.z)) - self.assertEqual( - self.v1.xxyyzzxyz, - ( - self.v1.x, - self.v1.x, - self.v1.y, - self.v1.y, - self.v1.z, - self.v1.z, - self.v1.x, - self.v1.y, - self.v1.z, - ), - ) - self.v1.xyz = self.t2 - self.assertEqual(self.v1, self.t2) - self.v1.zxy = self.t2 - self.assertEqual(self.v1, (self.t2[1], self.t2[2], self.t2[0])) - self.v1.yz = self.t2[:2] - self.assertEqual(self.v1, (self.t2[1], self.t2[0], self.t2[1])) - self.assertEqual(type(self.v1), Vector3) - - @unittest.skipIf(IS_PYPY, "known pypy failure") - def test_invalid_swizzle(self): - def invalidSwizzleX(): - Vector3().xx = (1, 2) - - def invalidSwizzleY(): - Vector3().yy = (1, 2) - - def invalidSwizzleZ(): - Vector3().zz = (1, 2) - - def invalidSwizzleW(): - Vector3().ww = (1, 2) - - self.assertRaises(AttributeError, invalidSwizzleX) - self.assertRaises(AttributeError, invalidSwizzleY) - self.assertRaises(AttributeError, invalidSwizzleZ) - self.assertRaises(AttributeError, invalidSwizzleW) - - def invalidAssignment(): - Vector3().xy = 3 - - self.assertRaises(TypeError, invalidAssignment) - - def test_swizzle_return_types(self): - self.assertEqual(type(self.v1.x), float) - self.assertEqual(type(self.v1.xy), Vector2) - self.assertEqual(type(self.v1.xyz), Vector3) - # but we don't have vector4 or above... so tuple. 
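# The removed normalize/cross/dot/reflect cases above (and the project()
# cases further below) all reduce to a handful of vector identities. A
# minimal standalone sketch of those identities, assuming pygame 2.x with
# pygame.math is installed; the sample values and tolerances are arbitrary.
import math
from pygame.math import Vector3

v = Vector3(1.2, 3.4, -9.6)
w = Vector3(5.6, 7.8, 2.1)

assert math.isclose(v.normalize().length(), 1.0)    # normalizing yields a unit vector
assert v.cross(w) == -w.cross(v)                    # cross product is antisymmetric
assert math.isclose(v.dot(w), w.dot(v))             # dot product is symmetric
n = Vector3(0, 1, 0)
assert v.reflect(n) == Vector3(v.x, -v.y, v.z)      # reflection about the XZ plane
assert v.project(w).cross(w).length() < 1e-6        # the projection is parallel to w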
- self.assertEqual(type(self.v1.xyxy), tuple) - self.assertEqual(type(self.v1.xyxyx), tuple) - - def test_dir_works(self): - # not every single one of the attributes... - attributes = set( - ["lerp", "normalize", "normalize_ip", "reflect", "slerp", "x", "y"] - ) - # check if this selection of attributes are all there. - self.assertTrue(attributes.issubset(set(dir(self.v1)))) - - def test_elementwise(self): - # behaviour for "elementwise op scalar" - self.assertEqual( - self.v1.elementwise() + self.s1, - (self.v1.x + self.s1, self.v1.y + self.s1, self.v1.z + self.s1), - ) - self.assertEqual( - self.v1.elementwise() - self.s1, - (self.v1.x - self.s1, self.v1.y - self.s1, self.v1.z - self.s1), - ) - self.assertEqual( - self.v1.elementwise() * self.s2, - (self.v1.x * self.s2, self.v1.y * self.s2, self.v1.z * self.s2), - ) - self.assertEqual( - self.v1.elementwise() / self.s2, - (self.v1.x / self.s2, self.v1.y / self.s2, self.v1.z / self.s2), - ) - self.assertEqual( - self.v1.elementwise() // self.s1, - (self.v1.x // self.s1, self.v1.y // self.s1, self.v1.z // self.s1), - ) - self.assertEqual( - self.v1.elementwise() ** self.s1, - (self.v1.x ** self.s1, self.v1.y ** self.s1, self.v1.z ** self.s1), - ) - self.assertEqual( - self.v1.elementwise() % self.s1, - (self.v1.x % self.s1, self.v1.y % self.s1, self.v1.z % self.s1), - ) - self.assertEqual( - self.v1.elementwise() > self.s1, - self.v1.x > self.s1 and self.v1.y > self.s1 and self.v1.z > self.s1, - ) - self.assertEqual( - self.v1.elementwise() < self.s1, - self.v1.x < self.s1 and self.v1.y < self.s1 and self.v1.z < self.s1, - ) - self.assertEqual( - self.v1.elementwise() == self.s1, - self.v1.x == self.s1 and self.v1.y == self.s1 and self.v1.z == self.s1, - ) - self.assertEqual( - self.v1.elementwise() != self.s1, - self.v1.x != self.s1 and self.v1.y != self.s1 and self.v1.z != self.s1, - ) - self.assertEqual( - self.v1.elementwise() >= self.s1, - self.v1.x >= self.s1 and self.v1.y >= self.s1 and self.v1.z >= self.s1, - ) - self.assertEqual( - self.v1.elementwise() <= self.s1, - self.v1.x <= self.s1 and self.v1.y <= self.s1 and self.v1.z <= self.s1, - ) - # behaviour for "scalar op elementwise" - self.assertEqual(5 + self.v1.elementwise(), Vector3(5, 5, 5) + self.v1) - self.assertEqual(3.5 - self.v1.elementwise(), Vector3(3.5, 3.5, 3.5) - self.v1) - self.assertEqual(7.5 * self.v1.elementwise(), 7.5 * self.v1) - self.assertEqual( - -3.5 / self.v1.elementwise(), - (-3.5 / self.v1.x, -3.5 / self.v1.y, -3.5 / self.v1.z), - ) - self.assertEqual( - -3.5 // self.v1.elementwise(), - (-3.5 // self.v1.x, -3.5 // self.v1.y, -3.5 // self.v1.z), - ) - self.assertEqual( - -(3.5 ** self.v1.elementwise()), - (-(3.5 ** self.v1.x), -(3.5 ** self.v1.y), -(3.5 ** self.v1.z)), - ) - self.assertEqual( - 3 % self.v1.elementwise(), (3 % self.v1.x, 3 % self.v1.y, 3 % self.v1.z) - ) - self.assertEqual( - 2 < self.v1.elementwise(), 2 < self.v1.x and 2 < self.v1.y and 2 < self.v1.z - ) - self.assertEqual( - 2 > self.v1.elementwise(), 2 > self.v1.x and 2 > self.v1.y and 2 > self.v1.z - ) - self.assertEqual( - 1 == self.v1.elementwise(), - 1 == self.v1.x and 1 == self.v1.y and 1 == self.v1.z, - ) - self.assertEqual( - 1 != self.v1.elementwise(), - 1 != self.v1.x and 1 != self.v1.y and 1 != self.v1.z, - ) - self.assertEqual( - 2 <= self.v1.elementwise(), - 2 <= self.v1.x and 2 <= self.v1.y and 2 <= self.v1.z, - ) - self.assertEqual( - -7 >= self.v1.elementwise(), - -7 >= self.v1.x and -7 >= self.v1.y and -7 >= self.v1.z, - ) - self.assertEqual( - -7 != 
self.v1.elementwise(), - -7 != self.v1.x and -7 != self.v1.y and -7 != self.v1.z, - ) - - # behaviour for "elementwise op vector" - self.assertEqual(type(self.v1.elementwise() * self.v2), type(self.v1)) - self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2) - self.assertEqual(self.v1.elementwise() + self.v2, self.v1 + self.v2) - self.assertEqual(self.v1.elementwise() - self.v2, self.v1 - self.v2) - self.assertEqual( - self.v1.elementwise() * self.v2, - (self.v1.x * self.v2.x, self.v1.y * self.v2.y, self.v1.z * self.v2.z), - ) - self.assertEqual( - self.v1.elementwise() / self.v2, - (self.v1.x / self.v2.x, self.v1.y / self.v2.y, self.v1.z / self.v2.z), - ) - self.assertEqual( - self.v1.elementwise() // self.v2, - (self.v1.x // self.v2.x, self.v1.y // self.v2.y, self.v1.z // self.v2.z), - ) - self.assertEqual( - self.v1.elementwise() ** self.v2, - (self.v1.x ** self.v2.x, self.v1.y ** self.v2.y, self.v1.z ** self.v2.z), - ) - self.assertEqual( - self.v1.elementwise() % self.v2, - (self.v1.x % self.v2.x, self.v1.y % self.v2.y, self.v1.z % self.v2.z), - ) - self.assertEqual( - self.v1.elementwise() > self.v2, - self.v1.x > self.v2.x and self.v1.y > self.v2.y and self.v1.z > self.v2.z, - ) - self.assertEqual( - self.v1.elementwise() < self.v2, - self.v1.x < self.v2.x and self.v1.y < self.v2.y and self.v1.z < self.v2.z, - ) - self.assertEqual( - self.v1.elementwise() >= self.v2, - self.v1.x >= self.v2.x - and self.v1.y >= self.v2.y - and self.v1.z >= self.v2.z, - ) - self.assertEqual( - self.v1.elementwise() <= self.v2, - self.v1.x <= self.v2.x - and self.v1.y <= self.v2.y - and self.v1.z <= self.v2.z, - ) - self.assertEqual( - self.v1.elementwise() == self.v2, - self.v1.x == self.v2.x - and self.v1.y == self.v2.y - and self.v1.z == self.v2.z, - ) - self.assertEqual( - self.v1.elementwise() != self.v2, - self.v1.x != self.v2.x - and self.v1.y != self.v2.y - and self.v1.z != self.v2.z, - ) - # behaviour for "vector op elementwise" - self.assertEqual(self.v2 + self.v1.elementwise(), self.v2 + self.v1) - self.assertEqual(self.v2 - self.v1.elementwise(), self.v2 - self.v1) - self.assertEqual( - self.v2 * self.v1.elementwise(), - (self.v2.x * self.v1.x, self.v2.y * self.v1.y, self.v2.z * self.v1.z), - ) - self.assertEqual( - self.v2 / self.v1.elementwise(), - (self.v2.x / self.v1.x, self.v2.y / self.v1.y, self.v2.z / self.v1.z), - ) - self.assertEqual( - self.v2 // self.v1.elementwise(), - (self.v2.x // self.v1.x, self.v2.y // self.v1.y, self.v2.z // self.v1.z), - ) - self.assertEqual( - self.v2 ** self.v1.elementwise(), - (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y, self.v2.z ** self.v1.z), - ) - self.assertEqual( - self.v2 % self.v1.elementwise(), - (self.v2.x % self.v1.x, self.v2.y % self.v1.y, self.v2.z % self.v1.z), - ) - self.assertEqual( - self.v2 < self.v1.elementwise(), - self.v2.x < self.v1.x and self.v2.y < self.v1.y and self.v2.z < self.v1.z, - ) - self.assertEqual( - self.v2 > self.v1.elementwise(), - self.v2.x > self.v1.x and self.v2.y > self.v1.y and self.v2.z > self.v1.z, - ) - self.assertEqual( - self.v2 <= self.v1.elementwise(), - self.v2.x <= self.v1.x - and self.v2.y <= self.v1.y - and self.v2.z <= self.v1.z, - ) - self.assertEqual( - self.v2 >= self.v1.elementwise(), - self.v2.x >= self.v1.x - and self.v2.y >= self.v1.y - and self.v2.z >= self.v1.z, - ) - self.assertEqual( - self.v2 == self.v1.elementwise(), - self.v2.x == self.v1.x - and self.v2.y == self.v1.y - and self.v2.z == self.v1.z, - ) - self.assertEqual( - self.v2 != self.v1.elementwise(), - 
self.v2.x != self.v1.x - and self.v2.y != self.v1.y - and self.v2.z != self.v1.z, - ) - - # behaviour for "elementwise op elementwise" - self.assertEqual( - self.v2.elementwise() + self.v1.elementwise(), self.v2 + self.v1 - ) - self.assertEqual( - self.v2.elementwise() - self.v1.elementwise(), self.v2 - self.v1 - ) - self.assertEqual( - self.v2.elementwise() * self.v1.elementwise(), - (self.v2.x * self.v1.x, self.v2.y * self.v1.y, self.v2.z * self.v1.z), - ) - self.assertEqual( - self.v2.elementwise() / self.v1.elementwise(), - (self.v2.x / self.v1.x, self.v2.y / self.v1.y, self.v2.z / self.v1.z), - ) - self.assertEqual( - self.v2.elementwise() // self.v1.elementwise(), - (self.v2.x // self.v1.x, self.v2.y // self.v1.y, self.v2.z // self.v1.z), - ) - self.assertEqual( - self.v2.elementwise() ** self.v1.elementwise(), - (self.v2.x ** self.v1.x, self.v2.y ** self.v1.y, self.v2.z ** self.v1.z), - ) - self.assertEqual( - self.v2.elementwise() % self.v1.elementwise(), - (self.v2.x % self.v1.x, self.v2.y % self.v1.y, self.v2.z % self.v1.z), - ) - self.assertEqual( - self.v2.elementwise() < self.v1.elementwise(), - self.v2.x < self.v1.x and self.v2.y < self.v1.y and self.v2.z < self.v1.z, - ) - self.assertEqual( - self.v2.elementwise() > self.v1.elementwise(), - self.v2.x > self.v1.x and self.v2.y > self.v1.y and self.v2.z > self.v1.z, - ) - self.assertEqual( - self.v2.elementwise() <= self.v1.elementwise(), - self.v2.x <= self.v1.x - and self.v2.y <= self.v1.y - and self.v2.z <= self.v1.z, - ) - self.assertEqual( - self.v2.elementwise() >= self.v1.elementwise(), - self.v2.x >= self.v1.x - and self.v2.y >= self.v1.y - and self.v2.z >= self.v1.z, - ) - self.assertEqual( - self.v2.elementwise() == self.v1.elementwise(), - self.v2.x == self.v1.x - and self.v2.y == self.v1.y - and self.v2.z == self.v1.z, - ) - self.assertEqual( - self.v2.elementwise() != self.v1.elementwise(), - self.v2.x != self.v1.x - and self.v2.y != self.v1.y - and self.v2.z != self.v1.z, - ) - - # other behaviour - self.assertEqual( - abs(self.v1.elementwise()), (abs(self.v1.x), abs(self.v1.y), abs(self.v1.z)) - ) - self.assertEqual(-self.v1.elementwise(), -self.v1) - self.assertEqual(+self.v1.elementwise(), +self.v1) - self.assertEqual(bool(self.v1.elementwise()), bool(self.v1)) - self.assertEqual(bool(Vector3().elementwise()), bool(Vector3())) - self.assertEqual(self.zeroVec.elementwise() ** 0, (1, 1, 1)) - self.assertRaises(ValueError, lambda: pow(Vector3(-1, 0, 0).elementwise(), 1.2)) - self.assertRaises(ZeroDivisionError, lambda: self.zeroVec.elementwise() ** -1) - self.assertRaises(ZeroDivisionError, lambda: Vector3(1, 1, 1).elementwise() / 0) - self.assertRaises( - ZeroDivisionError, lambda: Vector3(1, 1, 1).elementwise() // 0 - ) - self.assertRaises(ZeroDivisionError, lambda: Vector3(1, 1, 1).elementwise() % 0) - self.assertRaises( - ZeroDivisionError, lambda: Vector3(1, 1, 1).elementwise() / self.zeroVec - ) - self.assertRaises( - ZeroDivisionError, lambda: Vector3(1, 1, 1).elementwise() // self.zeroVec - ) - self.assertRaises( - ZeroDivisionError, lambda: Vector3(1, 1, 1).elementwise() % self.zeroVec - ) - self.assertRaises(ZeroDivisionError, lambda: 2 / self.zeroVec.elementwise()) - self.assertRaises(ZeroDivisionError, lambda: 2 // self.zeroVec.elementwise()) - self.assertRaises(ZeroDivisionError, lambda: 2 % self.zeroVec.elementwise()) - - def test_slerp(self): - self.assertRaises(ValueError, lambda: self.zeroVec.slerp(self.v1, 0.5)) - self.assertRaises(ValueError, lambda: self.v1.slerp(self.zeroVec, 0.5)) - 
self.assertRaises(ValueError, lambda: self.zeroVec.slerp(self.zeroVec, 0.5)) - steps = 10 - angle_step = self.e1.angle_to(self.e2) / steps - for i, u in ( - (i, self.e1.slerp(self.e2, i / float(steps))) for i in range(steps + 1) - ): - self.assertAlmostEqual(u.length(), 1) - self.assertAlmostEqual(self.e1.angle_to(u), i * angle_step) - self.assertEqual(u, self.e2) - - v1 = Vector3(100, 0, 0) - v2 = Vector3(0, 10, 7) - radial_factor = v2.length() / v1.length() - for i, u in ((i, v1.slerp(v2, -i / float(steps))) for i in range(steps + 1)): - self.assertAlmostEqual( - u.length(), - (v2.length() - v1.length()) * (float(i) / steps) + v1.length(), - ) - self.assertEqual(u, v2) - self.assertEqual(v1.slerp(v1, 0.5), v1) - self.assertEqual(v2.slerp(v2, 0.5), v2) - self.assertRaises(ValueError, lambda: v1.slerp(-v1, 0.5)) - - def test_lerp(self): - v1 = Vector3(0, 0, 0) - v2 = Vector3(10, 10, 10) - self.assertEqual(v1.lerp(v2, 0.5), (5, 5, 5)) - self.assertRaises(ValueError, lambda: v1.lerp(v2, 2.5)) - - v1 = Vector3(-10, -5, -20) - v2 = Vector3(10, 10, -20) - self.assertEqual(v1.lerp(v2, 0.5), (0, 2.5, -20)) - - def test_spherical(self): - v = Vector3() - v.from_spherical(self.v1.as_spherical()) - self.assertEqual(self.v1, v) - self.assertEqual(self.e1.as_spherical(), (1, 90, 0)) - self.assertEqual(self.e2.as_spherical(), (1, 90, 90)) - self.assertEqual(self.e3.as_spherical(), (1, 0, 0)) - self.assertEqual((2 * self.e2).as_spherical(), (2, 90, 90)) - self.assertRaises(TypeError, lambda: v.from_spherical((None, None, None))) - self.assertRaises(TypeError, lambda: v.from_spherical("abc")) - self.assertRaises(TypeError, lambda: v.from_spherical((None, 1, 2))) - self.assertRaises(TypeError, lambda: v.from_spherical((1, 2, 3, 4))) - self.assertRaises(TypeError, lambda: v.from_spherical((1, 2))) - self.assertRaises(TypeError, lambda: v.from_spherical(1, 2, 3)) - v.from_spherical((0.5, 90, 90)) - self.assertEqual(v, 0.5 * self.e2) - - def test_inplace_operators(self): - - v = Vector3(1, 1, 1) - v *= 2 - self.assertEqual(v, (2.0, 2.0, 2.0)) - - v = Vector3(4, 4, 4) - v /= 2 - self.assertEqual(v, (2.0, 2.0, 2.0)) - - v = Vector3(3.0, 3.0, 3.0) - v -= (1, 1, 1) - self.assertEqual(v, (2.0, 2.0, 2.0)) - - v = Vector3(3.0, 3.0, 3.0) - v += (1, 1, 1) - self.assertEqual(v, (4.0, 4.0, 4.0)) - - def test_pickle(self): - import pickle - - v2 = Vector2(1, 2) - v3 = Vector3(1, 2, 3) - self.assertEqual(pickle.loads(pickle.dumps(v2)), v2) - self.assertEqual(pickle.loads(pickle.dumps(v3)), v3) - - def test_subclass_operation(self): - class Vector(pygame.math.Vector3): - pass - - v = Vector(2.0, 2.0, 2.0) - v *= 2 - self.assertEqual(v, (4.0, 4.0, 4.0)) - - def test_swizzle_constants(self): - """We can get constant values from a swizzle.""" - v = Vector2(7, 6) - self.assertEqual( - v.xy1, - (7.0, 6.0, 1.0), - ) - - def test_swizzle_four_constants(self): - """We can get 4 constant values from a swizzle.""" - v = Vector2(7, 6) - self.assertEqual( - v.xy01, - (7.0, 6.0, 0.0, 1.0), - ) - - def test_swizzle_oob(self): - """An out-of-bounds swizzle raises an AttributeError.""" - v = Vector2(7, 6) - with self.assertRaises(AttributeError): - v.xyz - - @unittest.skipIf(IS_PYPY, "known pypy failure") - def test_swizzle_set_oob(self): - """An out-of-bounds swizzle set raises an AttributeError.""" - v = Vector2(7, 6) - with self.assertRaises(AttributeError): - v.xz = (1, 1) - - def test_project_v3_onto_x_axis(self): - """Project onto x-axis, e.g. 
get the component pointing in the x-axis direction.""" - # arrange - v = Vector3(2, 3, 4) - x_axis = Vector3(10, 0, 0) - - # act - actual = v.project(x_axis) - - # assert - self.assertEqual(v.x, actual.x) - self.assertEqual(0, actual.y) - self.assertEqual(0, actual.z) - - def test_project_v3_onto_y_axis(self): - """Project onto y-axis, e.g. get the component pointing in the y-axis direction.""" - # arrange - v = Vector3(2, 3, 4) - y_axis = Vector3(0, 100, 0) - - # act - actual = v.project(y_axis) - - # assert - self.assertEqual(0, actual.x) - self.assertEqual(v.y, actual.y) - self.assertEqual(0, actual.z) - - def test_project_v3_onto_z_axis(self): - """Project onto z-axis, e.g. get the component pointing in the z-axis direction.""" - # arrange - v = Vector3(2, 3, 4) - y_axis = Vector3(0, 0, 77) - - # act - actual = v.project(y_axis) - - # assert - self.assertEqual(0, actual.x) - self.assertEqual(0, actual.y) - self.assertEqual(v.z, actual.z) - - def test_project_v3_onto_other(self): - """Project onto other vector.""" - # arrange - v = Vector3(2, 3, 4) - other = Vector3(3, 5, 7) - - # act - actual = v.project(other) - - # assert - expected = v.dot(other) / other.dot(other) * other - self.assertAlmostEqual(expected.x, actual.x) - self.assertAlmostEqual(expected.y, actual.y) - self.assertAlmostEqual(expected.z, actual.z) - - def test_project_v3_onto_other_as_tuple(self): - """Project onto other tuple as vector.""" - # arrange - v = Vector3(2, 3, 4) - other = Vector3(3, 5, 7) - - # act - actual = v.project(tuple(other)) - - # assert - expected = v.dot(other) / other.dot(other) * other - self.assertAlmostEqual(expected.x, actual.x) - self.assertAlmostEqual(expected.y, actual.y) - self.assertAlmostEqual(expected.z, actual.z) - - def test_project_v3_onto_other_as_list(self): - """Project onto other list as vector.""" - # arrange - v = Vector3(2, 3, 4) - other = Vector3(3, 5, 7) - - # act - actual = v.project(list(other)) - - # assert - expected = v.dot(other) / other.dot(other) * other - self.assertAlmostEqual(expected.x, actual.x) - self.assertAlmostEqual(expected.y, actual.y) - self.assertAlmostEqual(expected.z, actual.z) - - def test_project_v3_raises_if_other_has_zero_length(self): - """Check if exception is raise when projected on vector has zero length.""" - # arrange - v = Vector3(2, 3, 4) - other = Vector3(0, 0, 0) - - # act / assert - self.assertRaises(ValueError, v.project, other) - - def test_project_v3_raises_if_other_is_not_iterable(self): - """Check if exception is raise when projected on vector is not iterable.""" - # arrange - v = Vector3(2, 3, 4) - other = 10 - - # act / assert - self.assertRaises(TypeError, v.project, other) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/midi_test.py b/venv/Lib/site-packages/pygame/tests/midi_test.py deleted file mode 100644 index ffbe8ff..0000000 --- a/venv/Lib/site-packages/pygame/tests/midi_test.py +++ /dev/null @@ -1,472 +0,0 @@ -import unittest - - -import pygame - - -class MidiInputTest(unittest.TestCase): - __tags__ = ["interactive"] - - def setUp(self): - import pygame.midi - - pygame.midi.init() - in_id = pygame.midi.get_default_input_id() - if in_id != -1: - self.midi_input = pygame.midi.Input(in_id) - else: - self.midi_input = None - - def tearDown(self): - if self.midi_input: - self.midi_input.close() - pygame.midi.quit() - - def test_Input(self): - i = pygame.midi.get_default_input_id() - if self.midi_input: - self.assertEqual(self.midi_input.device_id, i) - - # try feeding it an 
input id. - i = pygame.midi.get_default_output_id() - - # can handle some invalid input too. - self.assertRaises(pygame.midi.MidiException, pygame.midi.Input, i) - self.assertRaises(pygame.midi.MidiException, pygame.midi.Input, 9009) - self.assertRaises(pygame.midi.MidiException, pygame.midi.Input, -1) - self.assertRaises(TypeError, pygame.midi.Input, "1234") - self.assertRaises(OverflowError, pygame.midi.Input, pow(2, 99)) - - def test_poll(self): - - if not self.midi_input: - self.skipTest("No midi Input device") - - self.assertFalse(self.midi_input.poll()) - # TODO fake some incoming data - - pygame.midi.quit() - self.assertRaises(RuntimeError, self.midi_input.poll) - # set midi_input to None to avoid error in tearDown - self.midi_input = None - - def test_read(self): - - if not self.midi_input: - self.skipTest("No midi Input device") - - read = self.midi_input.read(5) - self.assertEqual(read, []) - # TODO fake some incoming data - - pygame.midi.quit() - self.assertRaises(RuntimeError, self.midi_input.read, 52) - # set midi_input to None to avoid error in tearDown - self.midi_input = None - - def test_close(self): - if not self.midi_input: - self.skipTest("No midi Input device") - - self.assertIsNotNone(self.midi_input._input) - self.midi_input.close() - self.assertIsNone(self.midi_input._input) - - -class MidiOutputTest(unittest.TestCase): - __tags__ = ["interactive"] - - def setUp(self): - import pygame.midi - - pygame.midi.init() - m_out_id = pygame.midi.get_default_output_id() - if m_out_id != -1: - self.midi_output = pygame.midi.Output(m_out_id) - else: - self.midi_output = None - - def tearDown(self): - if self.midi_output: - self.midi_output.close() - pygame.midi.quit() - - def test_Output(self): - i = pygame.midi.get_default_output_id() - if self.midi_output: - self.assertEqual(self.midi_output.device_id, i) - - # try feeding it an input id. - i = pygame.midi.get_default_input_id() - - # can handle some invalid input too. 
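# The MidiInputTest/MidiOutputTest fixtures above share one defensive pattern:
# initialise pygame.midi, ask for the default device id, and treat -1 as "no
# device present". A minimal sketch of that pattern, assuming pygame.midi is
# available; the note and velocity values are arbitrary.
import pygame.midi

pygame.midi.init()
try:
    out_id = pygame.midi.get_default_output_id()
    if out_id == -1:
        print("no MIDI output device available")
    else:
        out = pygame.midi.Output(out_id)
        try:
            out.note_on(60, 100)    # middle C, velocity 100, default channel 0
            out.note_off(60, 100)
        finally:
            out.close()
finally:
    pygame.midi.quit()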
- self.assertRaises(pygame.midi.MidiException, pygame.midi.Output, i) - self.assertRaises(pygame.midi.MidiException, pygame.midi.Output, 9009) - self.assertRaises(pygame.midi.MidiException, pygame.midi.Output, -1) - self.assertRaises(TypeError, pygame.midi.Output, "1234") - self.assertRaises(OverflowError, pygame.midi.Output, pow(2, 99)) - - def test_note_off(self): - if self.midi_output: - out = self.midi_output - out.note_on(5, 30, 0) - out.note_off(5, 30, 0) - with self.assertRaises(ValueError) as cm: - out.note_off(5, 30, 25) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - with self.assertRaises(ValueError) as cm: - out.note_off(5, 30, -1) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - - def test_note_on(self): - if self.midi_output: - out = self.midi_output - out.note_on(5, 30, 0) - out.note_on(5, 42, 10) - with self.assertRaises(ValueError) as cm: - out.note_on(5, 30, 25) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - with self.assertRaises(ValueError) as cm: - out.note_on(5, 30, -1) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - - def test_set_instrument(self): - - if not self.midi_output: - self.skipTest("No midi device") - out = self.midi_output - out.set_instrument(5) - out.set_instrument(42, channel=2) - with self.assertRaises(ValueError) as cm: - out.set_instrument(-6) - self.assertEqual(str(cm.exception), "Undefined instrument id: -6") - with self.assertRaises(ValueError) as cm: - out.set_instrument(156) - self.assertEqual(str(cm.exception), "Undefined instrument id: 156") - with self.assertRaises(ValueError) as cm: - out.set_instrument(5, -1) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - with self.assertRaises(ValueError) as cm: - out.set_instrument(5, 16) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - - def test_write(self): - if not self.midi_output: - self.skipTest("No midi device") - - out = self.midi_output - out.write([[[0xC0, 0, 0], 20000]]) - # is equivalent to - out.write([[[0xC0], 20000]]) - # example from the docstring : - # 1. choose program change 1 at time 20000 and - # 2. send note 65 with velocity 100 500 ms later - out.write([[[0xC0, 0, 0], 20000], [[0x90, 60, 100], 20500]]) - - out.write([]) - verrry_long = [[[0x90, 60, i % 100], 20000 + 100 * i] for i in range(1024)] - out.write(verrry_long) - - too_long = [[[0x90, 60, i % 100], 20000 + 100 * i] for i in range(1025)] - self.assertRaises(IndexError, out.write, too_long) - # test wrong data - with self.assertRaises(TypeError) as cm: - out.write("Non sens ?") - error_msg = "unsupported operand type(s) for &: 'str' and 'int'" - self.assertEqual(str(cm.exception), error_msg) - - with self.assertRaises(TypeError) as cm: - out.write(["Hey what's that?"]) - self.assertEqual(str(cm.exception), error_msg) - - def test_write_short(self): - if not self.midi_output: - self.skipTest("No midi device") - - out = self.midi_output - # program change - out.write_short(0xC0) - # put a note on, then off. 
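# The surrounding test_write/test_write_short cases document the timed-buffer
# format pygame.midi.Output.write() expects: a list of [[status, data1, data2],
# timestamp] entries, at most 1024 per call, with timestamps on the
# pygame.midi.time() clock. A hedged sketch of sending a short sequence; `out`
# is assumed to be an already-opened pygame.midi.Output (see the sketch above).
import pygame.midi

def play_timed_sequence(out):
    now = pygame.midi.time()
    out.write([
        [[0xC0, 0, 0], now],            # program change on channel 0
        [[0x90, 60, 100], now + 100],   # note on: middle C, velocity 100
        [[0x80, 60, 0], now + 600],     # matching note off 500 ms later
    ])
    out.write_short(0xB0, 123, 0)       # controller 123: all notes off, channel 0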
- out.write_short(0x90, 65, 100) - out.write_short(0x80, 65, 100) - out.write_short(0x90) - - def test_write_sys_ex(self): - if not self.midi_output: - self.skipTest("No midi device") - - out = self.midi_output - out.write_sys_ex(pygame.midi.time(), [0xF0, 0x7D, 0x10, 0x11, 0x12, 0x13, 0xF7]) - - def test_pitch_bend(self): - # FIXME : pitch_bend in the code, but not in documentation - if not self.midi_output: - self.skipTest("No midi device") - - out = self.midi_output - with self.assertRaises(ValueError) as cm: - out.pitch_bend(5, channel=-1) - self.assertEqual(str(cm.exception), "Channel not between 0 and 15.") - with self.assertRaises(ValueError) as cm: - out.pitch_bend(5, channel=16) - with self.assertRaises(ValueError) as cm: - out.pitch_bend(-10001, 1) - self.assertEqual( - str(cm.exception), - "Pitch bend value must be between " "-8192 and +8191, not -10001.", - ) - with self.assertRaises(ValueError) as cm: - out.pitch_bend(10665, 2) - - def test_close(self): - if not self.midi_output: - self.skipTest("No midi device") - self.assertIsNotNone(self.midi_output._output) - self.midi_output.close() - self.assertIsNone(self.midi_output._output) - - def test_abort(self): - if not self.midi_output: - self.skipTest("No midi device") - self.assertEqual(self.midi_output._aborted, 0) - self.midi_output.abort() - self.assertEqual(self.midi_output._aborted, 1) - - -class MidiModuleTest(unittest.TestCase): - """Midi module tests that require midi hardware or midi.init(). - - See MidiModuleNonInteractiveTest for non-interactive module tests. - """ - - __tags__ = ["interactive"] - - def setUp(self): - import pygame.midi - - pygame.midi.init() - - def tearDown(self): - pygame.midi.quit() - - def test_get_count(self): - c = pygame.midi.get_count() - self.assertIsInstance(c, int) - self.assertTrue(c >= 0) - - def test_get_default_input_id(self): - - midin_id = pygame.midi.get_default_input_id() - # if there is a not None return make sure it is an int. - self.assertIsInstance(midin_id, int) - self.assertTrue(midin_id >= -1) - pygame.midi.quit() - self.assertRaises(RuntimeError, pygame.midi.get_default_output_id) - - def test_get_default_output_id(self): - - c = pygame.midi.get_default_output_id() - self.assertIsInstance(c, int) - self.assertTrue(c >= -1) - pygame.midi.quit() - self.assertRaises(RuntimeError, pygame.midi.get_default_output_id) - - def test_get_device_info(self): - - an_id = pygame.midi.get_default_output_id() - if an_id != -1: - interf, name, input, output, opened = pygame.midi.get_device_info(an_id) - self.assertEqual(output, 1) - self.assertEqual(input, 0) - self.assertEqual(opened, 0) - - an_in_id = pygame.midi.get_default_input_id() - if an_in_id != -1: - r = pygame.midi.get_device_info(an_in_id) - # if r is None, it means that the id is out of range. - interf, name, input, output, opened = r - - self.assertEqual(output, 0) - self.assertEqual(input, 1) - self.assertEqual(opened, 0) - out_of_range = pygame.midi.get_count() - for num in range(out_of_range): - self.assertIsNotNone(pygame.midi.get_device_info(num)) - info = pygame.midi.get_device_info(out_of_range) - self.assertIsNone(info) - - def test_init(self): - - pygame.midi.quit() - self.assertRaises(RuntimeError, pygame.midi.get_count) - # initialising many times should be fine. - pygame.midi.init() - pygame.midi.init() - pygame.midi.init() - pygame.midi.init() - - self.assertTrue(pygame.midi.get_init()) - - def test_quit(self): - - # It is safe to call this more than once. 
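# The deleted test_get_device_info case above shows the 5-tuple returned by
# pygame.midi.get_device_info(): (interface, name, is_input, is_output, opened),
# with None returned for ids outside range(pygame.midi.get_count()). A small
# enumeration sketch, assuming pygame.midi is available.
import pygame.midi

pygame.midi.init()
try:
    for device_id in range(pygame.midi.get_count()):
        interface, name, is_input, is_output, opened = pygame.midi.get_device_info(device_id)
        direction = "input" if is_input else "output"
        state = "opened" if opened else "available"
        print(device_id, interface, name, direction, state)
finally:
    pygame.midi.quit()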
- pygame.midi.quit() - pygame.midi.init() - pygame.midi.quit() - pygame.midi.quit() - pygame.midi.init() - pygame.midi.init() - pygame.midi.quit() - - self.assertFalse(pygame.midi.get_init()) - - def test_get_init(self): - # Already initialized as pygame.midi.init() was called in setUp(). - self.assertTrue(pygame.midi.get_init()) - - def test_time(self): - - mtime = pygame.midi.time() - self.assertIsInstance(mtime, int) - # should be close to 2-3... since the timer is just init'd. - self.assertTrue(0 <= mtime < 100) - - -class MidiModuleNonInteractiveTest(unittest.TestCase): - """Midi module tests that do not require midi hardware or midi.init(). - - See MidiModuleTest for interactive module tests. - """ - - def setUp(self): - import pygame.midi - - def test_midiin(self): - """Ensures the MIDIIN event id exists in the midi module. - - The MIDIIN event id can be accessed via the midi module for backward - compatibility. - """ - self.assertEqual(pygame.midi.MIDIIN, pygame.MIDIIN) - self.assertEqual(pygame.midi.MIDIIN, pygame.locals.MIDIIN) - - self.assertNotEqual(pygame.midi.MIDIIN, pygame.MIDIOUT) - self.assertNotEqual(pygame.midi.MIDIIN, pygame.locals.MIDIOUT) - - def test_midiout(self): - """Ensures the MIDIOUT event id exists in the midi module. - - The MIDIOUT event id can be accessed via the midi module for backward - compatibility. - """ - self.assertEqual(pygame.midi.MIDIOUT, pygame.MIDIOUT) - self.assertEqual(pygame.midi.MIDIOUT, pygame.locals.MIDIOUT) - - self.assertNotEqual(pygame.midi.MIDIOUT, pygame.MIDIIN) - self.assertNotEqual(pygame.midi.MIDIOUT, pygame.locals.MIDIIN) - - def test_MidiException(self): - """Ensures the MidiException is raised as expected.""" - - def raiseit(): - raise pygame.midi.MidiException("Hello Midi param") - - with self.assertRaises(pygame.midi.MidiException) as cm: - raiseit() - - self.assertEqual(cm.exception.parameter, "Hello Midi param") - - def test_midis2events(self): - """Ensures midi events are properly converted to pygame events.""" - # List/tuple indexes. - MIDI_DATA = 0 - MD_STATUS = 0 - MD_DATA1 = 1 - MD_DATA2 = 2 - MD_DATA3 = 3 - - TIMESTAMP = 1 - - # Midi events take the form of: - # ((status, data1, data2, data3), timestamp) - midi_events = ( - ((0xC0, 0, 1, 2), 20000), - ((0x90, 60, 1000, "string_data"), 20001), - (("0", "1", "2", "3"), "4"), - ) - expected_num_events = len(midi_events) - - # Test different device ids. - for device_id in range(3): - pg_events = pygame.midi.midis2events(midi_events, device_id) - - self.assertEqual(len(pg_events), expected_num_events) - - for i, pg_event in enumerate(pg_events): - # Get the original midi data for comparison. - midi_event = midi_events[i] - midi_event_data = midi_event[MIDI_DATA] - - # Can't directly check event instance as pygame.event.Event is - # a function. 
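# The surrounding test_midis2events cases pin down the conversion from raw
# ((status, data1, data2, data3), timestamp) tuples to pygame events: each
# event has type pygame.MIDIIN plus status/data1/data2/data3/timestamp
# attributes and the historical `vice_id` attribute carrying the device id.
# A minimal sketch that needs no MIDI hardware; the sample values are arbitrary.
import pygame
import pygame.midi

raw_events = [((0x90, 60, 100, 0), 20000)]    # note on: middle C, velocity 100
for event in pygame.midi.midis2events(raw_events, 3):
    assert event.type == pygame.MIDIIN
    print(event.status, event.data1, event.data2, event.data3,
          event.timestamp, event.vice_id)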
- # self.assertIsInstance(pg_event, pygame.event.Event) - self.assertEqual(pg_event.__class__.__name__, "Event") - self.assertEqual(pg_event.type, pygame.MIDIIN) - self.assertEqual(pg_event.status, midi_event_data[MD_STATUS]) - self.assertEqual(pg_event.data1, midi_event_data[MD_DATA1]) - self.assertEqual(pg_event.data2, midi_event_data[MD_DATA2]) - self.assertEqual(pg_event.data3, midi_event_data[MD_DATA3]) - self.assertEqual(pg_event.timestamp, midi_event[TIMESTAMP]) - self.assertEqual(pg_event.vice_id, device_id) - - def test_midis2events__missing_event_data(self): - """Ensures midi events with missing values are handled properly.""" - midi_event_missing_data = ((0xC0, 0, 1), 20000) - midi_event_missing_timestamp = ((0xC0, 0, 1, 2),) - - for midi_event in (midi_event_missing_data, midi_event_missing_timestamp): - with self.assertRaises(ValueError): - events = pygame.midi.midis2events([midi_event], 0) - - def test_midis2events__extra_event_data(self): - """Ensures midi events with extra values are handled properly.""" - midi_event_extra_data = ((0xC0, 0, 1, 2, "extra"), 20000) - midi_event_extra_timestamp = ((0xC0, 0, 1, 2), 20000, "extra") - - for midi_event in (midi_event_extra_data, midi_event_extra_timestamp): - with self.assertRaises(ValueError): - events = pygame.midi.midis2events([midi_event], 0) - - def test_midis2events__extra_event_data_missing_timestamp(self): - """Ensures midi events with extra data and no timestamps are handled - properly. - """ - midi_event_extra_data_no_timestamp = ((0xC0, 0, 1, 2, "extra"),) - - with self.assertRaises(ValueError): - events = pygame.midi.midis2events([midi_event_extra_data_no_timestamp], 0) - - def test_conversions(self): - """of frequencies to midi note numbers and ansi note names.""" - from pygame.midi import frequency_to_midi, midi_to_frequency, midi_to_ansi_note - - self.assertEqual(frequency_to_midi(27.5), 21) - self.assertEqual(frequency_to_midi(36.7), 26) - self.assertEqual(frequency_to_midi(4186.0), 108) - self.assertEqual(midi_to_frequency(21), 27.5) - self.assertEqual(midi_to_frequency(26), 36.7) - self.assertEqual(midi_to_frequency(108), 4186.0) - self.assertEqual(midi_to_ansi_note(21), "A0") - self.assertEqual(midi_to_ansi_note(71), "B4") - self.assertEqual(midi_to_ansi_note(82), "A#5") - self.assertEqual(midi_to_ansi_note(83), "B5") - self.assertEqual(midi_to_ansi_note(93), "A6") - self.assertEqual(midi_to_ansi_note(94), "A#6") - self.assertEqual(midi_to_ansi_note(95), "B6") - self.assertEqual(midi_to_ansi_note(96), "C7") - self.assertEqual(midi_to_ansi_note(102), "F#7") - self.assertEqual(midi_to_ansi_note(108), "C8") - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/mixer_music_tags.py b/venv/Lib/site-packages/pygame/tests/mixer_music_tags.py deleted file mode 100644 index 30f6893..0000000 --- a/venv/Lib/site-packages/pygame/tests/mixer_music_tags.py +++ /dev/null @@ -1,7 +0,0 @@ -__tags__ = [] - -import pygame -import sys - -if "pygame.mixer_music" not in sys.modules: - __tags__.extend(("ignore", "subprocess_ignore")) diff --git a/venv/Lib/site-packages/pygame/tests/mixer_music_test.py b/venv/Lib/site-packages/pygame/tests/mixer_music_test.py deleted file mode 100644 index 3d1c1f7..0000000 --- a/venv/Lib/site-packages/pygame/tests/mixer_music_test.py +++ /dev/null @@ -1,415 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -import sys -import platform -import unittest -import time - -from pygame.tests.test_utils import example_path -import pygame - - -class 
MixerMusicModuleTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Initializing the mixer is slow, so minimize the times it is called. - pygame.mixer.init() - - @classmethod - def tearDownClass(cls): - pygame.mixer.quit() - - def setUp(cls): - # This makes sure the mixer is always initialized before each test (in - # case a test calls pygame.mixer.quit()). - if pygame.mixer.get_init() is None: - pygame.mixer.init() - - def test_load_mp3(self): - "|tags:music|" - self.music_load("mp3") - - def test_load_ogg(self): - "|tags:music|" - self.music_load("ogg") - - def test_load_wav(self): - "|tags:music|" - self.music_load("wav") - - def music_load(self, format): - data_fname = example_path("data") - - path = os.path.join(data_fname, "house_lo.%s" % format) - if os.sep == "\\": - path = path.replace("\\", "\\\\") - umusfn = str(path) - bmusfn = umusfn.encode() - - pygame.mixer.music.load(umusfn) - pygame.mixer.music.load(bmusfn) - - def test_load_object(self): - """test loading music from file-like objects.""" - formats = ["ogg", "wav"] - data_fname = example_path("data") - for f in formats: - path = os.path.join(data_fname, "house_lo.%s" % f) - if os.sep == "\\": - path = path.replace("\\", "\\\\") - bmusfn = path.encode() - - with open(bmusfn, "rb") as musf: - pygame.mixer.music.load(musf) - - def test_object_namehint(self): - """test loading & queuing music from file-like objects with namehint argument.""" - formats = ["wav", "ogg"] - data_fname = example_path("data") - for f in formats: - path = os.path.join(data_fname, "house_lo.%s" % f) - if os.sep == "\\": - path = path.replace("\\", "\\\\") - bmusfn = path.encode() - - # these two "with open" blocks need to be separate, which is kinda weird - with open(bmusfn, "rb") as musf: - pygame.mixer.music.load(musf, f) - - with open(bmusfn, "rb") as musf: - pygame.mixer.music.queue(musf, f) - - with open(bmusfn, "rb") as musf: - pygame.mixer.music.load(musf, namehint=f) - - with open(bmusfn, "rb") as musf: - pygame.mixer.music.queue(musf, namehint=f) - - def test_load_unicode(self): - """test non-ASCII unicode path""" - import shutil - - ep = example_path("data") - temp_file = os.path.join(ep, u"你好.wav") - org_file = os.path.join(ep, u"house_lo.wav") - try: - with open(temp_file, "w") as f: - pass - os.remove(temp_file) - except IOError: - raise unittest.SkipTest("the path cannot be opened") - shutil.copy(org_file, temp_file) - try: - pygame.mixer.music.load(temp_file) - pygame.mixer.music.load(org_file) # unload - finally: - os.remove(temp_file) - - def test_unload(self): - import shutil - import tempfile - - ep = example_path("data") - org_file = os.path.join(ep, u"house_lo.wav") - tmpfd, tmppath = tempfile.mkstemp(".wav") - os.close(tmpfd) - shutil.copy(org_file, tmppath) - try: - pygame.mixer.music.load(tmppath) - pygame.mixer.music.unload() - finally: - os.remove(tmppath) - - def test_queue_mp3(self): - """Ensures queue() accepts mp3 files. - - |tags:music| - """ - filename = example_path(os.path.join("data", "house_lo.mp3")) - pygame.mixer.music.queue(filename) - - def test_queue_ogg(self): - """Ensures queue() accepts ogg files. - - |tags:music| - """ - filename = example_path(os.path.join("data", "house_lo.ogg")) - pygame.mixer.music.queue(filename) - - def test_queue_wav(self): - """Ensures queue() accepts wav files. 
- - |tags:music| - """ - filename = example_path(os.path.join("data", "house_lo.wav")) - pygame.mixer.music.queue(filename) - - def test_queue__multiple_calls(self): - """Ensures queue() can be called multiple times.""" - ogg_file = example_path(os.path.join("data", "house_lo.ogg")) - wav_file = example_path(os.path.join("data", "house_lo.wav")) - - pygame.mixer.music.queue(ogg_file) - pygame.mixer.music.queue(wav_file) - - def test_queue__arguments(self): - """Ensures queue() can be called with proper arguments.""" - wav_file = example_path(os.path.join("data", "house_lo.wav")) - - pygame.mixer.music.queue(wav_file, loops=2) - pygame.mixer.music.queue(wav_file, namehint="") - pygame.mixer.music.queue(wav_file, "") - pygame.mixer.music.queue(wav_file, "", 2) - - def test_queue__no_file(self): - """Ensures queue() correctly handles missing the file argument.""" - with self.assertRaises(TypeError): - pygame.mixer.music.queue() - - def test_queue__invalid_sound_type(self): - """Ensures queue() correctly handles invalid file types.""" - not_a_sound_file = example_path(os.path.join("data", "city.png")) - - with self.assertRaises(pygame.error): - pygame.mixer.music.queue(not_a_sound_file) - - def test_queue__invalid_filename(self): - """Ensures queue() correctly handles invalid filenames.""" - with self.assertRaises(pygame.error): - pygame.mixer.music.queue("") - - def test_music_pause__unpause(self): - """Ensure music has the correct position immediately after unpausing - - |tags:music| - """ - filename = example_path(os.path.join("data", "house_lo.mp3")) - pygame.mixer.music.load(filename) - pygame.mixer.music.play() - - # Wait 0.05s, then pause - time.sleep(0.05) - pygame.mixer.music.pause() - # Wait 0.05s, get position, unpause, then get position again - time.sleep(0.05) - before_unpause = pygame.mixer.music.get_pos() - pygame.mixer.music.unpause() - after_unpause = pygame.mixer.music.get_pos() - - self.assertEqual(before_unpause, after_unpause) - - def todo_test_stop(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.stop: - - # Stops the music playback if it is currently playing. - - self.fail() - - def todo_test_rewind(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.rewind: - - # Resets playback of the current music to the beginning. - - self.fail() - - def todo_test_get_pos(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.get_pos: - - # This gets the number of milliseconds that the music has been playing - # for. The returned time only represents how long the music has been - # playing; it does not take into account any starting position - # offsets. - # - - self.fail() - - def todo_test_fadeout(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.fadeout: - - # This will stop the music playback after it has been faded out over - # the specified time (measured in milliseconds). - # - # Note, that this function blocks until the music has faded out. 
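# The deleted test_load_object/test_object_namehint cases above show that
# pygame.mixer.music.load() and queue() also accept file-like objects, with a
# `namehint` telling SDL_mixer the format when there is no filename to sniff.
# A hedged sketch of that pattern; "intro.ogg" and "loop.ogg" are placeholder
# paths, an audio device is assumed, and keeping the handles open during
# playback is a precaution, not something the deleted tests exercise (they
# only load and queue, never play).
import pygame

pygame.mixer.init()
intro = open("intro.ogg", "rb")    # placeholder file object
loop = open("loop.ogg", "rb")      # placeholder file object
pygame.mixer.music.load(intro, namehint="ogg")
pygame.mixer.music.queue(loop, namehint="ogg")
pygame.mixer.music.play()
# ... later, once playback is done:
pygame.mixer.music.unload()
intro.close()
loop.close()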
- - self.fail() - - @unittest.skipIf( - os.environ.get("SDL_AUDIODRIVER") == "disk", - 'disk audio driver "playback" writing to disk is slow', - ) - def test_play__start_time(self): - - pygame.display.init() - - # music file is 7 seconds long - filename = example_path(os.path.join("data", "house_lo.ogg")) - pygame.mixer.music.load(filename) - start_time_in_seconds = 6.0 # 6 seconds - - music_finished = False - clock = pygame.time.Clock() - start_time_in_ms = clock.tick() - # should play the last 1 second - pygame.mixer.music.play(0, start=start_time_in_seconds) - running = True - while running: - pygame.event.pump() - - if not (pygame.mixer.music.get_busy() or music_finished): - music_finished = True - time_to_finish = (clock.tick() - start_time_in_ms) // 1000 - self.assertEqual(time_to_finish, 1) - running = False - - def todo_test_play(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.play: - - # This will play the loaded music stream. If the music is already - # playing it will be restarted. - # - # The loops argument controls the number of repeats a music will play. - # play(5) will cause the music to played once, then repeated five - # times, for a total of six. If the loops is -1 then the music will - # repeat indefinitely. - # - # The starting position argument controls where in the music the song - # starts playing. The starting position is dependent on the format of - # music playing. MP3 and OGG use the position as time (in seconds). - # MOD music it is the pattern order number. Passing a startpos will - # raise a NotImplementedError if it cannot set the start position - # - - self.fail() - - def todo_test_load(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.load: - - # This will load a music file and prepare it for playback. If a music - # stream is already playing it will be stopped. This does not start - # the music playing. - # - # Music can only be loaded from filenames, not python file objects - # like the other pygame loading functions. - # - - self.fail() - - def todo_test_get_volume(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.get_volume: - - # Returns the current volume for the mixer. The value will be between - # 0.0 and 1.0. - # - - self.fail() - - def todo_test_set_endevent(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.set_endevent: - - # This causes Pygame to signal (by means of the event queue) when the - # music is done playing. The argument determines the type of event - # that will be queued. - # - # The event will be queued every time the music finishes, not just the - # first time. To stop the event from being queued, call this method - # with no argument. - # - - self.fail() - - def todo_test_pause(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.pause: - - # Temporarily stop playback of the music stream. It can be resumed - # with the pygame.mixer.music.unpause() function. - # - - self.fail() - - def test_get_busy(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.get_busy: - - # Returns True when the music stream is actively playing. When the - # music is idle this returns False. 
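# The surrounding pause/get_busy cases describe the intended state machine:
# play() makes get_busy() return True, pause() drops it back to False, and
# get_pos() keeps reporting the milliseconds played so far. A hedged polling
# sketch; "track.ogg" is a placeholder path and an audio device is assumed.
import time
import pygame

pygame.mixer.init()
pygame.mixer.music.load("track.ogg")    # placeholder path
pygame.mixer.music.play()
while pygame.mixer.music.get_busy():
    print("elapsed ms:", pygame.mixer.music.get_pos())
    time.sleep(0.25)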
- # - - self.music_load("ogg") - self.assertFalse(pygame.mixer.music.get_busy()) - pygame.mixer.music.play() - self.assertTrue(pygame.mixer.music.get_busy()) - pygame.mixer.music.pause() - self.assertFalse(pygame.mixer.music.get_busy()) - - def todo_test_get_endevent(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.get_endevent: - - # Returns the event type to be sent every time the music finishes - # playback. If there is no endevent the function returns - # pygame.NOEVENT. - # - - self.fail() - - def todo_test_unpause(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.unpause: - - # This will resume the playback of a music stream after it has been paused. - - self.fail() - - def todo_test_set_volume(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer_music.set_volume: - - # Set the volume of the music playback. The value argument is between - # 0.0 and 1.0. When new music is loaded the volume is reset. - # - - self.fail() - - def todo_test_set_pos(self): - - # __doc__ (as of 2010-24-05) for pygame.mixer_music.set_pos: - - # This sets the position in the music file where playback will start. The - # meaning of "pos", a float (or a number that can be converted to a float), - # depends on the music format. Newer versions of SDL_mixer have better - # positioning support than earlier. An SDLError is raised if a particular - # format does not support positioning. - # - - self.fail() - - def test_init(self): - """issue #955. unload music whenever mixer.quit() is called""" - import tempfile - import shutil - - testfile = example_path(os.path.join("data", "house_lo.wav")) - tempcopy = os.path.join(tempfile.gettempdir(), "tempfile.wav") - - for i in range(10): - pygame.mixer.init() - try: - shutil.copy2(testfile, tempcopy) - pygame.mixer.music.load(tempcopy) - pygame.mixer.quit() - finally: - os.remove(tempcopy) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/mixer_tags.py b/venv/Lib/site-packages/pygame/tests/mixer_tags.py deleted file mode 100644 index 06a9de2..0000000 --- a/venv/Lib/site-packages/pygame/tests/mixer_tags.py +++ /dev/null @@ -1,7 +0,0 @@ -__tags__ = [] - -import pygame -import sys - -if "pygame.mixer" not in sys.modules: - __tags__.extend(("ignore", "subprocess_ignore")) diff --git a/venv/Lib/site-packages/pygame/tests/mixer_test.py b/venv/Lib/site-packages/pygame/tests/mixer_test.py deleted file mode 100644 index 0d5985d..0000000 --- a/venv/Lib/site-packages/pygame/tests/mixer_test.py +++ /dev/null @@ -1,1193 +0,0 @@ -# -*- coding: utf8 -*- - -import sys -import os -import unittest -import pathlib -import platform - -from pygame.tests.test_utils import example_path, AssertRaisesRegexMixin - -import pygame -from pygame import mixer - -IS_PYPY = "PyPy" == platform.python_implementation() - -################################### CONSTANTS ################################## - -FREQUENCIES = [11025, 22050, 44100, 48000] -SIZES = [-16, -8, 8, 16] # fixme -# size 32 failed in test_get_init__returns_exact_values_used_for_init -CHANNELS = [1, 2] -BUFFERS = [3024] - -CONFIGS = [ - {"frequency": f, "size": s, "channels": c} - for f in FREQUENCIES - for s in SIZES - for c in CHANNELS -] -# Using all CONFIGS fails on a Mac; probably older SDL_mixer; we could do: -# if platform.system() == 'Darwin': -# But using all CONFIGS is very slow (> 10 sec for example) -# And probably, we don't need to be so exhaustive, hence: - -CONFIG = {"frequency": 44100, "size": 32, "channels": 2, "allowedchanges": 0} - - -class 
InvalidBool(object): - """To help test invalid bool values.""" - - __nonzero__ = None - __bool__ = None - - -############################## MODULE LEVEL TESTS ############################# - - -class MixerModuleTest(unittest.TestCase): - def tearDown(self): - mixer.quit() - mixer.pre_init(0, 0, 0, 0) - - def test_init__keyword_args(self): - # note: this test used to loop over all CONFIGS, but it's very slow.. - mixer.init(**CONFIG) - mixer_conf = mixer.get_init() - - self.assertEqual(mixer_conf[0], CONFIG["frequency"]) - # Not all "sizes" are supported on all systems, hence "abs". - self.assertEqual(abs(mixer_conf[1]), abs(CONFIG["size"])) - self.assertGreaterEqual(mixer_conf[2], CONFIG["channels"]) - - def test_pre_init__keyword_args(self): - # note: this test used to loop over all CONFIGS, but it's very slow.. - mixer.pre_init(**CONFIG) - mixer.init() - - mixer_conf = mixer.get_init() - - self.assertEqual(mixer_conf[0], CONFIG["frequency"]) - # Not all "sizes" are supported on all systems, hence "abs". - self.assertEqual(abs(mixer_conf[1]), abs(CONFIG["size"])) - self.assertGreaterEqual(mixer_conf[2], CONFIG["channels"]) - - def test_pre_init__zero_values(self): - # Ensure that argument values of 0 are replaced with - # default values. No way to check buffer size though. - mixer.pre_init(22050, -8, 1) # Non default values - mixer.pre_init(0, 0, 0) # Should reset to default values - mixer.init(allowedchanges=0) - self.assertEqual(mixer.get_init()[0], 44100) - self.assertEqual(mixer.get_init()[1], -16) - self.assertGreaterEqual(mixer.get_init()[2], 2) - - def test_init__zero_values(self): - # Ensure that argument values of 0 are replaced with - # preset values. No way to check buffer size though. - mixer.pre_init(44100, 8, 1, allowedchanges=0) # None default values - mixer.init(0, 0, 0) - self.assertEqual(mixer.get_init(), (44100, 8, 1)) - - def test_get_init__returns_exact_values_used_for_init(self): - # TODO: size 32 fails in this test (maybe SDL_mixer bug) - - for init_conf in CONFIGS: - frequency, size, channels = init_conf.values() - if (frequency, size) == (22050, 16): - continue - mixer.init(frequency, size, channels) - - mixer_conf = mixer.get_init() - - self.assertEqual(tuple(init_conf.values()), mixer_conf) - mixer.quit() - - def test_get_init__returns_None_if_mixer_not_initialized(self): - self.assertIsNone(mixer.get_init()) - - def test_get_num_channels__defaults_eight_after_init(self): - mixer.init() - self.assertEqual(mixer.get_num_channels(), 8) - - def test_set_num_channels(self): - mixer.init() - - default_num_channels = mixer.get_num_channels() - for i in range(1, default_num_channels + 1): - mixer.set_num_channels(i) - self.assertEqual(mixer.get_num_channels(), i) - - def test_quit(self): - """get_num_channels() Should throw pygame.error if uninitialized - after mixer.quit()""" - mixer.init() - mixer.quit() - self.assertRaises(pygame.error, mixer.get_num_channels) - - # TODO: FIXME: appveyor and pypy (on linux) fails here sometimes. 
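A minimal sketch (not taken from the deleted file) of the pre_init/init/get_init flow these module-level tests exercise.

from pygame import mixer

mixer.pre_init(44100, -16, 2)                  # request a format before init; zero values fall back to defaults
mixer.init()                                   # opens the audio device
frequency, size, channels = mixer.get_init()   # the device may not honour the request exactly
mixer.set_num_channels(16)                     # the mixer starts with 8 playback channels
mixer.quit()                                   # after quit(), calls such as get_num_channels() raise pygame.error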
- @unittest.skipIf(sys.platform.startswith("win"), "See github issue 892.") - @unittest.skipIf(IS_PYPY, "random errors here with pypy") - def test_sound_args(self): - def get_bytes(snd): - return snd.get_raw() - - mixer.init() - - sample = b"\x00\xff" * 24 - wave_path = example_path(os.path.join("data", "house_lo.wav")) - uwave_path = str(wave_path) - bwave_path = uwave_path.encode(sys.getfilesystemencoding()) - snd = mixer.Sound(file=wave_path) - self.assertTrue(snd.get_length() > 0.5) - snd_bytes = get_bytes(snd) - self.assertTrue(len(snd_bytes) > 1000) - - self.assertEqual(get_bytes(mixer.Sound(wave_path)), snd_bytes) - - self.assertEqual(get_bytes(mixer.Sound(file=uwave_path)), snd_bytes) - self.assertEqual(get_bytes(mixer.Sound(uwave_path)), snd_bytes) - arg_emsg = "Sound takes either 1 positional or 1 keyword argument" - - with self.assertRaises(TypeError) as cm: - mixer.Sound() - self.assertEqual(str(cm.exception), arg_emsg) - with self.assertRaises(TypeError) as cm: - mixer.Sound(wave_path, buffer=sample) - self.assertEqual(str(cm.exception), arg_emsg) - with self.assertRaises(TypeError) as cm: - mixer.Sound(sample, file=wave_path) - self.assertEqual(str(cm.exception), arg_emsg) - with self.assertRaises(TypeError) as cm: - mixer.Sound(buffer=sample, file=wave_path) - self.assertEqual(str(cm.exception), arg_emsg) - - with self.assertRaises(TypeError) as cm: - mixer.Sound(foobar=sample) - self.assertEqual(str(cm.exception), "Unrecognized keyword argument 'foobar'") - - snd = mixer.Sound(wave_path, **{}) - self.assertEqual(get_bytes(snd), snd_bytes) - snd = mixer.Sound(*[], **{"file": wave_path}) - - with self.assertRaises(TypeError) as cm: - mixer.Sound([]) - self.assertEqual(str(cm.exception), "Unrecognized argument (type list)") - - with self.assertRaises(TypeError) as cm: - snd = mixer.Sound(buffer=[]) - emsg = "Expected object with buffer interface: got a list" - self.assertEqual(str(cm.exception), emsg) - - ufake_path = str("12345678") - self.assertRaises(IOError, mixer.Sound, ufake_path) - self.assertRaises(IOError, mixer.Sound, "12345678") - - with self.assertRaises(TypeError) as cm: - mixer.Sound(buffer=str("something")) - emsg = "Unicode object not allowed as buffer object" - self.assertEqual(str(cm.exception), emsg) - self.assertEqual(get_bytes(mixer.Sound(buffer=sample)), sample) - if type(sample) != str: - somebytes = get_bytes(mixer.Sound(sample)) - # on python 2 we do not allow using string except as file name. 
- self.assertEqual(somebytes, sample) - self.assertEqual(get_bytes(mixer.Sound(file=bwave_path)), snd_bytes) - self.assertEqual(get_bytes(mixer.Sound(bwave_path)), snd_bytes) - - snd = mixer.Sound(wave_path) - with self.assertRaises(TypeError) as cm: - mixer.Sound(wave_path, array=snd) - self.assertEqual(str(cm.exception), arg_emsg) - with self.assertRaises(TypeError) as cm: - mixer.Sound(buffer=sample, array=snd) - self.assertEqual(str(cm.exception), arg_emsg) - snd2 = mixer.Sound(array=snd) - self.assertEqual(snd.get_raw(), snd2.get_raw()) - - def test_sound_unicode(self): - """test non-ASCII unicode path""" - mixer.init() - import shutil - - ep = example_path("data") - temp_file = os.path.join(ep, u"你好.wav") - org_file = os.path.join(ep, u"house_lo.wav") - shutil.copy(org_file, temp_file) - try: - with open(temp_file, "rb") as f: - pass - except IOError: - raise unittest.SkipTest("the path cannot be opened") - - try: - sound = mixer.Sound(temp_file) - del sound - finally: - os.remove(temp_file) - - @unittest.skipIf( - os.environ.get("SDL_AUDIODRIVER") == "disk", - "this test fails without real sound card", - ) - def test_array_keyword(self): - try: - from numpy import ( - array, - arange, - zeros, - int8, - uint8, - int16, - uint16, - int32, - uint32, - ) - except ImportError: - self.skipTest("requires numpy") - - freq = 22050 - format_list = [-8, 8, -16, 16] - channels_list = [1, 2] - - a_lists = dict((f, []) for f in format_list) - a32u_mono = arange(0, 256, 1, uint32) - a16u_mono = a32u_mono.astype(uint16) - a8u_mono = a32u_mono.astype(uint8) - au_list_mono = [(1, a) for a in [a8u_mono, a16u_mono, a32u_mono]] - for format in format_list: - if format > 0: - a_lists[format].extend(au_list_mono) - a32s_mono = arange(-128, 128, 1, int32) - a16s_mono = a32s_mono.astype(int16) - a8s_mono = a32s_mono.astype(int8) - as_list_mono = [(1, a) for a in [a8s_mono, a16s_mono, a32s_mono]] - for format in format_list: - if format < 0: - a_lists[format].extend(as_list_mono) - a32u_stereo = zeros([a32u_mono.shape[0], 2], uint32) - a32u_stereo[:, 0] = a32u_mono - a32u_stereo[:, 1] = 255 - a32u_mono - a16u_stereo = a32u_stereo.astype(uint16) - a8u_stereo = a32u_stereo.astype(uint8) - au_list_stereo = [(2, a) for a in [a8u_stereo, a16u_stereo, a32u_stereo]] - for format in format_list: - if format > 0: - a_lists[format].extend(au_list_stereo) - a32s_stereo = zeros([a32s_mono.shape[0], 2], int32) - a32s_stereo[:, 0] = a32s_mono - a32s_stereo[:, 1] = -1 - a32s_mono - a16s_stereo = a32s_stereo.astype(int16) - a8s_stereo = a32s_stereo.astype(int8) - as_list_stereo = [(2, a) for a in [a8s_stereo, a16s_stereo, a32s_stereo]] - for format in format_list: - if format < 0: - a_lists[format].extend(as_list_stereo) - - for format in format_list: - for channels in channels_list: - try: - mixer.init(freq, format, channels) - except pygame.error: - # Some formats (e.g. 16) may not be supported. - continue - try: - __, f, c = mixer.get_init() - if f != format or c != channels: - # Some formats (e.g. -8) may not be supported. 
- continue - for c, a in a_lists[format]: - self._test_array_argument(format, a, c == channels) - finally: - mixer.quit() - - def _test_array_argument(self, format, a, test_pass): - from numpy import array, all as all_ - - try: - snd = mixer.Sound(array=a) - except ValueError: - if not test_pass: - return - self.fail("Raised ValueError: Format %i, dtype %s" % (format, a.dtype)) - if not test_pass: - self.fail( - "Did not raise ValueError: Format %i, dtype %s" % (format, a.dtype) - ) - a2 = array(snd) - a3 = a.astype(a2.dtype) - lshift = abs(format) - 8 * a.itemsize - if lshift >= 0: - # This is asymmetric with respect to downcasting. - a3 <<= lshift - self.assertTrue(all_(a2 == a3), "Format %i, dtype %s" % (format, a.dtype)) - - def _test_array_interface_fail(self, a): - self.assertRaises(ValueError, mixer.Sound, array=a) - - def test_array_interface(self): - mixer.init(22050, -16, 1, allowedchanges=0) - snd = mixer.Sound(buffer=b"\x00\x7f" * 20) - d = snd.__array_interface__ - self.assertTrue(isinstance(d, dict)) - if pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN: - typestr = "") if is_lil_endian else (">", "<") - shape = (10, channels)[:ndim] - strides = (channels * itemsize, itemsize)[2 - ndim :] - exp = Exporter(shape, format=frev + "i") - snd = mixer.Sound(array=exp) - buflen = len(exp) * itemsize * channels - imp = Importer(snd, buftools.PyBUF_SIMPLE) - self.assertEqual(imp.ndim, 0) - self.assertTrue(imp.format is None) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_WRITABLE) - self.assertEqual(imp.ndim, 0) - self.assertTrue(imp.format is None) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_FORMAT) - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.format, format) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_ND) - self.assertEqual(imp.ndim, ndim) - self.assertTrue(imp.format is None) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, itemsize) - self.assertEqual(imp.shape, shape) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_STRIDES) - self.assertEqual(imp.ndim, ndim) - self.assertTrue(imp.format is None) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, itemsize) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_FULL_RO) - self.assertEqual(imp.ndim, ndim) - self.assertEqual(imp.format, format) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, 2) - self.assertEqual(imp.shape, shape) - 
self.assertEqual(imp.strides, strides) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_FULL_RO) - self.assertEqual(imp.ndim, ndim) - self.assertEqual(imp.format, format) - self.assertEqual(imp.len, buflen) - self.assertEqual(imp.itemsize, itemsize) - self.assertEqual(imp.shape, exp.shape) - self.assertEqual(imp.strides, strides) - self.assertTrue(imp.suboffsets is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.buf, snd._samples_address) - imp = Importer(snd, buftools.PyBUF_C_CONTIGUOUS) - self.assertEqual(imp.ndim, ndim) - self.assertTrue(imp.format is None) - self.assertEqual(imp.strides, strides) - imp = Importer(snd, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertEqual(imp.ndim, ndim) - self.assertTrue(imp.format is None) - self.assertEqual(imp.strides, strides) - if ndim == 1: - imp = Importer(snd, buftools.PyBUF_F_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - self.assertTrue(imp.format is None) - self.assertEqual(imp.strides, strides) - else: - self.assertRaises(BufferError, Importer, snd, buftools.PyBUF_F_CONTIGUOUS) - - def todo_test_fadeout(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.fadeout: - - # pygame.mixer.fadeout(time): return None - # fade out the volume on all sounds before stopping - # - # This will fade out the volume on all active channels over the time - # argument in milliseconds. After the sound is muted the playback will - # stop. - # - - self.fail() - - def test_find_channel(self): - # __doc__ (as of 2008-08-02) for pygame.mixer.find_channel: - - # pygame.mixer.find_channel(force=False): return Channel - # find an unused channel - mixer.init() - - filename = example_path(os.path.join("data", "house_lo.wav")) - sound = mixer.Sound(file=filename) - - num_channels = mixer.get_num_channels() - - if num_channels > 0: - found_channel = mixer.find_channel() - self.assertIsNotNone(found_channel) - - # try playing on all channels - channels = [] - for channel_id in range(0, num_channels): - channel = mixer.Channel(channel_id) - channel.play(sound) - channels.append(channel) - - # should fail without being forceful - found_channel = mixer.find_channel() - self.assertIsNone(found_channel) - - # try forcing without keyword - found_channel = mixer.find_channel(True) - self.assertIsNotNone(found_channel) - - # try forcing with keyword - found_channel = mixer.find_channel(force=True) - self.assertIsNotNone(found_channel) - - for channel in channels: - channel.stop() - found_channel = mixer.find_channel() - self.assertIsNotNone(found_channel) - - def todo_test_get_busy(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.get_busy: - - # pygame.mixer.get_busy(): return bool - # test if any sound is being mixed - # - # Returns True if the mixer is busy mixing any channels. If the mixer - # is idle then this return False. - # - - self.fail() - - def todo_test_pause(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.pause: - - # pygame.mixer.pause(): return None - # temporarily stop playback of all sound channels - # - # This will temporarily stop all playback on the active mixer - # channels. 
The playback can later be resumed with - # pygame.mixer.unpause() - # - - self.fail() - - def test_set_reserved(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.set_reserved: - - # pygame.mixer.set_reserved(count): return count - mixer.init() - default_num_channels = mixer.get_num_channels() - - # try reserving all the channels - result = mixer.set_reserved(default_num_channels) - self.assertEqual(result, default_num_channels) - - # try reserving all the channels + 1 - result = mixer.set_reserved(default_num_channels + 1) - # should still be default - self.assertEqual(result, default_num_channels) - - # try unreserving all - result = mixer.set_reserved(0) - # should still be default - self.assertEqual(result, 0) - - # try reserving half - result = mixer.set_reserved(int(default_num_channels / 2)) - # should still be default - self.assertEqual(result, int(default_num_channels / 2)) - - def todo_test_stop(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.stop: - - # pygame.mixer.stop(): return None - # stop playback of all sound channels - # - # This will stop all playback of all active mixer channels. - - self.fail() - - def todo_test_unpause(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.unpause: - - # pygame.mixer.unpause(): return None - # resume paused playback of sound channels - # - # This will resume all active sound channels after they have been paused. - - self.fail() - - def test_get_sdl_mixer_version(self): - """Ensures get_sdl_mixer_version works correctly with no args.""" - expected_length = 3 - expected_type = tuple - expected_item_type = int - - version = pygame.mixer.get_sdl_mixer_version() - - self.assertIsInstance(version, expected_type) - self.assertEqual(len(version), expected_length) - - for item in version: - self.assertIsInstance(item, expected_item_type) - - def test_get_sdl_mixer_version__args(self): - """Ensures get_sdl_mixer_version works correctly using args.""" - expected_length = 3 - expected_type = tuple - expected_item_type = int - - for value in (True, False): - version = pygame.mixer.get_sdl_mixer_version(value) - - self.assertIsInstance(version, expected_type) - self.assertEqual(len(version), expected_length) - - for item in version: - self.assertIsInstance(item, expected_item_type) - - def test_get_sdl_mixer_version__kwargs(self): - """Ensures get_sdl_mixer_version works correctly using kwargs.""" - expected_length = 3 - expected_type = tuple - expected_item_type = int - - for value in (True, False): - version = pygame.mixer.get_sdl_mixer_version(linked=value) - - self.assertIsInstance(version, expected_type) - self.assertEqual(len(version), expected_length) - - for item in version: - self.assertIsInstance(item, expected_item_type) - - def test_get_sdl_mixer_version__invalid_args_kwargs(self): - """Ensures get_sdl_mixer_version handles invalid args and kwargs.""" - invalid_bool = InvalidBool() - - with self.assertRaises(TypeError): - version = pygame.mixer.get_sdl_mixer_version(invalid_bool) - - with self.assertRaises(TypeError): - version = pygame.mixer.get_sdl_mixer_version(linked=invalid_bool) - - def test_get_sdl_mixer_version__linked_equals_compiled(self): - """Ensures get_sdl_mixer_version's linked/compiled versions are equal.""" - linked_version = pygame.mixer.get_sdl_mixer_version(linked=True) - complied_version = pygame.mixer.get_sdl_mixer_version(linked=False) - - self.assertTupleEqual(linked_version, complied_version) - - -############################## CHANNEL CLASS TESTS ############################# - - -class 
ChannelTypeTest(AssertRaisesRegexMixin, unittest.TestCase): - @classmethod - def setUpClass(cls): - # Initializing the mixer is slow, so minimize the times it is called. - mixer.init() - - @classmethod - def tearDownClass(cls): - mixer.quit() - - def setUp(cls): - # This makes sure the mixer is always initialized before each test (in - # case a test calls pygame.mixer.quit()). - if mixer.get_init() is None: - mixer.init() - - def test_channel(self): - """Ensure Channel() creation works.""" - channel = mixer.Channel(0) - - self.assertIsInstance(channel, mixer.ChannelType) - self.assertEqual(channel.__class__.__name__, "Channel") - - def test_channel__without_arg(self): - """Ensure exception for Channel() creation with no argument.""" - with self.assertRaises(TypeError): - mixer.Channel() - - def test_channel__invalid_id(self): - """Ensure exception for Channel() creation with an invalid id.""" - with self.assertRaises(IndexError): - mixer.Channel(-1) - - def test_channel__before_init(self): - """Ensure exception for Channel() creation with non-init mixer.""" - mixer.quit() - - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - mixer.Channel(0) - - def todo_test_fadeout(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.fadeout: - - # Channel.fadeout(time): return None - # stop playback after fading channel out - # - # Stop playback of a channel after fading out the sound over the given - # time argument in milliseconds. - # - - self.fail() - - def test_get_busy(self): - """Ensure an idle channel's busy state is correct.""" - expected_busy = False - channel = mixer.Channel(0) - - busy = channel.get_busy() - - self.assertEqual(busy, expected_busy) - - def todo_test_get_busy__active(self): - """Ensure an active channel's busy state is correct.""" - self.fail() - - def todo_test_get_endevent(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.get_endevent: - - # Channel.get_endevent(): return type - # get the event a channel sends when playback stops - # - # Returns the event type to be sent every time the Channel finishes - # playback of a Sound. If there is no endevent the function returns - # pygame.NOEVENT. - # - - self.fail() - - def todo_test_get_queue(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.get_queue: - - # Channel.get_queue(): return Sound - # return any Sound that is queued - # - # If a Sound is already queued on this channel it will be returned. - # Once the queued sound begins playback it will no longer be on the - # queue. - # - - self.fail() - - def todo_test_get_sound(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.get_sound: - - # Channel.get_sound(): return Sound - # get the currently playing Sound - # - # Return the actual Sound object currently playing on this channel. If - # the channel is idle None is returned. - # - - self.fail() - - def test_get_volume(self): - """Ensure a channel's volume can be retrieved.""" - expected_volume = 1.0 # default - channel = mixer.Channel(0) - - volume = channel.get_volume() - - self.assertAlmostEqual(volume, expected_volume) - - def todo_test_get_volume__while_playing(self): - """Ensure a channel's volume can be retrieved while playing.""" - self.fail() - - def todo_test_pause(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.pause: - - # Channel.pause(): return None - # temporarily stop playback of a channel - # - # Temporarily stop the playback of sound on a channel. 
It can be - # resumed at a later time with Channel.unpause() - # - - self.fail() - - def todo_test_play(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.play: - - # Channel.play(Sound, loops=0, maxtime=0, fade_ms=0): return None - # play a Sound on a specific Channel - # - # This will begin playback of a Sound on a specific Channel. If the - # Channel is currently playing any other Sound it will be stopped. - # - # The loops argument has the same meaning as in Sound.play(): it is - # the number of times to repeat the sound after the first time. If it - # is 3, the sound will be played 4 times (the first time, then three - # more). If loops is -1 then the playback will repeat indefinitely. - # - # As in Sound.play(), the maxtime argument can be used to stop - # playback of the Sound after a given number of milliseconds. - # - # As in Sound.play(), the fade_ms argument can be used fade in the sound. - - self.fail() - - def todo_test_queue(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.queue: - - # Channel.queue(Sound): return None - # queue a Sound object to follow the current - # - # When a Sound is queued on a Channel, it will begin playing - # immediately after the current Sound is finished. Each channel can - # only have a single Sound queued at a time. The queued Sound will - # only play if the current playback finished automatically. It is - # cleared on any other call to Channel.stop() or Channel.play(). - # - # If there is no sound actively playing on the Channel then the Sound - # will begin playing immediately. - # - - self.fail() - - def todo_test_set_endevent(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.set_endevent: - - # Channel.set_endevent(): return None - # Channel.set_endevent(type): return None - # have the channel send an event when playback stops - # - # When an endevent is set for a channel, it will send an event to the - # pygame queue every time a sound finishes playing on that channel - # (not just the first time). Use pygame.event.get() to retrieve the - # endevent once it's sent. - # - # Note that if you called Sound.play(n) or Channel.play(sound,n), the - # end event is sent only once: after the sound has been played "n+1" - # times (see the documentation of Sound.play). - # - # If Channel.stop() or Channel.play() is called while the sound was - # still playing, the event will be posted immediately. - # - # The type argument will be the event id sent to the queue. This can - # be any valid event type, but a good choice would be a value between - # pygame.locals.USEREVENT and pygame.locals.NUMEVENTS. If no type - # argument is given then the Channel will stop sending endevents. - # - - self.fail() - - def todo_test_set_volume(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.set_volume: - - # Channel.set_volume(value): return None - # Channel.set_volume(left, right): return None - # set the volume of a playing channel - # - # Set the volume (loudness) of a playing sound. When a channel starts - # to play its volume value is reset. This only affects the current - # sound. The value argument is between 0.0 and 1.0. - # - # If one argument is passed, it will be the volume of both speakers. - # If two arguments are passed and the mixer is in stereo mode, the - # first argument will be the volume of the left speaker and the second - # will be the volume of the right speaker. (If the second argument is - # None, the first argument will be the volume of both speakers.) 
- # - # If the channel is playing a Sound on which set_volume() has also - # been called, both calls are taken into account. For example: - # - # sound = pygame.mixer.Sound("s.wav") - # channel = s.play() # Sound plays at full volume by default - # sound.set_volume(0.9) # Now plays at 90% of full volume. - # sound.set_volume(0.6) # Now plays at 60% (previous value replaced). - # channel.set_volume(0.5) # Now plays at 30% (0.6 * 0.5). - - self.fail() - - def todo_test_stop(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.stop: - - # Channel.stop(): return None - # stop playback on a Channel - # - # Stop sound playback on a channel. After playback is stopped the - # channel becomes available for new Sounds to play on it. - # - - self.fail() - - def todo_test_unpause(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Channel.unpause: - - # Channel.unpause(): return None - # resume pause playback of a channel - # - # Resume the playback on a paused channel. - - self.fail() - - -############################### SOUND CLASS TESTS ############################## - - -class SoundTypeTest(AssertRaisesRegexMixin, unittest.TestCase): - @classmethod - def tearDownClass(cls): - mixer.quit() - - def setUp(cls): - # This makes sure the mixer is always initialized before each test (in - # case a test calls pygame.mixer.quit()). - if mixer.get_init() is None: - mixer.init() - - # See MixerModuleTest's methods test_sound_args(), test_sound_unicode(), - # and test_array_keyword() for additional testing of Sound() creation. - def test_sound(self): - """Ensure Sound() creation with a filename works.""" - filename = example_path(os.path.join("data", "house_lo.wav")) - sound1 = mixer.Sound(filename) - sound2 = mixer.Sound(file=filename) - - self.assertIsInstance(sound1, mixer.Sound) - self.assertIsInstance(sound2, mixer.Sound) - - def test_sound__from_file_object(self): - """Ensure Sound() creation with a file object works.""" - filename = example_path(os.path.join("data", "house_lo.wav")) - - # Using 'with' ensures the file is closed even if test fails. 
- with open(filename, "rb") as file_obj: - sound = mixer.Sound(file_obj) - - self.assertIsInstance(sound, mixer.Sound) - - def test_sound__from_sound_object(self): - """Ensure Sound() creation with a Sound() object works.""" - filename = example_path(os.path.join("data", "house_lo.wav")) - sound_obj = mixer.Sound(file=filename) - - sound = mixer.Sound(sound_obj) - - self.assertIsInstance(sound, mixer.Sound) - - def test_sound__from_pathlib(self): - """Ensure Sound() creation with a pathlib.Path object works.""" - path = pathlib.Path(example_path(os.path.join("data", "house_lo.wav"))) - sound1 = mixer.Sound(path) - sound2 = mixer.Sound(file=path) - self.assertIsInstance(sound1, mixer.Sound) - self.assertIsInstance(sound2, mixer.Sound) - - def todo_test_sound__from_buffer(self): - """Ensure Sound() creation with a buffer works.""" - self.fail() - - def todo_test_sound__from_array(self): - """Ensure Sound() creation with an array works.""" - self.fail() - - def test_sound__without_arg(self): - """Ensure exception raised for Sound() creation with no argument.""" - with self.assertRaises(TypeError): - mixer.Sound() - - def test_sound__before_init(self): - """Ensure exception raised for Sound() creation with non-init mixer.""" - mixer.quit() - filename = example_path(os.path.join("data", "house_lo.wav")) - - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - mixer.Sound(file=filename) - - @unittest.skipIf(IS_PYPY, "pypy skip") - def test_samples_address(self): - """Test the _samples_address getter.""" - try: - from ctypes import pythonapi, c_void_p, py_object - - Bytes_FromString = pythonapi.PyBytes_FromString - - Bytes_FromString.restype = c_void_p - Bytes_FromString.argtypes = [py_object] - samples = b"abcdefgh" # keep byte size a multiple of 4 - sample_bytes = Bytes_FromString(samples) - - snd = mixer.Sound(buffer=samples) - - self.assertNotEqual(snd._samples_address, sample_bytes) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - snd._samples_address - - def todo_test_fadeout(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Sound.fadeout: - - # Sound.fadeout(time): return None - # stop sound playback after fading out - # - # This will stop playback of the sound after fading it out over the - # time argument in milliseconds. The Sound will fade and stop on all - # actively playing channels. - # - - self.fail() - - def test_get_length(self): - """Tests if get_length returns a correct length.""" - try: - for size in SIZES: - pygame.mixer.quit() - pygame.mixer.init(size=size) - filename = example_path(os.path.join("data", "punch.wav")) - sound = mixer.Sound(file=filename) - # The sound data is in the mixer output format. So dividing the - # length of the raw sound data by the mixer settings gives - # the expected length of the sound. - sound_bytes = sound.get_raw() - mix_freq, mix_bits, mix_channels = pygame.mixer.get_init() - mix_bytes = abs(mix_bits) / 8 - expected_length = ( - float(len(sound_bytes)) / mix_freq / mix_bytes / mix_channels - ) - self.assertAlmostEqual(expected_length, sound.get_length()) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - sound.get_length() - - def test_get_num_channels(self): - """ - Tests if Sound.get_num_channels returns the correct number - of channels playing a specific sound. 
- """ - try: - filename = example_path(os.path.join("data", "house_lo.wav")) - sound = mixer.Sound(file=filename) - - self.assertEqual(sound.get_num_channels(), 0) - sound.play() - self.assertEqual(sound.get_num_channels(), 1) - sound.play() - self.assertEqual(sound.get_num_channels(), 2) - sound.stop() - self.assertEqual(sound.get_num_channels(), 0) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - sound.get_num_channels() - - def test_get_volume(self): - """Ensure a sound's volume can be retrieved.""" - try: - expected_volume = 1.0 # default - filename = example_path(os.path.join("data", "house_lo.wav")) - sound = mixer.Sound(file=filename) - - volume = sound.get_volume() - - self.assertAlmostEqual(volume, expected_volume) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - sound.get_volume() - - def todo_test_get_volume__while_playing(self): - """Ensure a sound's volume can be retrieved while playing.""" - self.fail() - - def todo_test_play(self): - - # __doc__ (as of 2008-08-02) for pygame.mixer.Sound.play: - - # Sound.play(loops=0, maxtime=0, fade_ms=0): return Channel - # begin sound playback - # - # Begin playback of the Sound (i.e., on the computer's speakers) on an - # available Channel. This will forcibly select a Channel, so playback - # may cut off a currently playing sound if necessary. - # - # The loops argument controls how many times the sample will be - # repeated after being played the first time. A value of 5 means that - # the sound will be played once, then repeated five times, and so is - # played a total of six times. The default value (zero) means the - # Sound is not repeated, and so is only played once. If loops is set - # to -1 the Sound will loop indefinitely (though you can still call - # stop() to stop it). - # - # The maxtime argument can be used to stop playback after a given - # number of milliseconds. - # - # The fade_ms argument will make the sound start playing at 0 volume - # and fade up to full volume over the time given. The sample may end - # before the fade-in is complete. - # - # This returns the Channel object for the channel that was selected. 
- - self.fail() - - def test_set_volume(self): - """Ensure a sound's volume can be set.""" - try: - float_delta = 1.0 / 128 # SDL volume range is 0 to 128 - filename = example_path(os.path.join("data", "house_lo.wav")) - sound = mixer.Sound(file=filename) - current_volume = sound.get_volume() - - # (volume_set_value : expected_volume) - volumes = ( - (-1, current_volume), # value < 0 won't change volume - (0, 0.0), - (0.01, 0.01), - (0.1, 0.1), - (0.5, 0.5), - (0.9, 0.9), - (0.99, 0.99), - (1, 1.0), - (1.1, 1.0), - (2.0, 1.0), - ) - - for volume_set_value, expected_volume in volumes: - sound.set_volume(volume_set_value) - - self.assertAlmostEqual( - sound.get_volume(), expected_volume, delta=float_delta - ) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - sound.set_volume(1) - - def todo_test_set_volume__while_playing(self): - """Ensure a sound's volume can be set while playing.""" - self.fail() - - def test_stop(self): - """Ensure stop can be called while not playing a sound.""" - try: - expected_channels = 0 - filename = example_path(os.path.join("data", "house_lo.wav")) - sound = mixer.Sound(file=filename) - - sound.stop() - - self.assertEqual(sound.get_num_channels(), expected_channels) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - sound.stop() - - def todo_test_stop__while_playing(self): - """Ensure stop stops a playing sound.""" - self.fail() - - def test_get_raw(self): - """Ensure get_raw returns the correct bytestring.""" - try: - samples = b"abcdefgh" # keep byte size a multiple of 4 - snd = mixer.Sound(buffer=samples) - - raw = snd.get_raw() - - self.assertIsInstance(raw, bytes) - self.assertEqual(raw, samples) - finally: - pygame.mixer.quit() - with self.assertRaisesRegex(pygame.error, "mixer not initialized"): - snd.get_raw() - - -##################################### MAIN ##################################### - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/mouse_test.py b/venv/Lib/site-packages/pygame/tests/mouse_test.py deleted file mode 100644 index c23035f..0000000 --- a/venv/Lib/site-packages/pygame/tests/mouse_test.py +++ /dev/null @@ -1,350 +0,0 @@ -import unittest -import os -import platform -import warnings -import pygame - - -DARWIN = "Darwin" in platform.platform() - - -class MouseTests(unittest.TestCase): - @classmethod - def setUpClass(cls): - # The display needs to be initialized for mouse functions. - pygame.display.init() - - @classmethod - def tearDownClass(cls): - pygame.display.quit() - - -class MouseModuleInteractiveTest(MouseTests): - - __tags__ = ["interactive"] - - def test_set_pos(self): - """Ensures set_pos works correctly. - Requires tester to move the mouse to be on the window. - """ - pygame.display.set_mode((500, 500)) - pygame.event.get() # Pump event queue to make window get focus on macos. - - if not pygame.mouse.get_focused(): - # The window needs to be focused for the mouse.set_pos to work on macos. 
- return - clock = pygame.time.Clock() - - expected_pos = ((10, 0), (0, 0), (499, 0), (499, 499), (341, 143), (94, 49)) - - for x, y in expected_pos: - pygame.mouse.set_pos(x, y) - pygame.event.get() - found_pos = pygame.mouse.get_pos() - - clock.tick() - time_passed = 0.0 - ready_to_test = False - - while not ready_to_test and time_passed <= 1000.0: # Avoid endless loop - time_passed += clock.tick() - for event in pygame.event.get(): - if event.type == pygame.MOUSEMOTION: - ready_to_test = True - - self.assertEqual(found_pos, (x, y)) - - -class MouseModuleTest(MouseTests): - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER", "") == "dummy", - "Cursors not supported on headless test machines", - ) - def test_get_cursor(self): - """Ensures get_cursor works correctly.""" - - # error should be raised when the display is unintialized - with self.assertRaises(pygame.error): - pygame.display.quit() - pygame.mouse.get_cursor() - - pygame.display.init() - - size = (8, 8) - hotspot = (0, 0) - xormask = (0, 96, 120, 126, 112, 96, 0, 0) - andmask = (224, 240, 254, 255, 254, 240, 96, 0) - - expected_length = 4 - expected_cursor = pygame.cursors.Cursor(size, hotspot, xormask, andmask) - pygame.mouse.set_cursor(expected_cursor) - - try: - cursor = pygame.mouse.get_cursor() - - self.assertIsInstance(cursor, pygame.cursors.Cursor) - self.assertEqual(len(cursor), expected_length) - - for info in cursor: - self.assertIsInstance(info, tuple) - - pygame.mouse.set_cursor(size, hotspot, xormask, andmask) - self.assertEqual(pygame.mouse.get_cursor(), expected_cursor) - - # SDLError should be raised when the mouse cursor is NULL - except pygame.error: - with self.assertRaises(pygame.error): - pygame.mouse.get_cursor() - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER", "") == "dummy", - "mouse.set_system_cursor only available in SDL2", - ) - def test_set_system_cursor(self): - """Ensures set_system_cursor works correctly.""" - - with warnings.catch_warnings(record=True) as w: - """From Pygame 2.0.1, set_system_cursor() should raise a deprecation warning""" - # Cause all warnings to always be triggered. 
- warnings.simplefilter("always") - - # Error should be raised when the display is uninitialized - with self.assertRaises(pygame.error): - pygame.display.quit() - pygame.mouse.set_system_cursor(pygame.SYSTEM_CURSOR_HAND) - - pygame.display.init() - - # TypeError raised when PyArg_ParseTuple fails to parse parameters - with self.assertRaises(TypeError): - pygame.mouse.set_system_cursor("b") - with self.assertRaises(TypeError): - pygame.mouse.set_system_cursor(None) - with self.assertRaises(TypeError): - pygame.mouse.set_system_cursor((8, 8), (0, 0)) - - # Right type, invalid value - with self.assertRaises(pygame.error): - pygame.mouse.set_system_cursor(2000) - - # Working as intended - self.assertEqual( - pygame.mouse.set_system_cursor(pygame.SYSTEM_CURSOR_ARROW), None - ) - - # Making sure the warnings are working properly - self.assertEqual(len(w), 6) - self.assertTrue( - all([issubclass(warn.category, DeprecationWarning) for warn in w]) - ) - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER", "") == "dummy", - "Cursors not supported on headless test machines", - ) - def test_set_cursor(self): - """Ensures set_cursor works correctly.""" - - # Bitmap cursor information - size = (8, 8) - hotspot = (0, 0) - xormask = (0, 126, 64, 64, 32, 16, 0, 0) - andmask = (254, 255, 254, 112, 56, 28, 12, 0) - bitmap_cursor = pygame.cursors.Cursor(size, hotspot, xormask, andmask) - - # System cursor information - constant = pygame.SYSTEM_CURSOR_ARROW - system_cursor = pygame.cursors.Cursor(constant) - - # Color cursor information (also uses hotspot variable from Bitmap cursor info) - surface = pygame.Surface((10, 10)) - color_cursor = pygame.cursors.Cursor(hotspot, surface) - - pygame.display.quit() - - # Bitmap: Error should be raised when the display is uninitialized - with self.assertRaises(pygame.error): - pygame.mouse.set_cursor(bitmap_cursor) - - # System: Error should be raised when the display is uninitialized - with self.assertRaises(pygame.error): - pygame.mouse.set_cursor(system_cursor) - - # Color: Error should be raised when the display is uninitialized - with self.assertRaises(pygame.error): - pygame.mouse.set_cursor(color_cursor) - - pygame.display.init() - - # Bitmap: TypeError raised when PyArg_ParseTuple fails to parse parameters - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(("w", "h"), hotspot, xormask, andmask) - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(size, ("0", "0"), xormask, andmask) - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(size, ("x", "y", "z"), xormask, andmask) - - # Bitmap: TypeError raised when either mask is not a sequence - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(size, hotspot, 12345678, andmask) - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(size, hotspot, xormask, 12345678) - - # Bitmap: TypeError raised when element of mask is not an integer - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(size, hotspot, "00000000", andmask) - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(size, hotspot, xormask, (2, [0], 4, 0, 0, 8, 0, 1)) - - # Bitmap: ValueError raised when width not divisible by 8 - with self.assertRaises(ValueError): - pygame.mouse.set_cursor((3, 8), hotspot, xormask, andmask) - - # Bitmap: ValueError raised when length of either mask != width * height / 8 - with self.assertRaises(ValueError): - pygame.mouse.set_cursor((16, 2), hotspot, (128, 64, 32), andmask) - with self.assertRaises(ValueError): - pygame.mouse.set_cursor((16, 2), 
hotspot, xormask, (192, 96, 48, 0, 1)) - - # Bitmap: Working as intended - self.assertEqual( - pygame.mouse.set_cursor((16, 1), hotspot, (8, 0), (0, 192)), None - ) - pygame.mouse.set_cursor(size, hotspot, xormask, andmask) - self.assertEqual(pygame.mouse.get_cursor(), bitmap_cursor) - - # Bitmap: Working as intended + lists + masks with no references - pygame.mouse.set_cursor(size, hotspot, list(xormask), list(andmask)) - self.assertEqual(pygame.mouse.get_cursor(), bitmap_cursor) - - # System: TypeError raised when constant is invalid - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(-50021232) - with self.assertRaises(TypeError): - pygame.mouse.set_cursor("yellow") - - # System: Working as intended - self.assertEqual(pygame.mouse.set_cursor(constant), None) - pygame.mouse.set_cursor(constant) - self.assertEqual(pygame.mouse.get_cursor(), system_cursor) - pygame.mouse.set_cursor(system_cursor) - self.assertEqual(pygame.mouse.get_cursor(), system_cursor) - - # Color: TypeError raised with invalid parameters - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(("x", "y"), surface) - with self.assertRaises(TypeError): - pygame.mouse.set_cursor(hotspot, "not_a_surface") - - # Color: Working as intended - self.assertEqual(pygame.mouse.set_cursor(hotspot, surface), None) - pygame.mouse.set_cursor(hotspot, surface) - self.assertEqual(pygame.mouse.get_cursor(), color_cursor) - pygame.mouse.set_cursor(color_cursor) - self.assertEqual(pygame.mouse.get_cursor(), color_cursor) - - # Color: Working as intended + Surface with no references is returned okay - pygame.mouse.set_cursor((0, 0), pygame.Surface((20, 20))) - cursor = pygame.mouse.get_cursor() - self.assertEqual(cursor.type, "color") - self.assertEqual(cursor.data[0], (0, 0)) - self.assertEqual(cursor.data[1].get_size(), (20, 20)) - - def test_get_focused(self): - """Ensures get_focused returns the correct type.""" - focused = pygame.mouse.get_focused() - - self.assertIsInstance(focused, int) - - def test_get_pressed(self): - """Ensures get_pressed returns the correct types.""" - expected_length = 3 - buttons_pressed = pygame.mouse.get_pressed() - self.assertIsInstance(buttons_pressed, tuple) - self.assertEqual(len(buttons_pressed), expected_length) - for value in buttons_pressed: - self.assertIsInstance(value, bool) - - expected_length = 5 - buttons_pressed = pygame.mouse.get_pressed(num_buttons=5) - self.assertIsInstance(buttons_pressed, tuple) - self.assertEqual(len(buttons_pressed), expected_length) - for value in buttons_pressed: - self.assertIsInstance(value, bool) - - expected_length = 3 - buttons_pressed = pygame.mouse.get_pressed(3) - self.assertIsInstance(buttons_pressed, tuple) - self.assertEqual(len(buttons_pressed), expected_length) - for value in buttons_pressed: - self.assertIsInstance(value, bool) - - expected_length = 5 - buttons_pressed = pygame.mouse.get_pressed(5) - self.assertIsInstance(buttons_pressed, tuple) - self.assertEqual(len(buttons_pressed), expected_length) - for value in buttons_pressed: - self.assertIsInstance(value, bool) - - with self.assertRaises(ValueError): - pygame.mouse.get_pressed(4) - - def test_get_pos(self): - """Ensures get_pos returns the correct types.""" - expected_length = 2 - - pos = pygame.mouse.get_pos() - - self.assertIsInstance(pos, tuple) - self.assertEqual(len(pos), expected_length) - for value in pos: - self.assertIsInstance(value, int) - - def test_set_pos__invalid_pos(self): - """Ensures set_pos handles invalid positions correctly.""" - for invalid_pos in ((1,), 
[1, 2, 3], 1, "1", (1, "1"), []): - - with self.assertRaises(TypeError): - pygame.mouse.set_pos(invalid_pos) - - def test_get_rel(self): - """Ensures get_rel returns the correct types.""" - expected_length = 2 - - rel = pygame.mouse.get_rel() - - self.assertIsInstance(rel, tuple) - self.assertEqual(len(rel), expected_length) - for value in rel: - self.assertIsInstance(value, int) - - def test_get_visible(self): - """Ensures get_visible works correctly.""" - for expected_value in (False, True): - pygame.mouse.set_visible(expected_value) - - visible = pygame.mouse.get_visible() - - self.assertEqual(visible, expected_value) - - def test_set_visible(self): - """Ensures set_visible returns the correct values.""" - # Set to a known state. - pygame.mouse.set_visible(True) - - for expected_visible in (False, True): - prev_visible = pygame.mouse.set_visible(expected_visible) - - self.assertEqual(prev_visible, not expected_visible) - - def test_set_visible__invalid_value(self): - """Ensures set_visible handles invalid positions correctly.""" - for invalid_value in ((1,), [1, 2, 3], 1.1, "1", (1, "1"), []): - with self.assertRaises(TypeError): - prev_visible = pygame.mouse.set_visible(invalid_value) - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/pixelarray_test.py b/venv/Lib/site-packages/pygame/tests/pixelarray_test.py deleted file mode 100644 index c93f36a..0000000 --- a/venv/Lib/site-packages/pygame/tests/pixelarray_test.py +++ /dev/null @@ -1,1660 +0,0 @@ -import sys -import platform - -try: - reduce -except NameError: - from functools import reduce -import operator -import weakref -import gc -import unittest - -from pygame.tests.test_utils import SurfaceSubclass - -try: - from pygame.tests.test_utils import arrinter -except NameError: - pass - -import pygame - - -IS_PYPY = "PyPy" == platform.python_implementation() - - -class TestMixin(object): - def assert_surfaces_equal(self, s1, s2, msg=None): - """Checks if two surfaces are equal in size and color.""" - w, h = s1.get_size() - - self.assertTupleEqual((w, h), s2.get_size(), msg) - - msg = "" if msg is None else "{}, ".format(msg) - msg += "size: ({}, {})".format(w, h) - - for x in range(w): - for y in range(h): - self.assertEqual( - s1.get_at((x, y)), - s2.get_at((x, y)), - "{}, position: ({}, {})".format(msg, x, y), - ) - - def assert_surface_filled(self, surface, expected_color, msg=None): - """Checks if the surface is filled with the given color.""" - width, height = surface.get_size() - - surface.lock() # Lock for possible speed up. - for pos in ((x, y) for y in range(height) for x in range(width)): - self.assertEqual(surface.get_at(pos), expected_color, msg) - surface.unlock() - - -@unittest.skipIf(IS_PYPY, "pypy having issues") -class PixelArrayTypeTest(unittest.TestCase, TestMixin): - def test_compare(self): - # __doc__ (as of 2008-06-25) for pygame.pixelarray.PixelArray.compare: - - # PixelArray.compare (array, distance=0, weights=(0.299, 0.587, 0.114)): Return PixelArray - # Compares the PixelArray with another one. 
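A minimal sketch (not from the deleted file) of PixelArray.compare() as exercised by the tests that follow: pixels that match within the given distance come back white, differing pixels black.

import pygame

surf_a = pygame.Surface((10, 20), 0, 32)
surf_b = pygame.Surface((10, 20), 0, 32)
surf_a.fill((0, 0, 255))
surf_b.fill((0, 0, 255))
surf_b.fill((255, 0, 0), pygame.Rect(2, 5, 6, 13))   # make a region of surf_b differ

ar_a = pygame.PixelArray(surf_a)
ar_b = pygame.PixelArray(surf_b)
diff = ar_a.compare(ar_b, distance=0)   # new PixelArray: white where equal, black where different
result = diff.surface                   # a Surface of the same size holding the comparison
ar_a.close()
ar_b.close()
diff.close()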
- - w = 10 - h = 20 - size = w, h - sf = pygame.Surface(size, 0, 32) - ar = pygame.PixelArray(sf) - sf2 = pygame.Surface(size, 0, 32) - self.assertRaises(TypeError, ar.compare, sf2) - ar2 = pygame.PixelArray(sf2) - ar3 = ar.compare(ar2) - self.assertTrue(isinstance(ar3, pygame.PixelArray)) - self.assertEqual(ar3.shape, size) - sf2.fill(pygame.Color("white")) - self.assert_surfaces_equal(sf2, ar3.surface) - del ar3 - r = pygame.Rect(2, 5, 6, 13) - sf.fill(pygame.Color("blue"), r) - sf2.fill(pygame.Color("red")) - sf2.fill(pygame.Color("blue"), r) - ar3 = ar.compare(ar2) - sf.fill(pygame.Color("white"), r) - self.assert_surfaces_equal(sf, ar3.surface) - - # FINISH ME! - # Test other bit depths, slices, and distance != 0. - - def test_compare__same_colors_within_distance(self): - """Ensures compare works correctly with same colored surfaces.""" - size = (3, 5) - pixelarray_result_color = pygame.Color("white") - surface_color = (127, 127, 127, 255) - - for depth in (8, 16, 24, 32): - expected_pixelarray_surface = pygame.Surface(size, depth=depth) - expected_pixelarray_surface.fill(pixelarray_result_color) - - # Copy the surface to ensure same dimensions/formatting. - surf_a = expected_pixelarray_surface.copy() - surf_a.fill(surface_color) - # For non-32 bit depths, the actual color can be different from what - # was filled. - expected_surface_color = surf_a.get_at((0, 0)) - - pixelarray_a = pygame.PixelArray(surf_a) - pixelarray_b = pygame.PixelArray(surf_a.copy()) - - for distance in (0.0, 0.01, 0.1, 1.0): - pixelarray_result = pixelarray_a.compare( - pixelarray_b, distance=distance - ) - - # Ensure the resulting pixelarray is correct and that the original - # surfaces were not changed. - self.assert_surfaces_equal( - pixelarray_result.surface, - expected_pixelarray_surface, - (depth, distance), - ) - self.assert_surface_filled( - pixelarray_a.surface, expected_surface_color, (depth, distance) - ) - self.assert_surface_filled( - pixelarray_b.surface, expected_surface_color, (depth, distance) - ) - - pixelarray_a.close() - pixelarray_b.close() - pixelarray_result.close() - - def test_compare__different_colors_within_distance(self): - """Ensures compare works correctly with different colored surfaces - and the color difference is within the given distance. - """ - size = (3, 5) - pixelarray_result_color = pygame.Color("white") - surface_a_color = (127, 127, 127, 255) - surface_b_color = (128, 127, 127, 255) - - for depth in (8, 16, 24, 32): - expected_pixelarray_surface = pygame.Surface(size, depth=depth) - expected_pixelarray_surface.fill(pixelarray_result_color) - - # Copy the surface to ensure same dimensions/formatting. - surf_a = expected_pixelarray_surface.copy() - surf_a.fill(surface_a_color) - # For non-32 bit depths, the actual color can be different from what - # was filled. - expected_surface_a_color = surf_a.get_at((0, 0)) - pixelarray_a = pygame.PixelArray(surf_a) - - surf_b = expected_pixelarray_surface.copy() - surf_b.fill(surface_b_color) - # For non-32 bit depths, the actual color can be different from what - # was filled. - expected_surface_b_color = surf_b.get_at((0, 0)) - pixelarray_b = pygame.PixelArray(surf_b) - - for distance in (0.2, 0.3, 0.5, 1.0): - pixelarray_result = pixelarray_a.compare( - pixelarray_b, distance=distance - ) - - # Ensure the resulting pixelarray is correct and that the original - # surfaces were not changed. 
- self.assert_surfaces_equal( - pixelarray_result.surface, - expected_pixelarray_surface, - (depth, distance), - ) - self.assert_surface_filled( - pixelarray_a.surface, expected_surface_a_color, (depth, distance) - ) - self.assert_surface_filled( - pixelarray_b.surface, expected_surface_b_color, (depth, distance) - ) - - pixelarray_a.close() - pixelarray_b.close() - pixelarray_result.close() - - def test_compare__different_colors_not_within_distance(self): - """Ensures compare works correctly with different colored surfaces - and the color difference is not within the given distance. - """ - size = (3, 5) - pixelarray_result_color = pygame.Color("black") - surface_a_color = (127, 127, 127, 255) - surface_b_color = (128, 127, 127, 255) - - for depth in (8, 16, 24, 32): - expected_pixelarray_surface = pygame.Surface(size, depth=depth) - expected_pixelarray_surface.fill(pixelarray_result_color) - - # Copy the surface to ensure same dimensions/formatting. - surf_a = expected_pixelarray_surface.copy() - surf_a.fill(surface_a_color) - # For non-32 bit depths, the actual color can be different from what - # was filled. - expected_surface_a_color = surf_a.get_at((0, 0)) - pixelarray_a = pygame.PixelArray(surf_a) - - surf_b = expected_pixelarray_surface.copy() - surf_b.fill(surface_b_color) - # For non-32 bit depths, the actual color can be different from what - # was filled. - expected_surface_b_color = surf_b.get_at((0, 0)) - pixelarray_b = pygame.PixelArray(surf_b) - - for distance in (0.0, 0.00001, 0.0001, 0.001): - pixelarray_result = pixelarray_a.compare( - pixelarray_b, distance=distance - ) - - # Ensure the resulting pixelarray is correct and that the original - # surfaces were not changed. - self.assert_surfaces_equal( - pixelarray_result.surface, - expected_pixelarray_surface, - (depth, distance), - ) - self.assert_surface_filled( - pixelarray_a.surface, expected_surface_a_color, (depth, distance) - ) - self.assert_surface_filled( - pixelarray_b.surface, expected_surface_b_color, (depth, distance) - ) - - pixelarray_a.close() - pixelarray_b.close() - pixelarray_result.close() - - def test_close(self): - """does not crash when it is deleted.""" - s = pygame.Surface((10, 10)) - a = pygame.PixelArray(s) - a.close() - del a - - def test_close_raises(self): - """when you try to do an operation after it is closed.""" - s = pygame.Surface((10, 10)) - a = pygame.PixelArray(s) - a.close() - - def do_operation(): - a[:] - - self.assertRaises(ValueError, do_operation) - - def do_operation2(): - a[:] = 1 - - self.assertRaises(ValueError, do_operation2) - - def do_operation3(): - a.make_surface() - - self.assertRaises(ValueError, do_operation3) - - def do_operation4(): - for x in a: - pass - - self.assertRaises(ValueError, do_operation4) - - def test_context_manager(self): - """closes properly.""" - s = pygame.Surface((10, 10)) - with pygame.PixelArray(s) as a: - a[:] - - def test_pixel_array(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 20), 0, bpp) - sf.fill((0, 0, 0)) - ar = pygame.PixelArray(sf) - - self.assertEqual(ar._pixels_address, sf._pixels_address) - - if sf.mustlock(): - self.assertTrue(sf.get_locked()) - - self.assertEqual(len(ar), 10) - - del ar - if sf.mustlock(): - self.assertFalse(sf.get_locked()) - - def test_as_class(self): - # Check general new-style class freatures. 
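A short sketch (not part of the deleted file) of PixelArray item access, the context-manager protocol tested above, and the column/slice assignment forms exercised in the tests that follow.

import pygame

surface = pygame.Surface((10, 20), 0, 32)
with pygame.PixelArray(surface) as pxarray:   # the surface stays locked while the array is open
    pxarray[0][0] = (255, 0, 0)               # write one pixel
    pxarray[2] = (0, 255, 0)                  # assign a whole column
    pxarray[3:5] = (0, 0, 255)                # assign a slice of columns
    mapped = pxarray[0][0]                    # reads back the mapped integer pixel value
# leaving the with-block closes the array; further access raises ValueError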
- sf = pygame.Surface((2, 3), 0, 32) - ar = pygame.PixelArray(sf) - self.assertRaises(AttributeError, getattr, ar, "nonnative") - ar.nonnative = "value" - self.assertEqual(ar.nonnative, "value") - r = weakref.ref(ar) - self.assertTrue(r() is ar) - del ar - gc.collect() - self.assertTrue(r() is None) - - class C(pygame.PixelArray): - def __str__(self): - return "string (%i, %i)" % self.shape - - ar = C(sf) - self.assertEqual(str(ar), "string (2, 3)") - r = weakref.ref(ar) - self.assertTrue(r() is ar) - del ar - gc.collect() - self.assertTrue(r() is None) - - def test_pixelarray__subclassed_surface(self): - """Ensure the PixelArray constructor accepts subclassed surfaces.""" - surface = SurfaceSubclass((3, 5), 0, 32) - pixelarray = pygame.PixelArray(surface) - - self.assertIsInstance(pixelarray, pygame.PixelArray) - - # Sequence interfaces - def test_get_column(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((6, 8), 0, bpp) - sf.fill((0, 0, 255)) - val = sf.map_rgb((0, 0, 255)) - ar = pygame.PixelArray(sf) - - ar2 = ar.__getitem__(1) - self.assertEqual(len(ar2), 8) - self.assertEqual(ar2.__getitem__(0), val) - self.assertEqual(ar2.__getitem__(1), val) - self.assertEqual(ar2.__getitem__(2), val) - - ar2 = ar.__getitem__(-1) - self.assertEqual(len(ar2), 8) - self.assertEqual(ar2.__getitem__(0), val) - self.assertEqual(ar2.__getitem__(1), val) - self.assertEqual(ar2.__getitem__(2), val) - - @unittest.skipIf(IS_PYPY, "pypy malloc abort") - def test_get_pixel(self): - w = 10 - h = 20 - size = w, h - bg_color = (0, 0, 255) - fg_color_y = (0, 0, 128) - fg_color_x = (0, 0, 11) - for bpp in (8, 16, 24, 32): - sf = pygame.Surface(size, 0, bpp) - mapped_bg_color = sf.map_rgb(bg_color) - mapped_fg_color_y = sf.map_rgb(fg_color_y) - mapped_fg_color_x = sf.map_rgb(fg_color_x) - self.assertNotEqual( - mapped_fg_color_y, - mapped_bg_color, - "Unusable test colors for bpp %i" % (bpp,), - ) - self.assertNotEqual( - mapped_fg_color_x, - mapped_bg_color, - "Unusable test colors for bpp %i" % (bpp,), - ) - self.assertNotEqual( - mapped_fg_color_y, - mapped_fg_color_x, - "Unusable test colors for bpp %i" % (bpp,), - ) - sf.fill(bg_color) - - ar = pygame.PixelArray(sf) - - ar_y = ar.__getitem__(1) - for y in range(h): - ar2 = ar_y.__getitem__(y) - self.assertEqual( - ar2, - mapped_bg_color, - "ar[1][%i] == %i, mapped_bg_color == %i" - % (y, ar2, mapped_bg_color), - ) - - sf.set_at((1, y), fg_color_y) - ar2 = ar_y.__getitem__(y) - self.assertEqual( - ar2, - mapped_fg_color_y, - "ar[1][%i] == %i, mapped_fg_color_y == %i" - % (y, ar2, mapped_fg_color_y), - ) - - sf.set_at((1, 1), bg_color) - for x in range(w): - ar2 = ar.__getitem__(x).__getitem__(1) - self.assertEqual( - ar2, - mapped_bg_color, - "ar[%i][1] = %i, mapped_bg_color = %i" % (x, ar2, mapped_bg_color), - ) - sf.set_at((x, 1), fg_color_x) - ar2 = ar.__getitem__(x).__getitem__(1) - self.assertEqual( - ar2, - mapped_fg_color_x, - "ar[%i][1] = %i, mapped_fg_color_x = %i" - % (x, ar2, mapped_fg_color_x), - ) - - ar2 = ar.__getitem__(0).__getitem__(0) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(1).__getitem__(0) - self.assertEqual(ar2, mapped_fg_color_y, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(-4).__getitem__(1) - self.assertEqual(ar2, mapped_fg_color_x, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(-4).__getitem__(5) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(-4).__getitem__(0) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - ar2 = 
ar.__getitem__(-w + 1).__getitem__(0) - self.assertEqual(ar2, mapped_fg_color_y, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(-w).__getitem__(0) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(5).__getitem__(-4) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(5).__getitem__(-h + 1) - self.assertEqual(ar2, mapped_fg_color_x, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(5).__getitem__(-h) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(0).__getitem__(-h + 1) - self.assertEqual(ar2, mapped_fg_color_x, "bpp = %i" % (bpp,)) - - ar2 = ar.__getitem__(0).__getitem__(-h) - self.assertEqual(ar2, mapped_bg_color, "bpp = %i" % (bpp,)) - - def test_set_pixel(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 20), 0, bpp) - sf.fill((0, 0, 0)) - ar = pygame.PixelArray(sf) - - ar.__getitem__(0).__setitem__(0, (0, 255, 0)) - self.assertEqual(ar[0][0], sf.map_rgb((0, 255, 0))) - - ar.__getitem__(1).__setitem__(1, (128, 128, 128)) - self.assertEqual(ar[1][1], sf.map_rgb((128, 128, 128))) - - ar.__getitem__(-1).__setitem__(-1, (128, 128, 128)) - self.assertEqual(ar[9][19], sf.map_rgb((128, 128, 128))) - - ar.__getitem__(-2).__setitem__(-2, (128, 128, 128)) - self.assertEqual(ar[8][-2], sf.map_rgb((128, 128, 128))) - - def test_set_column(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((6, 8), 0, bpp) - sf.fill((0, 0, 0)) - ar = pygame.PixelArray(sf) - - sf2 = pygame.Surface((6, 8), 0, bpp) - sf2.fill((0, 255, 255)) - ar2 = pygame.PixelArray(sf2) - - # Test single value assignment - ar.__setitem__(2, (128, 128, 128)) - self.assertEqual(ar[2][0], sf.map_rgb((128, 128, 128))) - self.assertEqual(ar[2][1], sf.map_rgb((128, 128, 128))) - - ar.__setitem__(-1, (0, 255, 255)) - self.assertEqual(ar[5][0], sf.map_rgb((0, 255, 255))) - self.assertEqual(ar[-1][1], sf.map_rgb((0, 255, 255))) - - ar.__setitem__(-2, (255, 255, 0)) - self.assertEqual(ar[4][0], sf.map_rgb((255, 255, 0))) - self.assertEqual(ar[-2][1], sf.map_rgb((255, 255, 0))) - - # Test list assignment. - ar.__setitem__(0, [(255, 255, 255)] * 8) - self.assertEqual(ar[0][0], sf.map_rgb((255, 255, 255))) - self.assertEqual(ar[0][1], sf.map_rgb((255, 255, 255))) - - # Test tuple assignment. - # Changed in Pygame 1.9.2 - Raises an exception. - self.assertRaises( - ValueError, - ar.__setitem__, - 1, - ( - (204, 0, 204), - (17, 17, 17), - (204, 0, 204), - (17, 17, 17), - (204, 0, 204), - (17, 17, 17), - (204, 0, 204), - (17, 17, 17), - ), - ) - - # Test pixel array assignment. - ar.__setitem__(1, ar2.__getitem__(3)) - self.assertEqual(ar[1][0], sf.map_rgb((0, 255, 255))) - self.assertEqual(ar[1][1], sf.map_rgb((0, 255, 255))) - - def test_get_slice(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 20), 0, bpp) - sf.fill((0, 0, 0)) - ar = pygame.PixelArray(sf) - - self.assertEqual(len(ar[0:2]), 2) - self.assertEqual(len(ar[3:7][3]), 20) - - self.assertEqual(ar[0:0], None) - self.assertEqual(ar[5:5], None) - self.assertEqual(ar[9:9], None) - - # Has to resolve to ar[7:8] - self.assertEqual(len(ar[-3:-2]), 1) # 2D - self.assertEqual(len(ar[-3:-2][0]), 20) # 1D - - # Try assignments. - - # 2D assignment. 
- ar[2:5] = (255, 255, 255) - - # 1D assignment - ar[3][3:7] = (10, 10, 10) - self.assertEqual(ar[3][5], sf.map_rgb((10, 10, 10))) - self.assertEqual(ar[3][6], sf.map_rgb((10, 10, 10))) - - @unittest.skipIf(IS_PYPY, "skipping for PyPy (segfaults on mac pypy3 6.0.0)") - def test_contains(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 20), 0, bpp) - sf.fill((0, 0, 0)) - sf.set_at((8, 8), (255, 255, 255)) - - ar = pygame.PixelArray(sf) - self.assertTrue((0, 0, 0) in ar) - self.assertTrue((255, 255, 255) in ar) - self.assertFalse((255, 255, 0) in ar) - self.assertFalse(0x0000FF in ar) - - # Test sliced array - self.assertTrue((0, 0, 0) in ar[8]) - self.assertTrue((255, 255, 255) in ar[8]) - self.assertFalse((255, 255, 0) in ar[8]) - self.assertFalse(0x0000FF in ar[8]) - - def test_get_surface(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 20), 0, bpp) - sf.fill((0, 0, 0)) - ar = pygame.PixelArray(sf) - self.assertTrue(ar.surface is sf) - - def test_get_surface__subclassed_surface(self): - """Ensure the surface attribute can handle subclassed surfaces.""" - expected_surface = SurfaceSubclass((5, 3), 0, 32) - pixelarray = pygame.PixelArray(expected_surface) - - surface = pixelarray.surface - - self.assertIs(surface, expected_surface) - self.assertIsInstance(surface, pygame.Surface) - self.assertIsInstance(surface, SurfaceSubclass) - - def test_set_slice(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((6, 8), 0, bpp) - sf.fill((0, 0, 0)) - ar = pygame.PixelArray(sf) - - # Test single value assignment - val = sf.map_rgb((128, 128, 128)) - ar[0:2] = val - self.assertEqual(ar[0][0], val) - self.assertEqual(ar[0][1], val) - self.assertEqual(ar[1][0], val) - self.assertEqual(ar[1][1], val) - - val = sf.map_rgb((0, 255, 255)) - ar[-3:-1] = val - self.assertEqual(ar[3][0], val) - self.assertEqual(ar[-2][1], val) - - val = sf.map_rgb((255, 255, 255)) - ar[-3:] = (255, 255, 255) - self.assertEqual(ar[4][0], val) - self.assertEqual(ar[-1][1], val) - - # Test array size mismatch. - # Changed in ver. 1.9.2 - # (was "Test list assignment, this is a vertical assignment.") - val = sf.map_rgb((0, 255, 0)) - self.assertRaises(ValueError, ar.__setitem__, slice(2, 4), [val] * 8) - - # And the horizontal assignment. - val = sf.map_rgb((255, 0, 0)) - val2 = sf.map_rgb((128, 0, 255)) - ar[0:2] = [val, val2] - self.assertEqual(ar[0][0], val) - self.assertEqual(ar[1][0], val2) - self.assertEqual(ar[0][1], val) - self.assertEqual(ar[1][1], val2) - self.assertEqual(ar[0][4], val) - self.assertEqual(ar[1][4], val2) - self.assertEqual(ar[0][5], val) - self.assertEqual(ar[1][5], val2) - - # Test pixelarray assignment. - ar[:] = (0, 0, 0) - sf2 = pygame.Surface((6, 8), 0, bpp) - sf2.fill((255, 0, 255)) - - val = sf.map_rgb((255, 0, 255)) - ar2 = pygame.PixelArray(sf2) - - ar[:] = ar2[:] - self.assertEqual(ar[0][0], val) - self.assertEqual(ar[5][7], val) - - # Ensure p1 ... pn are freed for array[...] = [p1, ..., pn] - # Bug fix: reference counting. - if hasattr(sys, "getrefcount"): - - class Int(int): - """Unique int instances""" - - pass - - sf = pygame.Surface((5, 2), 0, 32) - ar = pygame.PixelArray(sf) - pixel_list = [Int(i) for i in range(ar.shape[0])] - refcnts_before = [sys.getrefcount(i) for i in pixel_list] - ar[...] 
= pixel_list - refcnts_after = [sys.getrefcount(i) for i in pixel_list] - gc.collect() - self.assertEqual(refcnts_after, refcnts_before) - - def test_subscript(self): - # By default we do not need to work with any special __***__ - # methods as map subscripts are the first looked up by the - # object system. - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((6, 8), 0, bpp) - sf.set_at((1, 3), (0, 255, 0)) - sf.set_at((0, 0), (0, 255, 0)) - sf.set_at((4, 4), (0, 255, 0)) - val = sf.map_rgb((0, 255, 0)) - - ar = pygame.PixelArray(sf) - - # Test single value requests. - self.assertEqual(ar[1, 3], val) - self.assertEqual(ar[0, 0], val) - self.assertEqual(ar[4, 4], val) - self.assertEqual(ar[1][3], val) - self.assertEqual(ar[0][0], val) - self.assertEqual(ar[4][4], val) - - # Test ellipse working. - self.assertEqual(len(ar[..., ...]), 6) - self.assertEqual(len(ar[1, ...]), 8) - self.assertEqual(len(ar[..., 3]), 6) - - # Test simple slicing - self.assertEqual(len(ar[:, :]), 6) - self.assertEqual( - len( - ar[ - :, - ] - ), - 6, - ) - self.assertEqual(len(ar[1, :]), 8) - self.assertEqual(len(ar[:, 2]), 6) - # Empty slices - self.assertEqual( - ar[ - 4:4, - ], - None, - ) - self.assertEqual(ar[4:4, ...], None) - self.assertEqual(ar[4:4, 2:2], None) - self.assertEqual(ar[4:4, 1:4], None) - self.assertEqual( - ar[ - 4:4:2, - ], - None, - ) - self.assertEqual( - ar[ - 4:4:-2, - ], - None, - ) - self.assertEqual(ar[4:4:1, ...], None) - self.assertEqual(ar[4:4:-1, ...], None) - self.assertEqual(ar[4:4:1, 2:2], None) - self.assertEqual(ar[4:4:-1, 1:4], None) - self.assertEqual(ar[..., 4:4], None) - self.assertEqual(ar[1:4, 4:4], None) - self.assertEqual(ar[..., 4:4:1], None) - self.assertEqual(ar[..., 4:4:-1], None) - self.assertEqual(ar[2:2, 4:4:1], None) - self.assertEqual(ar[1:4, 4:4:-1], None) - - # Test advanced slicing - ar[0] = 0 - ar[1] = 1 - ar[2] = 2 - ar[3] = 3 - ar[4] = 4 - ar[5] = 5 - - # We should receive something like [0,2,4] - self.assertEqual(ar[::2, 1][0], 0) - self.assertEqual(ar[::2, 1][1], 2) - self.assertEqual(ar[::2, 1][2], 4) - # We should receive something like [2,2,2] - self.assertEqual(ar[2, ::2][0], 2) - self.assertEqual(ar[2, ::2][1], 2) - self.assertEqual(ar[2, ::2][2], 2) - - # Should create a 3x3 array of [0,2,4] - ar2 = ar[::2, ::2] - self.assertEqual(len(ar2), 3) - self.assertEqual(ar2[0][0], 0) - self.assertEqual(ar2[0][1], 0) - self.assertEqual(ar2[0][2], 0) - self.assertEqual(ar2[2][0], 4) - self.assertEqual(ar2[2][1], 4) - self.assertEqual(ar2[2][2], 4) - self.assertEqual(ar2[1][0], 2) - self.assertEqual(ar2[2][0], 4) - self.assertEqual(ar2[1][1], 2) - - # Should create a reversed 3x8 array over X of [1,2,3] -> [3,2,1] - ar2 = ar[3:0:-1] - self.assertEqual(len(ar2), 3) - self.assertEqual(ar2[0][0], 3) - self.assertEqual(ar2[0][1], 3) - self.assertEqual(ar2[0][2], 3) - self.assertEqual(ar2[0][7], 3) - self.assertEqual(ar2[2][0], 1) - self.assertEqual(ar2[2][1], 1) - self.assertEqual(ar2[2][2], 1) - self.assertEqual(ar2[2][7], 1) - self.assertEqual(ar2[1][0], 2) - self.assertEqual(ar2[1][1], 2) - # Should completely reverse the array over X -> [5,4,3,2,1,0] - ar2 = ar[::-1] - self.assertEqual(len(ar2), 6) - self.assertEqual(ar2[0][0], 5) - self.assertEqual(ar2[0][1], 5) - self.assertEqual(ar2[0][3], 5) - self.assertEqual(ar2[0][-1], 5) - self.assertEqual(ar2[1][0], 4) - self.assertEqual(ar2[1][1], 4) - self.assertEqual(ar2[1][3], 4) - self.assertEqual(ar2[1][-1], 4) - self.assertEqual(ar2[-1][-1], 0) - self.assertEqual(ar2[-2][-2], 1) - 
self.assertEqual(ar2[-3][-1], 2) - - # Test advanced slicing - ar[:] = 0 - ar2 = ar[:, 1] - ar2[:] = [99] * len(ar2) - self.assertEqual(ar2[0], 99) - self.assertEqual(ar2[-1], 99) - self.assertEqual(ar2[-2], 99) - self.assertEqual(ar2[2], 99) - self.assertEqual(ar[0, 1], 99) - self.assertEqual(ar[1, 1], 99) - self.assertEqual(ar[2, 1], 99) - self.assertEqual(ar[-1, 1], 99) - self.assertEqual(ar[-2, 1], 99) - - # Cases where a 2d array should have a dimension of length 1. - ar2 = ar[1:2, :] - self.assertEqual(ar2.shape, (1, ar.shape[1])) - ar2 = ar[:, 1:2] - self.assertEqual(ar2.shape, (ar.shape[0], 1)) - sf2 = pygame.Surface((1, 5), 0, 32) - ar2 = pygame.PixelArray(sf2) - self.assertEqual(ar2.shape, sf2.get_size()) - sf2 = pygame.Surface((7, 1), 0, 32) - ar2 = pygame.PixelArray(sf2) - self.assertEqual(ar2.shape, sf2.get_size()) - - # Array has a single ellipsis subscript: the identity operator - ar2 = ar[...] - self.assertTrue(ar2 is ar) - - # Ensure x and y are freed for p = array[x, y] - # Bug fix: reference counting - if hasattr(sys, "getrefcount"): - - class Int(int): - """Unique int instances""" - - pass - - sf = pygame.Surface((2, 2), 0, 32) - ar = pygame.PixelArray(sf) - x, y = Int(0), Int(1) - rx_before, ry_before = sys.getrefcount(x), sys.getrefcount(y) - p = ar[x, y] - rx_after, ry_after = sys.getrefcount(x), sys.getrefcount(y) - self.assertEqual(rx_after, rx_before) - self.assertEqual(ry_after, ry_before) - - def test_ass_subscript(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((6, 8), 0, bpp) - sf.fill((255, 255, 255)) - ar = pygame.PixelArray(sf) - - # Test ellipse working - ar[..., ...] = (0, 0, 0) - self.assertEqual(ar[0, 0], 0) - self.assertEqual(ar[1, 0], 0) - self.assertEqual(ar[-1, -1], 0) - ar[ - ..., - ] = (0, 0, 255) - self.assertEqual(ar[0, 0], sf.map_rgb((0, 0, 255))) - self.assertEqual(ar[1, 0], sf.map_rgb((0, 0, 255))) - self.assertEqual(ar[-1, -1], sf.map_rgb((0, 0, 255))) - ar[:, ...] = (255, 0, 0) - self.assertEqual(ar[0, 0], sf.map_rgb((255, 0, 0))) - self.assertEqual(ar[1, 0], sf.map_rgb((255, 0, 0))) - self.assertEqual(ar[-1, -1], sf.map_rgb((255, 0, 0))) - ar[...] 
= (0, 255, 0) - self.assertEqual(ar[0, 0], sf.map_rgb((0, 255, 0))) - self.assertEqual(ar[1, 0], sf.map_rgb((0, 255, 0))) - self.assertEqual(ar[-1, -1], sf.map_rgb((0, 255, 0))) - - # Ensure x and y are freed for array[x, y] = p - # Bug fix: reference counting - if hasattr(sys, "getrefcount"): - - class Int(int): - """Unique int instances""" - - pass - - sf = pygame.Surface((2, 2), 0, 32) - ar = pygame.PixelArray(sf) - x, y = Int(0), Int(1) - rx_before, ry_before = sys.getrefcount(x), sys.getrefcount(y) - ar[x, y] = 0 - rx_after, ry_after = sys.getrefcount(x), sys.getrefcount(y) - self.assertEqual(rx_after, rx_before) - self.assertEqual(ry_after, ry_before) - - def test_pixels_field(self): - for bpp in [1, 2, 3, 4]: - sf = pygame.Surface((11, 7), 0, bpp * 8) - ar = pygame.PixelArray(sf) - ar2 = ar[1:, :] - self.assertEqual(ar2._pixels_address - ar._pixels_address, ar.itemsize) - ar2 = ar[:, 1:] - self.assertEqual(ar2._pixels_address - ar._pixels_address, ar.strides[1]) - ar2 = ar[::-1, :] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, - (ar.shape[0] - 1) * ar.itemsize, - ) - ar2 = ar[::-2, :] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, - (ar.shape[0] - 1) * ar.itemsize, - ) - ar2 = ar[:, ::-1] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, - (ar.shape[1] - 1) * ar.strides[1], - ) - ar3 = ar2[::-1, :] - self.assertEqual( - ar3._pixels_address - ar._pixels_address, - (ar.shape[0] - 1) * ar.strides[0] + (ar.shape[1] - 1) * ar.strides[1], - ) - ar2 = ar[:, ::-2] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, - (ar.shape[1] - 1) * ar.strides[1], - ) - ar2 = ar[2::, 3::] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, - ar.strides[0] * 2 + ar.strides[1] * 3, - ) - ar2 = ar[2::2, 3::4] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, - ar.strides[0] * 2 + ar.strides[1] * 3, - ) - ar2 = ar[9:2:-1, :] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, ar.strides[0] * 9 - ) - ar2 = ar[:, 5:2:-1] - self.assertEqual( - ar2._pixels_address - ar._pixels_address, ar.strides[1] * 5 - ) - ##? ar2 = ar[:,9:2:-1] - - def test_make_surface(self): - bg_color = pygame.Color(255, 255, 255) - fg_color = pygame.Color(128, 100, 0) - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 20), 0, bpp) - bg_color_adj = sf.unmap_rgb(sf.map_rgb(bg_color)) - fg_color_adj = sf.unmap_rgb(sf.map_rgb(fg_color)) - sf.fill(bg_color_adj) - sf.fill(fg_color_adj, (2, 5, 4, 11)) - ar = pygame.PixelArray(sf) - newsf = ar[::2, ::2].make_surface() - rect = newsf.get_rect() - self.assertEqual(rect.width, 5) - self.assertEqual(rect.height, 10) - for p in [ - (0, 2), - (0, 3), - (1, 2), - (2, 2), - (3, 2), - (3, 3), - (0, 7), - (0, 8), - (1, 8), - (2, 8), - (3, 8), - (3, 7), - ]: - self.assertEqual(newsf.get_at(p), bg_color_adj) - for p in [(1, 3), (2, 3), (1, 5), (2, 5), (1, 7), (2, 7)]: - self.assertEqual(newsf.get_at(p), fg_color_adj) - - # Bug when array width is not a multiple of the slice step. - w = 17 - lst = list(range(w)) - w_slice = len(lst[::2]) - h = 3 - sf = pygame.Surface((w, h), 0, 32) - ar = pygame.PixelArray(sf) - ar2 = ar[::2, :] - sf2 = ar2.make_surface() - w2, h2 = sf2.get_size() - self.assertEqual(w2, w_slice) - self.assertEqual(h2, h) - - # Bug when array height is not a multiple of the slice step. - # This can hang the Python interpreter. 
- h = 17 - lst = list(range(h)) - h_slice = len(lst[::2]) - w = 3 - sf = pygame.Surface((w, h), 0, 32) - ar = pygame.PixelArray(sf) - ar2 = ar[:, ::2] - sf2 = ar2.make_surface() # Hangs here. - w2, h2 = sf2.get_size() - self.assertEqual(w2, w) - self.assertEqual(h2, h_slice) - - def test_make_surface__subclassed_surface(self): - """Ensure make_surface can handle subclassed surfaces.""" - expected_size = (3, 5) - expected_flags = 0 - expected_depth = 32 - original_surface = SurfaceSubclass( - expected_size, expected_flags, expected_depth - ) - pixelarray = pygame.PixelArray(original_surface) - - surface = pixelarray.make_surface() - - self.assertIsNot(surface, original_surface) - self.assertIsInstance(surface, pygame.Surface) - self.assertNotIsInstance(surface, SurfaceSubclass) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_flags(), expected_flags) - self.assertEqual(surface.get_bitsize(), expected_depth) - - def test_iter(self): - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((5, 10), 0, bpp) - ar = pygame.PixelArray(sf) - iterations = 0 - for col in ar: - self.assertEqual(len(col), 10) - iterations += 1 - self.assertEqual(iterations, 5) - - def test_replace(self): - # print "replace start" - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 10), 0, bpp) - sf.fill((255, 0, 0)) - rval = sf.map_rgb((0, 0, 255)) - oval = sf.map_rgb((255, 0, 0)) - ar = pygame.PixelArray(sf) - ar[::2].replace((255, 0, 0), (0, 0, 255)) - self.assertEqual(ar[0][0], rval) - self.assertEqual(ar[1][0], oval) - self.assertEqual(ar[2][3], rval) - self.assertEqual(ar[3][6], oval) - self.assertEqual(ar[8][9], rval) - self.assertEqual(ar[9][9], oval) - - ar[::2].replace((0, 0, 255), (255, 0, 0), weights=(10, 20, 50)) - self.assertEqual(ar[0][0], oval) - self.assertEqual(ar[2][3], oval) - self.assertEqual(ar[3][6], oval) - self.assertEqual(ar[8][9], oval) - self.assertEqual(ar[9][9], oval) - # print "replace end" - - def test_extract(self): - # print "extract start" - for bpp in (8, 16, 24, 32): - sf = pygame.Surface((10, 10), 0, bpp) - sf.fill((0, 0, 255)) - sf.fill((255, 0, 0), (2, 2, 6, 6)) - - white = sf.map_rgb((255, 255, 255)) - black = sf.map_rgb((0, 0, 0)) - - ar = pygame.PixelArray(sf) - newar = ar.extract((255, 0, 0)) - - self.assertEqual(newar[0][0], black) - self.assertEqual(newar[1][0], black) - self.assertEqual(newar[2][3], white) - self.assertEqual(newar[3][6], white) - self.assertEqual(newar[8][9], black) - self.assertEqual(newar[9][9], black) - - newar = ar.extract((255, 0, 0), weights=(10, 0.1, 50)) - self.assertEqual(newar[0][0], black) - self.assertEqual(newar[1][0], black) - self.assertEqual(newar[2][3], white) - self.assertEqual(newar[3][6], white) - self.assertEqual(newar[8][9], black) - self.assertEqual(newar[9][9], black) - # print "extract end" - - def test_2dslice_assignment(self): - w = 2 * 5 * 8 - h = 3 * 5 * 9 - sf = pygame.Surface((w, h), 0, 32) - ar = pygame.PixelArray(sf) - size = (w, h) - strides = (1, w) - offset = 0 - self._test_assignment(sf, ar, size, strides, offset) - xslice = slice(None, None, 2) - yslice = slice(None, None, 3) - ar, size, strides, offset = self._array_slice( - ar, size, (xslice, yslice), strides, offset - ) - self._test_assignment(sf, ar, size, strides, offset) - xslice = slice(5, None, 5) - yslice = slice(5, None, 5) - ar, size, strides, offset = self._array_slice( - ar, size, (xslice, yslice), strides, offset - ) - self._test_assignment(sf, ar, size, strides, offset) - - def _test_assignment(self, sf, ar, ar_size, 
ar_strides, ar_offset): - self.assertEqual(ar.shape, ar_size) - ar_w, ar_h = ar_size - ar_xstride, ar_ystride = ar_strides - sf_w, sf_h = sf.get_size() - black = pygame.Color("black") - color = pygame.Color(0, 0, 12) - pxcolor = sf.map_rgb(color) - sf.fill(black) - for ar_x, ar_y in [ - (0, 0), - (0, ar_h - 4), - (ar_w - 3, 0), - (0, ar_h - 1), - (ar_w - 1, 0), - (ar_w - 1, ar_h - 1), - ]: - sf_offset = ar_offset + ar_x * ar_xstride + ar_y * ar_ystride - sf_y = sf_offset // sf_w - sf_x = sf_offset - sf_y * sf_w - sf_posn = (sf_x, sf_y) - sf_pix = sf.get_at(sf_posn) - self.assertEqual( - sf_pix, - black, - "at pixarr posn (%i, %i) (surf posn (%i, %i)): " - "%s != %s" % (ar_x, ar_y, sf_x, sf_y, sf_pix, black), - ) - ar[ar_x, ar_y] = pxcolor - sf_pix = sf.get_at(sf_posn) - self.assertEqual( - sf_pix, - color, - "at pixarr posn (%i, %i) (surf posn (%i, %i)): " - "%s != %s" % (ar_x, ar_y, sf_x, sf_y, sf_pix, color), - ) - - def _array_slice(self, ar, size, slices, strides, offset): - ar = ar[slices] - xslice, yslice = slices - w, h = size - xstart, xstop, xstep = xslice.indices(w) - ystart, ystop, ystep = yslice.indices(h) - w = (xstop - xstart + xstep - 1) // xstep - h = (ystop - ystart + ystep - 1) // ystep - xstride, ystride = strides - offset += xstart * xstride + ystart * ystride - xstride *= xstep - ystride *= ystep - return ar, (w, h), (xstride, ystride), offset - - def test_array_properties(self): - # itemsize, ndim, shape, and strides. - for bpp in [1, 2, 3, 4]: - sf = pygame.Surface((2, 2), 0, bpp * 8) - ar = pygame.PixelArray(sf) - self.assertEqual(ar.itemsize, bpp) - - for shape in [(4, 16), (5, 13)]: - w, h = shape - sf = pygame.Surface(shape, 0, 32) - bpp = sf.get_bytesize() - pitch = sf.get_pitch() - ar = pygame.PixelArray(sf) - self.assertEqual(ar.ndim, 2) - self.assertEqual(ar.shape, shape) - self.assertEqual(ar.strides, (bpp, pitch)) - ar2 = ar[::2, :] - w2 = len(([0] * w)[::2]) - self.assertEqual(ar2.ndim, 2) - self.assertEqual(ar2.shape, (w2, h)) - self.assertEqual(ar2.strides, (2 * bpp, pitch)) - ar2 = ar[:, ::2] - h2 = len(([0] * h)[::2]) - self.assertEqual(ar2.ndim, 2) - self.assertEqual(ar2.shape, (w, h2)) - self.assertEqual(ar2.strides, (bpp, 2 * pitch)) - ar2 = ar[1] - self.assertEqual(ar2.ndim, 1) - self.assertEqual(ar2.shape, (h,)) - self.assertEqual(ar2.strides, (pitch,)) - ar2 = ar[:, 1] - self.assertEqual(ar2.ndim, 1) - self.assertEqual(ar2.shape, (w,)) - self.assertEqual(ar2.strides, (bpp,)) - - def test_self_assign(self): - # This differs from NumPy arrays. - w = 10 - max_x = w - 1 - h = 20 - max_y = h - 1 - for bpp in [1, 2, 3, 4]: - sf = pygame.Surface((w, h), 0, bpp * 8) - ar = pygame.PixelArray(sf) - for i in range(w * h): - ar[i % w, i // w] = i - ar[:, :] = ar[::-1, :] - for i in range(w * h): - self.assertEqual(ar[max_x - i % w, i // w], i) - ar = pygame.PixelArray(sf) - for i in range(w * h): - ar[i % w, i // w] = i - ar[:, :] = ar[:, ::-1] - for i in range(w * h): - self.assertEqual(ar[i % w, max_y - i // w], i) - ar = pygame.PixelArray(sf) - for i in range(w * h): - ar[i % w, i // w] = i - ar[:, :] = ar[::-1, ::-1] - for i in range(w * h): - self.assertEqual(ar[max_x - i % w, max_y - i // w], i) - - def test_color_value(self): - # Confirm that a PixelArray slice assignment distinguishes between - # pygame.Color and tuple objects as single (r, g, b[, a]) colors - # and other sequences as sequences of colors to be treated as - # slices. 
- sf = pygame.Surface((5, 5), 0, 32) - ar = pygame.PixelArray(sf) - index = slice(None, None, 1) - ar.__setitem__(index, (1, 2, 3)) - self.assertEqual(ar[0, 0], sf.map_rgb((1, 2, 3))) - ar.__setitem__(index, pygame.Color(10, 11, 12)) - self.assertEqual(ar[0, 0], sf.map_rgb((10, 11, 12))) - self.assertRaises(ValueError, ar.__setitem__, index, (1, 2, 3, 4, 5)) - self.assertRaises(ValueError, ar.__setitem__, (index, index), (1, 2, 3, 4, 5)) - self.assertRaises(ValueError, ar.__setitem__, index, [1, 2, 3]) - self.assertRaises(ValueError, ar.__setitem__, (index, index), [1, 2, 3]) - sf = pygame.Surface((3, 3), 0, 32) - ar = pygame.PixelArray(sf) - ar[:] = (20, 30, 40) - self.assertEqual(ar[0, 0], sf.map_rgb((20, 30, 40))) - ar[:] = [20, 30, 40] - self.assertEqual(ar[0, 0], 20) - self.assertEqual(ar[1, 0], 30) - self.assertEqual(ar[2, 0], 40) - - def test_transpose(self): - # PixelArray.transpose(): swap axis on a 2D array, add a length - # 1 x axis to a 1D array. - sf = pygame.Surface((3, 7), 0, 32) - ar = pygame.PixelArray(sf) - w, h = ar.shape - dx, dy = ar.strides - for i in range(w * h): - x = i % w - y = i // w - ar[x, y] = i - ar_t = ar.transpose() - self.assertEqual(ar_t.shape, (h, w)) - self.assertEqual(ar_t.strides, (dy, dx)) - for i in range(w * h): - x = i % w - y = i // w - self.assertEqual(ar_t[y, x], ar[x, y]) - ar1D = ar[0] - ar2D = ar1D.transpose() - self.assertEqual(ar2D.shape, (1, h)) - for y in range(h): - self.assertEqual(ar1D[y], ar2D[0, y]) - ar1D = ar[:, 0] - ar2D = ar1D.transpose() - self.assertEqual(ar2D.shape, (1, w)) - for x in range(2): - self.assertEqual(ar1D[x], ar2D[0, x]) - - def test_length_1_dimension_broadcast(self): - w = 5 - sf = pygame.Surface((w, w), 0, 32) - ar = pygame.PixelArray(sf) - # y-axis broadcast. - sf_x = pygame.Surface((w, 1), 0, 32) - ar_x = pygame.PixelArray(sf_x) - for i in range(w): - ar_x[i, 0] = (w + 1) * 10 - ar[...] = ar_x - for y in range(w): - for x in range(w): - self.assertEqual(ar[x, y], ar_x[x, 0]) - # x-axis broadcast. - ar[...] = 0 - sf_y = pygame.Surface((1, w), 0, 32) - ar_y = pygame.PixelArray(sf_y) - for i in range(w): - ar_y[0, i] = (w + 1) * 10 - ar[...] = ar_y - for x in range(w): - for y in range(w): - self.assertEqual(ar[x, y], ar_y[0, y]) - # (1, 1) array broadcast. - ar[...] = 0 - sf_1px = pygame.Surface((1, 1), 0, 32) - ar_1px = pygame.PixelArray(sf_1px) - ar_1px[0, 0] = 42 # Well it had to show up somewhere. - ar[...] = ar_1px - for y in range(w): - for x in range(w): - self.assertEqual(ar[x, y], 42) - - def test_assign_size_mismatch(self): - sf = pygame.Surface((7, 11), 0, 32) - ar = pygame.PixelArray(sf) - self.assertRaises(ValueError, ar.__setitem__, Ellipsis, ar[:, 0:2]) - self.assertRaises(ValueError, ar.__setitem__, Ellipsis, ar[0:2, :]) - - def test_repr(self): - # Python 3.x bug: the tp_repr slot function returned NULL instead - # of a Unicode string, triggering an exception. - sf = pygame.Surface((3, 1), pygame.SRCALPHA, 16) - ar = pygame.PixelArray(sf) - ar[...] = 42 - pixel = sf.get_at_mapped((0, 0)) - self.assertEqual(repr(ar), type(ar).__name__ + "([\n [42, 42, 42]]\n)") - - -@unittest.skipIf(IS_PYPY, "pypy having issues") -class PixelArrayArrayInterfaceTest(unittest.TestCase, TestMixin): - @unittest.skipIf(IS_PYPY, "skipping for PyPy (why?)") - def test_basic(self): - # Check unchanging fields. 
- sf = pygame.Surface((2, 2), 0, 32) - ar = pygame.PixelArray(sf) - - ai = arrinter.ArrayInterface(ar) - self.assertEqual(ai.two, 2) - self.assertEqual(ai.typekind, "u") - self.assertEqual(ai.nd, 2) - self.assertEqual(ai.data, ar._pixels_address) - - @unittest.skipIf(IS_PYPY, "skipping for PyPy (why?)") - def test_shape(self): - - for shape in [[4, 16], [5, 13]]: - w, h = shape - sf = pygame.Surface(shape, 0, 32) - ar = pygame.PixelArray(sf) - ai = arrinter.ArrayInterface(ar) - ai_shape = [ai.shape[i] for i in range(ai.nd)] - self.assertEqual(ai_shape, shape) - ar2 = ar[::2, :] - ai2 = arrinter.ArrayInterface(ar2) - w2 = len(([0] * w)[::2]) - ai_shape = [ai2.shape[i] for i in range(ai2.nd)] - self.assertEqual(ai_shape, [w2, h]) - ar2 = ar[:, ::2] - ai2 = arrinter.ArrayInterface(ar2) - h2 = len(([0] * h)[::2]) - ai_shape = [ai2.shape[i] for i in range(ai2.nd)] - self.assertEqual(ai_shape, [w, h2]) - - @unittest.skipIf(IS_PYPY, "skipping for PyPy (why?)") - def test_itemsize(self): - for bytes_per_pixel in range(1, 5): - bits_per_pixel = 8 * bytes_per_pixel - sf = pygame.Surface((2, 2), 0, bits_per_pixel) - ar = pygame.PixelArray(sf) - ai = arrinter.ArrayInterface(ar) - self.assertEqual(ai.itemsize, bytes_per_pixel) - - @unittest.skipIf(IS_PYPY, "skipping for PyPy (why?)") - def test_flags(self): - aim = arrinter - common_flags = aim.PAI_NOTSWAPPED | aim.PAI_WRITEABLE | aim.PAI_ALIGNED - s = pygame.Surface((10, 2), 0, 32) - ar = pygame.PixelArray(s) - ai = aim.ArrayInterface(ar) - self.assertEqual(ai.flags, common_flags | aim.PAI_FORTRAN) - - ar2 = ar[::2, :] - ai = aim.ArrayInterface(ar2) - self.assertEqual(ai.flags, common_flags) - - s = pygame.Surface((8, 2), 0, 24) - ar = pygame.PixelArray(s) - ai = aim.ArrayInterface(ar) - self.assertEqual(ai.flags, common_flags | aim.PAI_FORTRAN) - - s = pygame.Surface((7, 2), 0, 24) - ar = pygame.PixelArray(s) - ai = aim.ArrayInterface(ar) - self.assertEqual(ai.flags, common_flags) - - def test_slicing(self): - # This will implicitly test data and strides fields. - # - # Need an 8 bit test surfaces because pixelcopy.make_surface - # returns an 8 bit surface for a 2d array. 
- - factors = [7, 3, 11] - - w = reduce(operator.mul, factors, 1) - h = 13 - sf = pygame.Surface((w, h), 0, 8) - color = sf.map_rgb((1, 17, 128)) - ar = pygame.PixelArray(sf) - for f in factors[:-1]: - w = w // f - sf.fill((0, 0, 0)) - ar = ar[f : f + w, :] - ar[0][0] = color - ar[-1][-2] = color - ar[0][-3] = color - sf2 = ar.make_surface() - sf3 = pygame.pixelcopy.make_surface(ar) - self.assert_surfaces_equal(sf3, sf2) - - h = reduce(operator.mul, factors, 1) - w = 13 - sf = pygame.Surface((w, h), 0, 8) - color = sf.map_rgb((1, 17, 128)) - ar = pygame.PixelArray(sf) - for f in factors[:-1]: - h = h // f - sf.fill((0, 0, 0)) - ar = ar[:, f : f + h] - ar[0][0] = color - ar[-1][-2] = color - ar[0][-3] = color - sf2 = ar.make_surface() - sf3 = pygame.pixelcopy.make_surface(ar) - self.assert_surfaces_equal(sf3, sf2) - - w = 20 - h = 10 - sf = pygame.Surface((w, h), 0, 8) - color = sf.map_rgb((1, 17, 128)) - ar = pygame.PixelArray(sf) - for slices in [ - (slice(w), slice(h)), - (slice(0, w, 2), slice(h)), - (slice(0, w, 3), slice(h)), - (slice(w), slice(0, h, 2)), - (slice(w), slice(0, h, 3)), - (slice(0, w, 2), slice(0, h, 2)), - (slice(0, w, 3), slice(0, h, 3)), - ]: - sf.fill((0, 0, 0)) - ar2 = ar[slices] - ar2[0][0] = color - ar2[-1][-2] = color - ar2[0][-3] = color - sf2 = ar2.make_surface() - sf3 = pygame.pixelcopy.make_surface(ar2) - self.assert_surfaces_equal(sf3, sf2) - - -@unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") -@unittest.skipIf(IS_PYPY, "pypy having issues") -class PixelArrayNewBufferTest(unittest.TestCase, TestMixin): - - if pygame.HAVE_NEWBUF: - from pygame.tests.test_utils import buftools - - bitsize_to_format = {8: "B", 16: "=H", 24: "3x", 32: "=I"} - - def test_newbuf_2D(self): - buftools = self.buftools - Importer = buftools.Importer - - for bit_size in [8, 16, 24, 32]: - s = pygame.Surface((10, 2), 0, bit_size) - ar = pygame.PixelArray(s) - format = self.bitsize_to_format[bit_size] - itemsize = ar.itemsize - shape = ar.shape - w, h = shape - strides = ar.strides - length = w * h * itemsize - imp = Importer(ar, buftools.PyBUF_FULL) - self.assertTrue(imp.obj, ar) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 2) - self.assertEqual(imp.itemsize, itemsize) - self.assertEqual(imp.format, format) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertTrue(imp.suboffsets is None) - self.assertEqual(imp.buf, s._pixels_address) - - s = pygame.Surface((8, 16), 0, 32) - ar = pygame.PixelArray(s) - format = self.bitsize_to_format[s.get_bitsize()] - itemsize = ar.itemsize - shape = ar.shape - w, h = shape - strides = ar.strides - length = w * h * itemsize - imp = Importer(ar, buftools.PyBUF_SIMPLE) - self.assertTrue(imp.obj, ar) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - self.assertTrue(imp.suboffsets is None) - self.assertEqual(imp.buf, s._pixels_address) - imp = Importer(ar, buftools.PyBUF_FORMAT) - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.format, format) - imp = Importer(ar, buftools.PyBUF_WRITABLE) - self.assertEqual(imp.ndim, 0) - self.assertTrue(imp.format is None) - imp = Importer(ar, buftools.PyBUF_F_CONTIGUOUS) - self.assertEqual(imp.ndim, 2) - self.assertTrue(imp.format is None) - self.assertEqual(imp.shape, shape) - 
self.assertEqual(imp.strides, strides) - imp = Importer(ar, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertEqual(imp.ndim, 2) - self.assertTrue(imp.format is None) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_ND) - - ar_sliced = ar[:, ::2] - format = self.bitsize_to_format[s.get_bitsize()] - itemsize = ar_sliced.itemsize - shape = ar_sliced.shape - w, h = shape - strides = ar_sliced.strides - length = w * h * itemsize - imp = Importer(ar_sliced, buftools.PyBUF_STRIDED) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 2) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertEqual(imp.buf, s._pixels_address) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises( - BufferError, Importer, ar_sliced, buftools.PyBUF_ANY_CONTIGUOUS - ) - - ar_sliced = ar[::2, :] - format = self.bitsize_to_format[s.get_bitsize()] - itemsize = ar_sliced.itemsize - shape = ar_sliced.shape - w, h = shape - strides = ar_sliced.strides - length = w * h * itemsize - imp = Importer(ar_sliced, buftools.PyBUF_STRIDED) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 2) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertEqual(imp.buf, s._pixels_address) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar_sliced, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises( - BufferError, Importer, ar_sliced, buftools.PyBUF_ANY_CONTIGUOUS - ) - - s2 = s.subsurface((2, 3, 5, 7)) - ar = pygame.PixelArray(s2) - format = self.bitsize_to_format[s.get_bitsize()] - itemsize = ar.itemsize - shape = ar.shape - w, h = shape - strides = ar.strides - length = w * h * itemsize - imp = Importer(ar, buftools.PyBUF_STRIDES) - self.assertTrue(imp.obj, ar) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 2) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertTrue(imp.suboffsets is None) - self.assertEqual(imp.buf, s2._pixels_address) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_FORMAT) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_ANY_CONTIGUOUS) - - def test_newbuf_1D(self): - buftools = self.buftools - 
Importer = buftools.Importer - - s = pygame.Surface((2, 16), 0, 32) - ar_2D = pygame.PixelArray(s) - x = 0 - ar = ar_2D[x] - format = self.bitsize_to_format[s.get_bitsize()] - itemsize = ar.itemsize - shape = ar.shape - h = shape[0] - strides = ar.strides - length = h * itemsize - buf = s._pixels_address + x * itemsize - imp = Importer(ar, buftools.PyBUF_STRIDES) - self.assertTrue(imp.obj, ar) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 1) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertTrue(imp.suboffsets is None) - self.assertEqual(imp.buf, buf) - imp = Importer(ar, buftools.PyBUF_FULL) - self.assertEqual(imp.ndim, 1) - self.assertEqual(imp.format, format) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_FORMAT) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, ar, buftools.PyBUF_ANY_CONTIGUOUS) - y = 10 - ar = ar_2D[:, y] - shape = ar.shape - w = shape[0] - strides = ar.strides - length = w * itemsize - buf = s._pixels_address + y * s.get_pitch() - imp = Importer(ar, buftools.PyBUF_FULL) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 1) - self.assertEqual(imp.itemsize, itemsize) - self.assertEqual(imp.format, format) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertEqual(imp.strides, strides) - self.assertEqual(imp.buf, buf) - self.assertTrue(imp.suboffsets is None) - imp = Importer(ar, buftools.PyBUF_SIMPLE) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 0) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertTrue(imp.shape is None) - self.assertTrue(imp.strides is None) - imp = Importer(ar, buftools.PyBUF_ND) - self.assertEqual(imp.len, length) - self.assertEqual(imp.ndim, 1) - self.assertEqual(imp.itemsize, itemsize) - self.assertTrue(imp.format is None) - self.assertFalse(imp.readonly) - self.assertEqual(imp.shape, shape) - self.assertTrue(imp.strides is None) - imp = Importer(ar, buftools.PyBUF_C_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - imp = Importer(ar, buftools.PyBUF_F_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - imp = Importer(ar, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertEqual(imp.ndim, 1) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/pixelcopy_test.py b/venv/Lib/site-packages/pygame/tests/pixelcopy_test.py deleted file mode 100644 index f89c665..0000000 --- a/venv/Lib/site-packages/pygame/tests/pixelcopy_test.py +++ /dev/null @@ -1,712 +0,0 @@ -import platform -import unittest - -try: - from pygame.tests.test_utils import arrinter -except NameError: - pass -import pygame -from pygame.locals import * -from pygame.pixelcopy import surface_to_array, map_array, array_to_surface, make_surface - -IS_PYPY = "PyPy" == platform.python_implementation() - - -def unsigned32(i): - """cast signed 32 bit integer to an unsigned integer""" - return i & 0xFFFFFFFF - - -@unittest.skipIf(IS_PYPY, "pypy having illegal instruction on mac") -class 
PixelcopyModuleTest(unittest.TestCase): - - bitsizes = [8, 16, 32] - - test_palette = [ - (0, 0, 0, 255), - (10, 30, 60, 255), - (25, 75, 100, 255), - (100, 150, 200, 255), - (0, 100, 200, 255), - ] - - surf_size = (10, 12) - test_points = [ - ((0, 0), 1), - ((4, 5), 1), - ((9, 0), 2), - ((5, 5), 2), - ((0, 11), 3), - ((4, 6), 3), - ((9, 11), 4), - ((5, 6), 4), - ] - - def __init__(self, *args, **kwds): - pygame.display.init() - try: - unittest.TestCase.__init__(self, *args, **kwds) - self.sources = [ - self._make_src_surface(8), - self._make_src_surface(16), - self._make_src_surface(16, srcalpha=True), - self._make_src_surface(24), - self._make_src_surface(32), - self._make_src_surface(32, srcalpha=True), - ] - finally: - pygame.display.quit() - - def _make_surface(self, bitsize, srcalpha=False, palette=None): - if palette is None: - palette = self.test_palette - flags = 0 - if srcalpha: - flags |= SRCALPHA - surf = pygame.Surface(self.surf_size, flags, bitsize) - if bitsize == 8: - surf.set_palette([c[:3] for c in palette]) - return surf - - def _fill_surface(self, surf, palette=None): - if palette is None: - palette = self.test_palette - surf.fill(palette[1], (0, 0, 5, 6)) - surf.fill(palette[2], (5, 0, 5, 6)) - surf.fill(palette[3], (0, 6, 5, 6)) - surf.fill(palette[4], (5, 6, 5, 6)) - - def _make_src_surface(self, bitsize, srcalpha=False, palette=None): - surf = self._make_surface(bitsize, srcalpha, palette) - self._fill_surface(surf, palette) - return surf - - def setUp(self): - pygame.display.init() - - def tearDown(self): - pygame.display.quit() - - def test_surface_to_array_2d(self): - alpha_color = (0, 0, 0, 128) - - for surf in self.sources: - src_bitsize = surf.get_bitsize() - for dst_bitsize in self.bitsizes: - # dst in a surface standing in for a 2 dimensional array - # of unsigned integers. The byte order is system dependent. 
- dst = pygame.Surface(surf.get_size(), 0, dst_bitsize) - dst.fill((0, 0, 0, 0)) - view = dst.get_view("2") - self.assertFalse(surf.get_locked()) - if dst_bitsize < src_bitsize: - self.assertRaises(ValueError, surface_to_array, view, surf) - self.assertFalse(surf.get_locked()) - continue - surface_to_array(view, surf) - self.assertFalse(surf.get_locked()) - for posn, i in self.test_points: - sp = surf.get_at_mapped(posn) - dp = dst.get_at_mapped(posn) - self.assertEqual( - dp, - sp, - "%s != %s: flags: %i" - ", bpp: %i, posn: %s" - % (dp, sp, surf.get_flags(), surf.get_bitsize(), posn), - ) - del view - - if surf.get_masks()[3]: - dst.fill((0, 0, 0, 0)) - view = dst.get_view("2") - posn = (2, 1) - surf.set_at(posn, alpha_color) - self.assertFalse(surf.get_locked()) - surface_to_array(view, surf) - self.assertFalse(surf.get_locked()) - sp = surf.get_at_mapped(posn) - dp = dst.get_at_mapped(posn) - self.assertEqual( - dp, sp, "%s != %s: bpp: %i" % (dp, sp, surf.get_bitsize()) - ) - - if IS_PYPY: - return - # Swapped endian destination array - pai_flags = arrinter.PAI_ALIGNED | arrinter.PAI_WRITEABLE - for surf in self.sources: - for itemsize in [1, 2, 4, 8]: - if itemsize < surf.get_bytesize(): - continue - a = arrinter.Array(surf.get_size(), "u", itemsize, flags=pai_flags) - surface_to_array(a, surf) - for posn, i in self.test_points: - sp = unsigned32(surf.get_at_mapped(posn)) - dp = a[posn] - self.assertEqual( - dp, - sp, - "%s != %s: itemsize: %i, flags: %i" - ", bpp: %i, posn: %s" - % ( - dp, - sp, - itemsize, - surf.get_flags(), - surf.get_bitsize(), - posn, - ), - ) - - def test_surface_to_array_3d(self): - self.iter_surface_to_array_3d((0xFF, 0xFF00, 0xFF0000, 0)) - self.iter_surface_to_array_3d((0xFF0000, 0xFF00, 0xFF, 0)) - - def iter_surface_to_array_3d(self, rgba_masks): - dst = pygame.Surface(self.surf_size, 0, 24, masks=rgba_masks) - - for surf in self.sources: - dst.fill((0, 0, 0, 0)) - src_bitsize = surf.get_bitsize() - view = dst.get_view("3") - self.assertFalse(surf.get_locked()) - surface_to_array(view, surf) - self.assertFalse(surf.get_locked()) - for posn, i in self.test_points: - sc = surf.get_at(posn)[0:3] - dc = dst.get_at(posn)[0:3] - self.assertEqual( - dc, - sc, - "%s != %s: flags: %i" - ", bpp: %i, posn: %s" - % (dc, sc, surf.get_flags(), surf.get_bitsize(), posn), - ) - view = None - - def test_map_array(self): - targets = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - source = pygame.Surface( - self.surf_size, 0, 24, masks=[0xFF, 0xFF00, 0xFF0000, 0] - ) - self._fill_surface(source) - source_view = source.get_view("3") # (w, h, 3) - for t in targets: - map_array(t.get_view("2"), source_view, t) - for posn, i in self.test_points: - sc = t.map_rgb(source.get_at(posn)) - dc = t.get_at_mapped(posn) - self.assertEqual( - dc, - sc, - "%s != %s: flags: %i" - ", bpp: %i, posn: %s" - % (dc, sc, t.get_flags(), t.get_bitsize(), posn), - ) - - color = pygame.Color("salmon") - color.set_length(3) - for t in targets: - map_array(t.get_view("2"), color, t) - sc = t.map_rgb(color) - for posn, i in self.test_points: - dc = t.get_at_mapped(posn) - self.assertEqual( - dc, - sc, - "%s != %s: flags: %i" - ", bpp: %i, posn: %s" - % (dc, sc, t.get_flags(), t.get_bitsize(), posn), - ) - - # mismatched shapes - w, h = source.get_size() - target = pygame.Surface((w, h + 1), 0, 32) - self.assertRaises(ValueError, map_array, target, source, 
target) - target = pygame.Surface((w - 1, h), 0, 32) - self.assertRaises(ValueError, map_array, target, source, target) - - def test_array_to_surface_broadcasting(self): - # target surfaces - targets = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - - w, h = self.surf_size - - # broadcast column - column = pygame.Surface((1, h), 0, 32) - for target in targets: - source = pygame.Surface((1, h), 0, target) - for y in range(h): - source.set_at((0, y), pygame.Color(y + 1, y + h + 1, y + 2 * h + 1)) - pygame.pixelcopy.surface_to_array(column.get_view("2"), source) - pygame.pixelcopy.array_to_surface(target, column.get_view("2")) - for x in range(w): - for y in range(h): - self.assertEqual( - target.get_at_mapped((x, y)), column.get_at_mapped((0, y)) - ) - - # broadcast row - row = pygame.Surface((w, 1), 0, 32) - for target in targets: - source = pygame.Surface((w, 1), 0, target) - for x in range(w): - source.set_at((x, 0), pygame.Color(x + 1, x + w + 1, x + 2 * w + 1)) - pygame.pixelcopy.surface_to_array(row.get_view("2"), source) - pygame.pixelcopy.array_to_surface(target, row.get_view("2")) - for x in range(w): - for y in range(h): - self.assertEqual( - target.get_at_mapped((x, y)), row.get_at_mapped((x, 0)) - ) - - # broadcast pixel - pixel = pygame.Surface((1, 1), 0, 32) - for target in targets: - source = pygame.Surface((1, 1), 0, target) - source.set_at((0, 0), pygame.Color(13, 47, 101)) - pygame.pixelcopy.surface_to_array(pixel.get_view("2"), source) - pygame.pixelcopy.array_to_surface(target, pixel.get_view("2")) - p = pixel.get_at_mapped((0, 0)) - for x in range(w): - for y in range(h): - self.assertEqual(target.get_at_mapped((x, y)), p) - - -@unittest.skipIf(IS_PYPY, "pypy having illegal instruction on mac") -class PixelCopyTestWithArray(unittest.TestCase): - try: - import numpy - except ImportError: - __tags__ = ["ignore", "subprocess_ignore"] - else: - pygame.surfarray.use_arraytype("numpy") - - bitsizes = [8, 16, 32] - - test_palette = [ - (0, 0, 0, 255), - (10, 30, 60, 255), - (25, 75, 100, 255), - (100, 150, 200, 255), - (0, 100, 200, 255), - ] - - surf_size = (10, 12) - test_points = [ - ((0, 0), 1), - ((4, 5), 1), - ((9, 0), 2), - ((5, 5), 2), - ((0, 11), 3), - ((4, 6), 3), - ((9, 11), 4), - ((5, 6), 4), - ] - - pixels2d = set([8, 16, 32]) - pixels3d = set([24, 32]) - array2d = set([8, 16, 24, 32]) - array3d = set([24, 32]) - - def __init__(self, *args, **kwds): - import numpy - - self.dst_types = [numpy.uint8, numpy.uint16, numpy.uint32] - try: - self.dst_types.append(numpy.uint64) - except AttributeError: - pass - pygame.display.init() - try: - unittest.TestCase.__init__(self, *args, **kwds) - self.sources = [ - self._make_src_surface(8), - self._make_src_surface(16), - self._make_src_surface(16, srcalpha=True), - self._make_src_surface(24), - self._make_src_surface(32), - self._make_src_surface(32, srcalpha=True), - ] - finally: - pygame.display.quit() - - def _make_surface(self, bitsize, srcalpha=False, palette=None): - if palette is None: - palette = self.test_palette - flags = 0 - if srcalpha: - flags |= SRCALPHA - surf = pygame.Surface(self.surf_size, flags, bitsize) - if bitsize == 8: - surf.set_palette([c[:3] for c in palette]) - return surf - - def _fill_surface(self, surf, palette=None): - if palette is None: - palette = self.test_palette - surf.fill(palette[1], (0, 0, 5, 6)) - surf.fill(palette[2], (5, 0, 5, 6)) - 
surf.fill(palette[3], (0, 6, 5, 6)) - surf.fill(palette[4], (5, 6, 5, 6)) - - def _make_src_surface(self, bitsize, srcalpha=False, palette=None): - surf = self._make_surface(bitsize, srcalpha, palette) - self._fill_surface(surf, palette) - return surf - - def setUp(self): - pygame.display.init() - - def tearDown(self): - pygame.display.quit() - - def test_surface_to_array_2d(self): - try: - from numpy import empty, dtype - except ImportError: - return - - palette = self.test_palette - alpha_color = (0, 0, 0, 128) - - dst_dims = self.surf_size - destinations = [empty(dst_dims, t) for t in self.dst_types] - if pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN: - swapped_dst = empty(dst_dims, dtype(">u4")) - else: - swapped_dst = empty(dst_dims, dtype("u4")) - else: - swapped_dst = empty(dst_dims, dtype("i", - "!i", - "1i", - "=1i", - "@q", - "q", - "4x", - "8x", - ]: - surface.fill((255, 254, 253)) - exp = Exporter(shape, format=format) - exp._buf[:] = [42] * exp.buflen - array_to_surface(surface, exp) - for x in range(w): - for y in range(h): - self.assertEqual(surface.get_at((x, y)), (42, 42, 42, 255)) - # Some unsupported formats for array_to_surface and a 32 bit surface - for format in ["f", "d", "?", "x", "1x", "2x", "3x", "5x", "6x", "7x", "9x"]: - exp = Exporter(shape, format=format) - self.assertRaises(ValueError, array_to_surface, surface, exp) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/rect_test.py b/venv/Lib/site-packages/pygame/tests/rect_test.py deleted file mode 100644 index 0d635c8..0000000 --- a/venv/Lib/site-packages/pygame/tests/rect_test.py +++ /dev/null @@ -1,2152 +0,0 @@ -import math -import sys -import unittest -import platform - -from pygame import Rect, Vector2 -from pygame.tests import test_utils - - -IS_PYPY = "PyPy" == platform.python_implementation() - - -class RectTypeTest(unittest.TestCase): - def _assertCountEqual(self, *args, **kwargs): - self.assertCountEqual(*args, **kwargs) - - def testConstructionXYWidthHeight(self): - r = Rect(1, 2, 3, 4) - self.assertEqual(1, r.left) - self.assertEqual(2, r.top) - self.assertEqual(3, r.width) - self.assertEqual(4, r.height) - - def testConstructionTopLeftSize(self): - r = Rect((1, 2), (3, 4)) - self.assertEqual(1, r.left) - self.assertEqual(2, r.top) - self.assertEqual(3, r.width) - self.assertEqual(4, r.height) - - def testCalculatedAttributes(self): - r = Rect(1, 2, 3, 4) - - self.assertEqual(r.left + r.width, r.right) - self.assertEqual(r.top + r.height, r.bottom) - self.assertEqual((r.width, r.height), r.size) - self.assertEqual((r.left, r.top), r.topleft) - self.assertEqual((r.right, r.top), r.topright) - self.assertEqual((r.left, r.bottom), r.bottomleft) - self.assertEqual((r.right, r.bottom), r.bottomright) - - midx = r.left + r.width // 2 - midy = r.top + r.height // 2 - - self.assertEqual(midx, r.centerx) - self.assertEqual(midy, r.centery) - self.assertEqual((r.centerx, r.centery), r.center) - self.assertEqual((r.centerx, r.top), r.midtop) - self.assertEqual((r.centerx, r.bottom), r.midbottom) - self.assertEqual((r.left, r.centery), r.midleft) - self.assertEqual((r.right, r.centery), r.midright) - - def test_normalize(self): - """Ensures normalize works when width and height are both negative.""" - test_rect = Rect((1, 2), (-3, -6)) - expected_normalized_rect = ( - (test_rect.x + test_rect.w, test_rect.y + test_rect.h), - (-test_rect.w, -test_rect.h), - ) - - test_rect.normalize() - - self.assertEqual(test_rect, expected_normalized_rect) - - 
@unittest.skipIf(IS_PYPY, "fails on pypy sometimes") - def test_normalize__positive_height(self): - """Ensures normalize works with a negative width and a positive height.""" - test_rect = Rect((1, 2), (-3, 6)) - expected_normalized_rect = ( - (test_rect.x + test_rect.w, test_rect.y), - (-test_rect.w, test_rect.h), - ) - - test_rect.normalize() - - self.assertEqual(test_rect, expected_normalized_rect) - - @unittest.skipIf(IS_PYPY, "fails on pypy sometimes") - def test_normalize__positive_width(self): - """Ensures normalize works with a positive width and a negative height.""" - test_rect = Rect((1, 2), (3, -6)) - expected_normalized_rect = ( - (test_rect.x, test_rect.y + test_rect.h), - (test_rect.w, -test_rect.h), - ) - - test_rect.normalize() - - self.assertEqual(test_rect, expected_normalized_rect) - - @unittest.skipIf(IS_PYPY, "fails on pypy sometimes") - def test_normalize__zero_height(self): - """Ensures normalize works with a negative width and a zero height.""" - test_rect = Rect((1, 2), (-3, 0)) - expected_normalized_rect = ( - (test_rect.x + test_rect.w, test_rect.y), - (-test_rect.w, test_rect.h), - ) - - test_rect.normalize() - - self.assertEqual(test_rect, expected_normalized_rect) - - @unittest.skipIf(IS_PYPY, "fails on pypy sometimes") - def test_normalize__zero_width(self): - """Ensures normalize works with a zero width and a negative height.""" - test_rect = Rect((1, 2), (0, -6)) - expected_normalized_rect = ( - (test_rect.x, test_rect.y + test_rect.h), - (test_rect.w, -test_rect.h), - ) - - test_rect.normalize() - - self.assertEqual(test_rect, expected_normalized_rect) - - @unittest.skipIf(IS_PYPY, "fails on pypy") - def test_normalize__non_negative(self): - """Ensures normalize works when width and height are both non-negative. - - Tests combinations of positive and zero values for width and height. - The normalize method has no impact when both width and height are - non-negative. - """ - for size in ((3, 6), (3, 0), (0, 6), (0, 0)): - test_rect = Rect((1, 2), size) - expected_normalized_rect = Rect(test_rect) - - test_rect.normalize() - - self.assertEqual(test_rect, expected_normalized_rect) - - def test_x(self): - """Ensures changing the x attribute moves the rect and does not change - the rect's size. - """ - expected_x = 10 - expected_y = 2 - expected_size = (3, 4) - r = Rect((1, expected_y), expected_size) - - r.x = expected_x - - self.assertEqual(r.x, expected_x) - self.assertEqual(r.x, r.left) - self.assertEqual(r.y, expected_y) - self.assertEqual(r.size, expected_size) - - def test_x__invalid_value(self): - """Ensures the x attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.x = value - - def test_x__del(self): - """Ensures the x attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.x - - def test_y(self): - """Ensures changing the y attribute moves the rect and does not change - the rect's size. 
- """ - expected_x = 1 - expected_y = 20 - expected_size = (3, 4) - r = Rect((expected_x, 2), expected_size) - - r.y = expected_y - - self.assertEqual(r.y, expected_y) - self.assertEqual(r.y, r.top) - self.assertEqual(r.x, expected_x) - self.assertEqual(r.size, expected_size) - - def test_y__invalid_value(self): - """Ensures the y attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.y = value - - def test_y__del(self): - """Ensures the y attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.y - - def test_left(self): - """Changing the left attribute moves the rect and does not change - the rect's width - """ - r = Rect(1, 2, 3, 4) - new_left = 10 - - r.left = new_left - self.assertEqual(new_left, r.left) - self.assertEqual(Rect(new_left, 2, 3, 4), r) - - def test_left__invalid_value(self): - """Ensures the left attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.left = value - - def test_left__del(self): - """Ensures the left attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.left - - def test_right(self): - """Changing the right attribute moves the rect and does not change - the rect's width - """ - r = Rect(1, 2, 3, 4) - new_right = r.right + 20 - expected_left = r.left + 20 - old_width = r.width - - r.right = new_right - self.assertEqual(new_right, r.right) - self.assertEqual(expected_left, r.left) - self.assertEqual(old_width, r.width) - - def test_right__invalid_value(self): - """Ensures the right attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.right = value - - def test_right__del(self): - """Ensures the right attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.right - - def test_top(self): - """Changing the top attribute moves the rect and does not change - the rect's width - """ - r = Rect(1, 2, 3, 4) - new_top = 10 - - r.top = new_top - self.assertEqual(Rect(1, new_top, 3, 4), r) - self.assertEqual(new_top, r.top) - - def test_top__invalid_value(self): - """Ensures the top attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.top = value - - def test_top__del(self): - """Ensures the top attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.top - - def test_bottom(self): - """Changing the bottom attribute moves the rect and does not change - the rect's height - """ - r = Rect(1, 2, 3, 4) - new_bottom = r.bottom + 20 - expected_top = r.top + 20 - old_height = r.height - - r.bottom = new_bottom - self.assertEqual(new_bottom, r.bottom) - self.assertEqual(expected_top, r.top) - self.assertEqual(old_height, r.height) - - def test_bottom__invalid_value(self): - """Ensures the bottom attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.bottom = value - - def test_bottom__del(self): - """Ensures the bottom attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.bottom - - def 
test_centerx(self): - """Changing the centerx attribute moves the rect and does not change - the rect's width - """ - r = Rect(1, 2, 3, 4) - new_centerx = r.centerx + 20 - expected_left = r.left + 20 - old_width = r.width - - r.centerx = new_centerx - self.assertEqual(new_centerx, r.centerx) - self.assertEqual(expected_left, r.left) - self.assertEqual(old_width, r.width) - - def test_centerx__invalid_value(self): - """Ensures the centerx attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.centerx = value - - def test_centerx__del(self): - """Ensures the centerx attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.centerx - - def test_centery(self): - """Changing the centery attribute moves the rect and does not change - the rect's width - """ - r = Rect(1, 2, 3, 4) - new_centery = r.centery + 20 - expected_top = r.top + 20 - old_height = r.height - - r.centery = new_centery - self.assertEqual(new_centery, r.centery) - self.assertEqual(expected_top, r.top) - self.assertEqual(old_height, r.height) - - def test_centery__invalid_value(self): - """Ensures the centery attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.centery = value - - def test_centery__del(self): - """Ensures the centery attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.centery - - def test_topleft(self): - """Changing the topleft attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.topleft = new_topleft - self.assertEqual(new_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_topleft__invalid_value(self): - """Ensures the topleft attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.topleft = value - - def test_topleft__del(self): - """Ensures the topleft attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.topleft - - def test_bottomleft(self): - """Changing the bottomleft attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_bottomleft = (r.left + 20, r.bottom + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.bottomleft = new_bottomleft - self.assertEqual(new_bottomleft, r.bottomleft) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_bottomleft__invalid_value(self): - """Ensures the bottomleft attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.bottomleft = value - - def test_bottomleft__del(self): - """Ensures the bottomleft attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.bottomleft - - def test_topright(self): - """Changing the topright attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_topright = (r.right + 20, r.top + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.topright = new_topright - self.assertEqual(new_topright, r.topright) - 
self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_topright__invalid_value(self): - """Ensures the topright attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.topright = value - - def test_topright__del(self): - """Ensures the topright attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.topright - - def test_bottomright(self): - """Changing the bottomright attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_bottomright = (r.right + 20, r.bottom + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.bottomright = new_bottomright - self.assertEqual(new_bottomright, r.bottomright) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_bottomright__invalid_value(self): - """Ensures the bottomright attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.bottomright = value - - def test_bottomright__del(self): - """Ensures the bottomright attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.bottomright - - def test_center(self): - """Changing the center attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_center = (r.centerx + 20, r.centery + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.center = new_center - self.assertEqual(new_center, r.center) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_center__invalid_value(self): - """Ensures the center attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.center = value - - def test_center__del(self): - """Ensures the center attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.center - - def test_midleft(self): - """Changing the midleft attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_midleft = (r.left + 20, r.centery + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.midleft = new_midleft - self.assertEqual(new_midleft, r.midleft) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_midleft__invalid_value(self): - """Ensures the midleft attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.midleft = value - - def test_midleft__del(self): - """Ensures the midleft attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.midleft - - def test_midright(self): - """Changing the midright attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_midright = (r.right + 20, r.centery + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.midright = new_midright - self.assertEqual(new_midright, r.midright) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_midright__invalid_value(self): - """Ensures the midright 
attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.midright = value - - def test_midright__del(self): - """Ensures the midright attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.midright - - def test_midtop(self): - """Changing the midtop attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_midtop = (r.centerx + 20, r.top + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.midtop = new_midtop - self.assertEqual(new_midtop, r.midtop) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_midtop__invalid_value(self): - """Ensures the midtop attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.midtop = value - - def test_midtop__del(self): - """Ensures the midtop attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.midtop - - def test_midbottom(self): - """Changing the midbottom attribute moves the rect and does not change - the rect's size - """ - r = Rect(1, 2, 3, 4) - new_midbottom = (r.centerx + 20, r.bottom + 30) - expected_topleft = (r.left + 20, r.top + 30) - old_size = r.size - - r.midbottom = new_midbottom - self.assertEqual(new_midbottom, r.midbottom) - self.assertEqual(expected_topleft, r.topleft) - self.assertEqual(old_size, r.size) - - def test_midbottom__invalid_value(self): - """Ensures the midbottom attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.midbottom = value - - def test_midbottom__del(self): - """Ensures the midbottom attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.midbottom - - def test_width(self): - """Changing the width resizes the rect from the top-left corner""" - r = Rect(1, 2, 3, 4) - new_width = 10 - old_topleft = r.topleft - old_height = r.height - - r.width = new_width - self.assertEqual(new_width, r.width) - self.assertEqual(old_height, r.height) - self.assertEqual(old_topleft, r.topleft) - - def test_width__invalid_value(self): - """Ensures the width attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.width = value - - def test_width__del(self): - """Ensures the width attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.width - - def test_height(self): - """Changing the height resizes the rect from the top-left corner""" - r = Rect(1, 2, 3, 4) - new_height = 10 - old_topleft = r.topleft - old_width = r.width - - r.height = new_height - self.assertEqual(new_height, r.height) - self.assertEqual(old_width, r.width) - self.assertEqual(old_topleft, r.topleft) - - def test_height__invalid_value(self): - """Ensures the height attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.height = value - - def test_height__del(self): - """Ensures the height attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.height - - def test_size(self): - 
"""Changing the size resizes the rect from the top-left corner""" - r = Rect(1, 2, 3, 4) - new_size = (10, 20) - old_topleft = r.topleft - - r.size = new_size - self.assertEqual(new_size, r.size) - self.assertEqual(old_topleft, r.topleft) - - def test_size__invalid_value(self): - """Ensures the size attribute handles invalid values correctly.""" - r = Rect(0, 0, 1, 1) - - for value in (None, [], "1", 1, (1,), [1, 2, 3]): - with self.assertRaises(TypeError): - r.size = value - - def test_size__del(self): - """Ensures the size attribute can't be deleted.""" - r = Rect(0, 0, 1, 1) - - with self.assertRaises(AttributeError): - del r.size - - def test_contains(self): - r = Rect(1, 2, 3, 4) - - self.assertTrue( - r.contains(Rect(2, 3, 1, 1)), "r does not contain Rect(2, 3, 1, 1)" - ) - self.assertTrue(Rect(2, 3, 1, 1) in r, "r does not contain Rect(2, 3, 1, 1) 2") - self.assertTrue( - r.contains(Rect(r)), "r does not contain the same rect as itself" - ) - self.assertTrue(r in Rect(r), "r does not contain the same rect as itself") - self.assertTrue( - r.contains(Rect(2, 3, 0, 0)), - "r does not contain an empty rect within its bounds", - ) - self.assertTrue( - Rect(2, 3, 0, 0) in r, - "r does not contain an empty rect within its bounds", - ) - self.assertFalse(r.contains(Rect(0, 0, 1, 2)), "r contains Rect(0, 0, 1, 2)") - self.assertFalse(r.contains(Rect(4, 6, 1, 1)), "r contains Rect(4, 6, 1, 1)") - self.assertFalse(r.contains(Rect(4, 6, 0, 0)), "r contains Rect(4, 6, 0, 0)") - self.assertFalse(Rect(0, 0, 1, 2) in r, "r contains Rect(0, 0, 1, 2)") - self.assertFalse(Rect(4, 6, 1, 1) in r, "r contains Rect(4, 6, 1, 1)") - self.assertFalse(Rect(4, 6, 0, 0) in r, "r contains Rect(4, 6, 0, 0)") - self.assertTrue(2 in Rect(0, 0, 1, 2), "r does not contain 2") - self.assertFalse(3 in Rect(0, 0, 1, 2), "r contains 3") - - def test_collidepoint(self): - r = Rect(1, 2, 3, 4) - - self.assertTrue( - r.collidepoint(r.left, r.top), "r does not collide with point (left, top)" - ) - self.assertFalse( - r.collidepoint(r.left - 1, r.top), "r collides with point (left - 1, top)" - ) - self.assertFalse( - r.collidepoint(r.left, r.top - 1), "r collides with point (left, top - 1)" - ) - self.assertFalse( - r.collidepoint(r.left - 1, r.top - 1), - "r collides with point (left - 1, top - 1)", - ) - - self.assertTrue( - r.collidepoint(r.right - 1, r.bottom - 1), - "r does not collide with point (right - 1, bottom - 1)", - ) - self.assertFalse( - r.collidepoint(r.right, r.bottom), "r collides with point (right, bottom)" - ) - self.assertFalse( - r.collidepoint(r.right - 1, r.bottom), - "r collides with point (right - 1, bottom)", - ) - self.assertFalse( - r.collidepoint(r.right, r.bottom - 1), - "r collides with point (right, bottom - 1)", - ) - - def test_inflate__larger(self): - """The inflate method inflates around the center of the rectangle""" - r = Rect(2, 4, 6, 8) - r2 = r.inflate(4, 6) - - self.assertEqual(r.center, r2.center) - self.assertEqual(r.left - 2, r2.left) - self.assertEqual(r.top - 3, r2.top) - self.assertEqual(r.right + 2, r2.right) - self.assertEqual(r.bottom + 3, r2.bottom) - self.assertEqual(r.width + 4, r2.width) - self.assertEqual(r.height + 6, r2.height) - - def test_inflate__smaller(self): - """The inflate method inflates around the center of the rectangle""" - r = Rect(2, 4, 6, 8) - r2 = r.inflate(-4, -6) - - self.assertEqual(r.center, r2.center) - self.assertEqual(r.left + 2, r2.left) - self.assertEqual(r.top + 3, r2.top) - self.assertEqual(r.right - 2, r2.right) - self.assertEqual(r.bottom 
- 3, r2.bottom) - self.assertEqual(r.width - 4, r2.width) - self.assertEqual(r.height - 6, r2.height) - - def test_inflate_ip__larger(self): - """The inflate_ip method inflates around the center of the rectangle""" - r = Rect(2, 4, 6, 8) - r2 = Rect(r) - r2.inflate_ip(-4, -6) - - self.assertEqual(r.center, r2.center) - self.assertEqual(r.left + 2, r2.left) - self.assertEqual(r.top + 3, r2.top) - self.assertEqual(r.right - 2, r2.right) - self.assertEqual(r.bottom - 3, r2.bottom) - self.assertEqual(r.width - 4, r2.width) - self.assertEqual(r.height - 6, r2.height) - - def test_inflate_ip__smaller(self): - """The inflate method inflates around the center of the rectangle""" - r = Rect(2, 4, 6, 8) - r2 = Rect(r) - r2.inflate_ip(-4, -6) - - self.assertEqual(r.center, r2.center) - self.assertEqual(r.left + 2, r2.left) - self.assertEqual(r.top + 3, r2.top) - self.assertEqual(r.right - 2, r2.right) - self.assertEqual(r.bottom - 3, r2.bottom) - self.assertEqual(r.width - 4, r2.width) - self.assertEqual(r.height - 6, r2.height) - - def test_clamp(self): - r = Rect(10, 10, 10, 10) - c = Rect(19, 12, 5, 5).clamp(r) - self.assertEqual(c.right, r.right) - self.assertEqual(c.top, 12) - c = Rect(1, 2, 3, 4).clamp(r) - self.assertEqual(c.topleft, r.topleft) - c = Rect(5, 500, 22, 33).clamp(r) - self.assertEqual(c.center, r.center) - - def test_clamp_ip(self): - r = Rect(10, 10, 10, 10) - c = Rect(19, 12, 5, 5) - c.clamp_ip(r) - self.assertEqual(c.right, r.right) - self.assertEqual(c.top, 12) - c = Rect(1, 2, 3, 4) - c.clamp_ip(r) - self.assertEqual(c.topleft, r.topleft) - c = Rect(5, 500, 22, 33) - c.clamp_ip(r) - self.assertEqual(c.center, r.center) - - def test_clip(self): - r1 = Rect(1, 2, 3, 4) - self.assertEqual(Rect(1, 2, 2, 2), r1.clip(Rect(0, 0, 3, 4))) - self.assertEqual(Rect(2, 2, 2, 4), r1.clip(Rect(2, 2, 10, 20))) - self.assertEqual(Rect(2, 3, 1, 2), r1.clip(Rect(2, 3, 1, 2))) - self.assertEqual((0, 0), r1.clip(20, 30, 5, 6).size) - self.assertEqual( - r1, r1.clip(Rect(r1)), "r1 does not clip an identical rect to itself" - ) - - def test_clipline(self): - """Ensures clipline handles four int parameters. - - Tests the clipline(x1, y1, x2, y2) format. - """ - rect = Rect((1, 2), (35, 40)) - x1 = 5 - y1 = 6 - x2 = 11 - y2 = 19 - expected_line = ((x1, y1), (x2, y2)) - - clipped_line = rect.clipline(x1, y1, x2, y2) - - self.assertIsInstance(clipped_line, tuple) - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__two_sequences(self): - """Ensures clipline handles a sequence of two sequences. - - Tests the clipline((x1, y1), (x2, y2)) format. - Tests the sequences as different types. - """ - rect = Rect((1, 2), (35, 40)) - pt1 = (5, 6) - pt2 = (11, 19) - - INNER_SEQUENCES = (list, tuple, Vector2) - expected_line = (pt1, pt2) - - for inner_seq1 in INNER_SEQUENCES: - endpt1 = inner_seq1(pt1) - - for inner_seq2 in INNER_SEQUENCES: - clipped_line = rect.clipline((endpt1, inner_seq2(pt2))) - - self.assertIsInstance(clipped_line, tuple) - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__sequence_of_four_ints(self): - """Ensures clipline handles a sequence of four ints. - - Tests the clipline((x1, y1, x2, y2)) format. - Tests the sequence as different types. 
- """ - rect = Rect((1, 2), (35, 40)) - line = (5, 6, 11, 19) - expected_line = ((line[0], line[1]), (line[2], line[3])) - - for outer_seq in (list, tuple): - clipped_line = rect.clipline(outer_seq(line)) - - self.assertIsInstance(clipped_line, tuple) - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__sequence_of_two_sequences(self): - """Ensures clipline handles a sequence of two sequences. - - Tests the clipline(((x1, y1), (x2, y2))) format. - Tests the sequences as different types. - """ - rect = Rect((1, 2), (35, 40)) - pt1 = (5, 6) - pt2 = (11, 19) - - INNER_SEQUENCES = (list, tuple, Vector2) - expected_line = (pt1, pt2) - - for inner_seq1 in INNER_SEQUENCES: - endpt1 = inner_seq1(pt1) - - for inner_seq2 in INNER_SEQUENCES: - endpt2 = inner_seq2(pt2) - - for outer_seq in (list, tuple): - clipped_line = rect.clipline(outer_seq((endpt1, endpt2))) - - self.assertIsInstance(clipped_line, tuple) - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__floats(self): - """Ensures clipline handles float parameters.""" - rect = Rect((1, 2), (35, 40)) - x1 = 5.9 - y1 = 6.9 - x2 = 11.9 - y2 = 19.9 - - # Floats are truncated. - expected_line = ( - (math.floor(x1), math.floor(y1)), - (math.floor(x2), math.floor(y2)), - ) - - clipped_line = rect.clipline(x1, y1, x2, y2) - - self.assertIsInstance(clipped_line, tuple) - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__no_overlap(self): - """Ensures lines that do not overlap the rect are not clipped.""" - rect = Rect((10, 25), (15, 20)) - # Use a bigger rect to help create test lines. - big_rect = rect.inflate(2, 2) - lines = ( - (big_rect.bottomleft, big_rect.topleft), # Left edge. - (big_rect.topleft, big_rect.topright), # Top edge. - (big_rect.topright, big_rect.bottomright), # Right edge. - (big_rect.bottomright, big_rect.bottomleft), - ) # Bottom edge. - expected_line = () - - # Test lines outside rect. - for line in lines: - clipped_line = rect.clipline(line) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__both_endpoints_outside(self): - """Ensures lines that overlap the rect are clipped. - - Testing lines with both endpoints outside the rect. - """ - rect = Rect((0, 0), (20, 20)) - # Use a bigger rect to help create test lines. - big_rect = rect.inflate(2, 2) - - # Create a dict of lines and expected results. - line_dict = { - (big_rect.midleft, big_rect.midright): ( - rect.midleft, - (rect.midright[0] - 1, rect.midright[1]), - ), - (big_rect.midtop, big_rect.midbottom): ( - rect.midtop, - (rect.midbottom[0], rect.midbottom[1] - 1), - ), - # Diagonals. - (big_rect.topleft, big_rect.bottomright): ( - rect.topleft, - (rect.bottomright[0] - 1, rect.bottomright[1] - 1), - ), - # This line needs a small adjustment to make sure it intersects - # the rect correctly. - ( - (big_rect.topright[0] - 1, big_rect.topright[1]), - (big_rect.bottomleft[0], big_rect.bottomleft[1] - 1), - ): ( - (rect.topright[0] - 1, rect.topright[1]), - (rect.bottomleft[0], rect.bottomleft[1] - 1), - ), - } - - for line, expected_line in line_dict.items(): - clipped_line = rect.clipline(line) - - self.assertTupleEqual(clipped_line, expected_line) - - # Swap endpoints to test for symmetry. - expected_line = (expected_line[1], expected_line[0]) - - clipped_line = rect.clipline((line[1], line[0])) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__both_endpoints_inside(self): - """Ensures lines that overlap the rect are clipped. 
- - Testing lines with both endpoints inside the rect. - """ - rect = Rect((-10, -5), (20, 20)) - # Use a smaller rect to help create test lines. - small_rect = rect.inflate(-2, -2) - - lines = ( - (small_rect.midleft, small_rect.midright), - (small_rect.midtop, small_rect.midbottom), - # Diagonals. - (small_rect.topleft, small_rect.bottomright), - (small_rect.topright, small_rect.bottomleft), - ) - - for line in lines: - expected_line = line - - clipped_line = rect.clipline(line) - - self.assertTupleEqual(clipped_line, expected_line) - - # Swap endpoints to test for symmetry. - expected_line = (expected_line[1], expected_line[0]) - - clipped_line = rect.clipline((line[1], line[0])) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__endpoints_inside_and_outside(self): - """Ensures lines that overlap the rect are clipped. - - Testing lines with one endpoint outside the rect and the other is - inside the rect. - """ - rect = Rect((0, 0), (21, 21)) - # Use a bigger rect to help create test lines. - big_rect = rect.inflate(2, 2) - - # Create a dict of lines and expected results. - line_dict = { - (big_rect.midleft, rect.center): (rect.midleft, rect.center), - (big_rect.midtop, rect.center): (rect.midtop, rect.center), - (big_rect.midright, rect.center): ( - (rect.midright[0] - 1, rect.midright[1]), - rect.center, - ), - (big_rect.midbottom, rect.center): ( - (rect.midbottom[0], rect.midbottom[1] - 1), - rect.center, - ), - # Diagonals. - (big_rect.topleft, rect.center): (rect.topleft, rect.center), - (big_rect.topright, rect.center): ( - (rect.topright[0] - 1, rect.topright[1]), - rect.center, - ), - (big_rect.bottomright, rect.center): ( - (rect.bottomright[0] - 1, rect.bottomright[1] - 1), - rect.center, - ), - # This line needs a small adjustment to make sure it intersects - # the rect correctly. - ((big_rect.bottomleft[0], big_rect.bottomleft[1] - 1), rect.center): ( - (rect.bottomleft[0], rect.bottomleft[1] - 1), - rect.center, - ), - } - - for line, expected_line in line_dict.items(): - clipped_line = rect.clipline(line) - - self.assertTupleEqual(clipped_line, expected_line) - - # Swap endpoints to test for symmetry. - expected_line = (expected_line[1], expected_line[0]) - - clipped_line = rect.clipline((line[1], line[0])) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__edges(self): - """Ensures clipline properly clips line that are along the rect edges.""" - rect = Rect((10, 25), (15, 20)) - - # Create a dict of edges and expected results. - edge_dict = { - # Left edge. - (rect.bottomleft, rect.topleft): ( - (rect.bottomleft[0], rect.bottomleft[1] - 1), - rect.topleft, - ), - # Top edge. - (rect.topleft, rect.topright): ( - rect.topleft, - (rect.topright[0] - 1, rect.topright[1]), - ), - # Right edge. - (rect.topright, rect.bottomright): (), - # Bottom edge. - (rect.bottomright, rect.bottomleft): (), - } - - for edge, expected_line in edge_dict.items(): - clipped_line = rect.clipline(edge) - - self.assertTupleEqual(clipped_line, expected_line) - - # Swap endpoints to test for symmetry. - if expected_line: - expected_line = (expected_line[1], expected_line[0]) - - clipped_line = rect.clipline((edge[1], edge[0])) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__equal_endpoints_with_overlap(self): - """Ensures clipline handles lines with both endpoints the same. - - Testing lines that overlap the rect. - """ - rect = Rect((10, 25), (15, 20)) - - # Test all the points in and on a rect. 
- pts = ( - (x, y) - for x in range(rect.left, rect.right) - for y in range(rect.top, rect.bottom) - ) - - for pt in pts: - expected_line = (pt, pt) - - clipped_line = rect.clipline((pt, pt)) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__equal_endpoints_no_overlap(self): - """Ensures clipline handles lines with both endpoints the same. - - Testing lines that do not overlap the rect. - """ - expected_line = () - rect = Rect((10, 25), (15, 20)) - - # Test points outside rect. - for pt in test_utils.rect_perimeter_pts(rect.inflate(2, 2)): - clipped_line = rect.clipline((pt, pt)) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__zero_size_rect(self): - """Ensures clipline handles zero sized rects correctly.""" - expected_line = () - - for size in ((0, 15), (15, 0), (0, 0)): - rect = Rect((10, 25), size) - - clipped_line = rect.clipline(rect.topleft, rect.topleft) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__negative_size_rect(self): - """Ensures clipline handles negative sized rects correctly.""" - expected_line = () - - for size in ((-15, 20), (15, -20), (-15, -20)): - rect = Rect((10, 25), size) - norm_rect = rect.copy() - norm_rect.normalize() - # Use a bigger rect to help create test lines. - big_rect = norm_rect.inflate(2, 2) - - # Create a dict of lines and expected results. Some line have both - # endpoints outside the rect and some have one inside and one - # outside. - line_dict = { - (big_rect.midleft, big_rect.midright): ( - norm_rect.midleft, - (norm_rect.midright[0] - 1, norm_rect.midright[1]), - ), - (big_rect.midtop, big_rect.midbottom): ( - norm_rect.midtop, - (norm_rect.midbottom[0], norm_rect.midbottom[1] - 1), - ), - (big_rect.midleft, norm_rect.center): ( - norm_rect.midleft, - norm_rect.center, - ), - (big_rect.midtop, norm_rect.center): ( - norm_rect.midtop, - norm_rect.center, - ), - (big_rect.midright, norm_rect.center): ( - (norm_rect.midright[0] - 1, norm_rect.midright[1]), - norm_rect.center, - ), - (big_rect.midbottom, norm_rect.center): ( - (norm_rect.midbottom[0], norm_rect.midbottom[1] - 1), - norm_rect.center, - ), - } - - for line, expected_line in line_dict.items(): - clipped_line = rect.clipline(line) - - # Make sure rect wasn't normalized. - self.assertNotEqual(rect, norm_rect) - self.assertTupleEqual(clipped_line, expected_line) - - # Swap endpoints to test for symmetry. 
- expected_line = (expected_line[1], expected_line[0]) - - clipped_line = rect.clipline((line[1], line[0])) - - self.assertTupleEqual(clipped_line, expected_line) - - def test_clipline__invalid_line(self): - """Ensures clipline handles invalid lines correctly.""" - rect = Rect((0, 0), (10, 20)) - invalid_lines = ( - (), - (1,), - (1, 2), - (1, 2, 3), - (1, 2, 3, 4, 5), - ((1, 2),), - ((1, 2), (3,)), - ((1, 2), 3), - ((1, 2, 5), (3, 4)), - ((1, 2), (3, 4, 5)), - ((1, 2), (3, 4), (5, 6)), - ) - - for line in invalid_lines: - with self.assertRaises(TypeError): - clipped_line = rect.clipline(line) - - with self.assertRaises(TypeError): - clipped_line = rect.clipline(*line) - - @unittest.skipIf(IS_PYPY, "fails on pypy sometimes") - def test_move(self): - r = Rect(1, 2, 3, 4) - move_x = 10 - move_y = 20 - r2 = r.move(move_x, move_y) - expected_r2 = Rect(r.left + move_x, r.top + move_y, r.width, r.height) - self.assertEqual(expected_r2, r2) - - @unittest.skipIf(IS_PYPY, "fails on pypy sometimes") - def test_move_ip(self): - r = Rect(1, 2, 3, 4) - r2 = Rect(r) - move_x = 10 - move_y = 20 - r2.move_ip(move_x, move_y) - expected_r2 = Rect(r.left + move_x, r.top + move_y, r.width, r.height) - self.assertEqual(expected_r2, r2) - - def test_update_XYWidthHeight(self): - """Test update with 4 int values(x, y, w, h)""" - rect = Rect(0, 0, 1, 1) - rect.update(1, 2, 3, 4) - - self.assertEqual(1, rect.left) - self.assertEqual(2, rect.top) - self.assertEqual(3, rect.width) - self.assertEqual(4, rect.height) - - def test_update__TopLeftSize(self): - """Test update with 2 tuples((x, y), (w, h))""" - rect = Rect(0, 0, 1, 1) - rect.update((1, 2), (3, 4)) - - self.assertEqual(1, rect.left) - self.assertEqual(2, rect.top) - self.assertEqual(3, rect.width) - self.assertEqual(4, rect.height) - - def test_update__List(self): - """Test update with list""" - rect = Rect(0, 0, 1, 1) - rect2 = [1, 2, 3, 4] - rect.update(rect2) - - self.assertEqual(1, rect.left) - self.assertEqual(2, rect.top) - self.assertEqual(3, rect.width) - self.assertEqual(4, rect.height) - - def test_update__RectObject(self): - """Test update with other rect object""" - rect = Rect(0, 0, 1, 1) - rect2 = Rect(1, 2, 3, 4) - rect.update(rect2) - - self.assertEqual(1, rect.left) - self.assertEqual(2, rect.top) - self.assertEqual(3, rect.width) - self.assertEqual(4, rect.height) - - def test_union(self): - r1 = Rect(1, 1, 1, 2) - r2 = Rect(-2, -2, 1, 2) - self.assertEqual(Rect(-2, -2, 4, 5), r1.union(r2)) - - def test_union__with_identical_Rect(self): - r1 = Rect(1, 2, 3, 4) - self.assertEqual(r1, r1.union(Rect(r1))) - - def test_union_ip(self): - r1 = Rect(1, 1, 1, 2) - r2 = Rect(-2, -2, 1, 2) - r1.union_ip(r2) - self.assertEqual(Rect(-2, -2, 4, 5), r1) - - def test_unionall(self): - r1 = Rect(0, 0, 1, 1) - r2 = Rect(-2, -2, 1, 1) - r3 = Rect(2, 2, 1, 1) - - r4 = r1.unionall([r2, r3]) - self.assertEqual(Rect(-2, -2, 5, 5), r4) - - def test_unionall__invalid_rect_format(self): - """Ensures unionall correctly handles invalid rect parameters.""" - numbers = [0, 1.2, 2, 3.3] - strs = ["a", "b", "c"] - nones = [None, None] - - for invalid_rects in (numbers, strs, nones): - with self.assertRaises(TypeError): - Rect(0, 0, 1, 1).unionall(invalid_rects) - - def test_unionall_ip(self): - r1 = Rect(0, 0, 1, 1) - r2 = Rect(-2, -2, 1, 1) - r3 = Rect(2, 2, 1, 1) - - r1.unionall_ip([r2, r3]) - self.assertEqual(Rect(-2, -2, 5, 5), r1) - - # Bug for an empty list. Would return a Rect instead of None. 
- self.assertTrue(r1.unionall_ip([]) is None) - - def test_unionall__invalid_rect_format(self): - """Ensures unionall_ip correctly handles invalid rect parameters.""" - numbers = [0, 1.2, 2, 3.3] - strs = ["a", "b", "c"] - nones = [None, None] - - for invalid_rects in (numbers, strs, nones): - with self.assertRaises(TypeError): - Rect(0, 0, 1, 1).unionall_ip(invalid_rects) - - def test_colliderect(self): - r1 = Rect(1, 2, 3, 4) - self.assertTrue( - r1.colliderect(Rect(0, 0, 2, 3)), - "r1 does not collide with Rect(0, 0, 2, 3)", - ) - self.assertFalse( - r1.colliderect(Rect(0, 0, 1, 2)), "r1 collides with Rect(0, 0, 1, 2)" - ) - self.assertFalse( - r1.colliderect(Rect(r1.right, r1.bottom, 2, 2)), - "r1 collides with Rect(r1.right, r1.bottom, 2, 2)", - ) - self.assertTrue( - r1.colliderect(Rect(r1.left + 1, r1.top + 1, r1.width - 2, r1.height - 2)), - "r1 does not collide with Rect(r1.left + 1, r1.top + 1, " - + "r1.width - 2, r1.height - 2)", - ) - self.assertTrue( - r1.colliderect(Rect(r1.left - 1, r1.top - 1, r1.width + 2, r1.height + 2)), - "r1 does not collide with Rect(r1.left - 1, r1.top - 1, " - + "r1.width + 2, r1.height + 2)", - ) - self.assertTrue( - r1.colliderect(Rect(r1)), "r1 does not collide with an identical rect" - ) - self.assertFalse( - r1.colliderect(Rect(r1.right, r1.bottom, 0, 0)), - "r1 collides with Rect(r1.right, r1.bottom, 0, 0)", - ) - self.assertFalse( - r1.colliderect(Rect(r1.right, r1.bottom, 1, 1)), - "r1 collides with Rect(r1.right, r1.bottom, 1, 1)", - ) - - @unittest.skipIf(IS_PYPY, "fails on pypy3 sometimes") - def testEquals(self): - """check to see how the rect uses __eq__""" - r1 = Rect(1, 2, 3, 4) - r2 = Rect(10, 20, 30, 40) - r3 = (10, 20, 30, 40) - r4 = Rect(10, 20, 30, 40) - - class foo(Rect): - def __eq__(self, other): - return id(self) == id(other) - - def __ne__(self, other): - return id(self) != id(other) - - class foo2(Rect): - pass - - r5 = foo(10, 20, 30, 40) - r6 = foo2(10, 20, 30, 40) - - self.assertNotEqual(r5, r2) - - # because we define equality differently for this subclass. - self.assertEqual(r6, r2) - - rect_list = [r1, r2, r3, r4, r6] - - # see if we can remove 4 of these. - rect_list.remove(r2) - rect_list.remove(r2) - rect_list.remove(r2) - rect_list.remove(r2) - self.assertRaises(ValueError, rect_list.remove, r2) - - def test_collidedict(self): - """Ensures collidedict detects collisions.""" - rect = Rect(1, 1, 10, 10) - - collide_item1 = ("collide 1", rect.copy()) - collide_item2 = ("collide 2", Rect(5, 5, 10, 10)) - no_collide_item1 = ("no collide 1", Rect(60, 60, 10, 10)) - no_collide_item2 = ("no collide 2", Rect(70, 70, 10, 10)) - - # Dict to check collisions with values. - rect_values = dict( - (collide_item1, collide_item2, no_collide_item1, no_collide_item2) - ) - value_collide_items = (collide_item1, collide_item2) - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - key_collide_items = tuple((tuple(v), k) for k, v in value_collide_items) - - for use_values in (True, False): - if use_values: - expected_items = value_collide_items - d = rect_values - else: - expected_items = key_collide_items - d = rect_keys - - collide_item = rect.collidedict(d, use_values) - - # The detected collision could be any of the possible items. 
- self.assertIn(collide_item, expected_items) - - def test_collidedict__no_collision(self): - """Ensures collidedict returns None when no collisions.""" - rect = Rect(1, 1, 10, 10) - - no_collide_item1 = ("no collide 1", Rect(50, 50, 10, 10)) - no_collide_item2 = ("no collide 2", Rect(60, 60, 10, 10)) - no_collide_item3 = ("no collide 3", Rect(70, 70, 10, 10)) - - # Dict to check collisions with values. - rect_values = dict((no_collide_item1, no_collide_item2, no_collide_item3)) - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - - for use_values in (True, False): - d = rect_values if use_values else rect_keys - - collide_item = rect.collidedict(d, use_values) - - self.assertIsNone(collide_item) - - def test_collidedict__barely_touching(self): - """Ensures collidedict works correctly for rects that barely touch.""" - rect = Rect(1, 1, 10, 10) - # Small rect to test barely touching collisions. - collide_rect = Rect(0, 0, 1, 1) - - collide_item1 = ("collide 1", collide_rect) - no_collide_item1 = ("no collide 1", Rect(50, 50, 10, 10)) - no_collide_item2 = ("no collide 2", Rect(60, 60, 10, 10)) - no_collide_item3 = ("no collide 3", Rect(70, 70, 10, 10)) - - # Dict to check collisions with values. - no_collide_rect_values = dict( - (no_collide_item1, no_collide_item2, no_collide_item3) - ) - - # Dict to check collisions with keys. - no_collide_rect_keys = {tuple(v): k for k, v in no_collide_rect_values.items()} - - # Tests the collide_rect on each of the rect's corners. - for attr in ("topleft", "topright", "bottomright", "bottomleft"): - setattr(collide_rect, attr, getattr(rect, attr)) - - for use_values in (True, False): - if use_values: - expected_item = collide_item1 - d = dict(no_collide_rect_values) - else: - expected_item = (tuple(collide_item1[1]), collide_item1[0]) - d = dict(no_collide_rect_keys) - - d.update((expected_item,)) # Add in the expected item. - - collide_item = rect.collidedict(d, use_values) - - self.assertTupleEqual(collide_item, expected_item) - - def test_collidedict__zero_sized_rects(self): - """Ensures collidedict works correctly with zero sized rects. - - There should be no collisions with zero sized rects. - """ - zero_rect1 = Rect(1, 1, 0, 0) - zero_rect2 = Rect(1, 1, 1, 0) - zero_rect3 = Rect(1, 1, 0, 1) - zero_rect4 = Rect(1, 1, -1, 0) - zero_rect5 = Rect(1, 1, 0, -1) - - no_collide_item1 = ("no collide 1", zero_rect1.copy()) - no_collide_item2 = ("no collide 2", zero_rect2.copy()) - no_collide_item3 = ("no collide 3", zero_rect3.copy()) - no_collide_item4 = ("no collide 4", zero_rect4.copy()) - no_collide_item5 = ("no collide 5", zero_rect5.copy()) - no_collide_item6 = ("no collide 6", Rect(0, 0, 10, 10)) - no_collide_item7 = ("no collide 7", Rect(0, 0, 2, 2)) - - # Dict to check collisions with values. - rect_values = dict( - ( - no_collide_item1, - no_collide_item2, - no_collide_item3, - no_collide_item4, - no_collide_item5, - no_collide_item6, - no_collide_item7, - ) - ) - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - - for use_values in (True, False): - d = rect_values if use_values else rect_keys - - for zero_rect in ( - zero_rect1, - zero_rect2, - zero_rect3, - zero_rect4, - zero_rect5, - ): - collide_item = zero_rect.collidedict(d, use_values) - - self.assertIsNone(collide_item) - - def test_collidedict__zero_sized_rects_as_args(self): - """Ensures collidedict works correctly with zero sized rects as args. 
- - There should be no collisions with zero sized rects. - """ - rect = Rect(0, 0, 10, 10) - - no_collide_item1 = ("no collide 1", Rect(1, 1, 0, 0)) - no_collide_item2 = ("no collide 2", Rect(1, 1, 1, 0)) - no_collide_item3 = ("no collide 3", Rect(1, 1, 0, 1)) - - # Dict to check collisions with values. - rect_values = dict((no_collide_item1, no_collide_item2, no_collide_item3)) - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - - for use_values in (True, False): - d = rect_values if use_values else rect_keys - - collide_item = rect.collidedict(d, use_values) - - self.assertIsNone(collide_item) - - def test_collidedict__negative_sized_rects(self): - """Ensures collidedict works correctly with negative sized rects.""" - neg_rect = Rect(1, 1, -1, -1) - - collide_item1 = ("collide 1", neg_rect.copy()) - collide_item2 = ("collide 2", Rect(0, 0, 10, 10)) - no_collide_item1 = ("no collide 1", Rect(1, 1, 10, 10)) - - # Dict to check collisions with values. - rect_values = dict((collide_item1, collide_item2, no_collide_item1)) - value_collide_items = (collide_item1, collide_item2) - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - key_collide_items = tuple((tuple(v), k) for k, v in value_collide_items) - - for use_values in (True, False): - if use_values: - collide_items = value_collide_items - d = rect_values - else: - collide_items = key_collide_items - d = rect_keys - - collide_item = neg_rect.collidedict(d, use_values) - - # The detected collision could be any of the possible items. - self.assertIn(collide_item, collide_items) - - def test_collidedict__negative_sized_rects_as_args(self): - """Ensures collidedict works correctly with negative sized rect args.""" - rect = Rect(0, 0, 10, 10) - - collide_item1 = ("collide 1", Rect(1, 1, -1, -1)) - no_collide_item1 = ("no collide 1", Rect(1, 1, -1, 0)) - no_collide_item2 = ("no collide 2", Rect(1, 1, 0, -1)) - - # Dict to check collisions with values. - rect_values = dict((collide_item1, no_collide_item1, no_collide_item2)) - - # Dict to check collisions with keys. 
- rect_keys = {tuple(v): k for k, v in rect_values.items()} - - for use_values in (True, False): - if use_values: - expected_item = collide_item1 - d = rect_values - else: - expected_item = (tuple(collide_item1[1]), collide_item1[0]) - d = rect_keys - - collide_item = rect.collidedict(d, use_values) - - self.assertTupleEqual(collide_item, expected_item) - - def test_collidedict__invalid_dict_format(self): - """Ensures collidedict correctly handles invalid dict parameters.""" - rect = Rect(0, 0, 10, 10) - - invalid_value_dict = ("collide", rect.copy()) - invalid_key_dict = tuple(invalid_value_dict[1]), invalid_value_dict[0] - - for use_values in (True, False): - d = invalid_value_dict if use_values else invalid_key_dict - - with self.assertRaises(TypeError): - collide_item = rect.collidedict(d, use_values) - - def test_collidedict__invalid_dict_value_format(self): - """Ensures collidedict correctly handles dicts with invalid values.""" - rect = Rect(0, 0, 10, 10) - rect_keys = {tuple(rect): "collide"} - - with self.assertRaises(TypeError): - collide_item = rect.collidedict(rect_keys, 1) - - def test_collidedict__invalid_dict_key_format(self): - """Ensures collidedict correctly handles dicts with invalid keys.""" - rect = Rect(0, 0, 10, 10) - rect_values = {"collide": rect.copy()} - - with self.assertRaises(TypeError): - collide_item = rect.collidedict(rect_values) - - def test_collidedict__invalid_use_values_format(self): - """Ensures collidedict correctly handles invalid use_values parameters.""" - rect = Rect(0, 0, 1, 1) - d = {} - - for invalid_param in (None, d, 1.1): - with self.assertRaises(TypeError): - collide_item = rect.collidedict(d, invalid_param) - - def test_collidedictall(self): - """Ensures collidedictall detects collisions.""" - rect = Rect(1, 1, 10, 10) - - collide_item1 = ("collide 1", rect.copy()) - collide_item2 = ("collide 2", Rect(5, 5, 10, 10)) - no_collide_item1 = ("no collide 1", Rect(60, 60, 20, 20)) - no_collide_item2 = ("no collide 2", Rect(70, 70, 20, 20)) - - # Dict to check collisions with values. - rect_values = dict( - (collide_item1, collide_item2, no_collide_item1, no_collide_item2) - ) - value_collide_items = [collide_item1, collide_item2] - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - key_collide_items = [(tuple(v), k) for k, v in value_collide_items] - - for use_values in (True, False): - if use_values: - expected_items = value_collide_items - d = rect_values - else: - expected_items = key_collide_items - d = rect_keys - - collide_items = rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__no_collision(self): - """Ensures collidedictall returns an empty list when no collisions.""" - rect = Rect(1, 1, 10, 10) - - no_collide_item1 = ("no collide 1", Rect(50, 50, 20, 20)) - no_collide_item2 = ("no collide 2", Rect(60, 60, 20, 20)) - no_collide_item3 = ("no collide 3", Rect(70, 70, 20, 20)) - - # Dict to check collisions with values. - rect_values = dict((no_collide_item1, no_collide_item2, no_collide_item3)) - - # Dict to check collisions with keys. 
- rect_keys = {tuple(v): k for k, v in rect_values.items()} - - expected_items = [] - - for use_values in (True, False): - d = rect_values if use_values else rect_keys - - collide_items = rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__barely_touching(self): - """Ensures collidedictall works correctly for rects that barely touch.""" - rect = Rect(1, 1, 10, 10) - # Small rect to test barely touching collisions. - collide_rect = Rect(0, 0, 1, 1) - - collide_item1 = ("collide 1", collide_rect) - no_collide_item1 = ("no collide 1", Rect(50, 50, 20, 20)) - no_collide_item2 = ("no collide 2", Rect(60, 60, 20, 20)) - no_collide_item3 = ("no collide 3", Rect(70, 70, 20, 20)) - - # Dict to check collisions with values. - no_collide_rect_values = dict( - (no_collide_item1, no_collide_item2, no_collide_item3) - ) - - # Dict to check collisions with keys. - no_collide_rect_keys = {tuple(v): k for k, v in no_collide_rect_values.items()} - - # Tests the collide_rect on each of the rect's corners. - for attr in ("topleft", "topright", "bottomright", "bottomleft"): - setattr(collide_rect, attr, getattr(rect, attr)) - - for use_values in (True, False): - if use_values: - expected_items = [collide_item1] - d = dict(no_collide_rect_values) - else: - expected_items = [(tuple(collide_item1[1]), collide_item1[0])] - d = dict(no_collide_rect_keys) - - d.update(expected_items) # Add in the expected items. - - collide_items = rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__zero_sized_rects(self): - """Ensures collidedictall works correctly with zero sized rects. - - There should be no collisions with zero sized rects. - """ - zero_rect1 = Rect(2, 2, 0, 0) - zero_rect2 = Rect(2, 2, 2, 0) - zero_rect3 = Rect(2, 2, 0, 2) - zero_rect4 = Rect(2, 2, -2, 0) - zero_rect5 = Rect(2, 2, 0, -2) - - no_collide_item1 = ("no collide 1", zero_rect1.copy()) - no_collide_item2 = ("no collide 2", zero_rect2.copy()) - no_collide_item3 = ("no collide 3", zero_rect3.copy()) - no_collide_item4 = ("no collide 4", zero_rect4.copy()) - no_collide_item5 = ("no collide 5", zero_rect5.copy()) - no_collide_item6 = ("no collide 6", Rect(0, 0, 10, 10)) - no_collide_item7 = ("no collide 7", Rect(0, 0, 2, 2)) - - # Dict to check collisions with values. - rect_values = dict( - ( - no_collide_item1, - no_collide_item2, - no_collide_item3, - no_collide_item4, - no_collide_item5, - no_collide_item6, - no_collide_item7, - ) - ) - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - - expected_items = [] - - for use_values in (True, False): - d = rect_values if use_values else rect_keys - - for zero_rect in ( - zero_rect1, - zero_rect2, - zero_rect3, - zero_rect4, - zero_rect5, - ): - collide_items = zero_rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__zero_sized_rects_as_args(self): - """Ensures collidedictall works correctly with zero sized rects - as args. - - There should be no collisions with zero sized rects. - """ - rect = Rect(0, 0, 20, 20) - - no_collide_item1 = ("no collide 1", Rect(2, 2, 0, 0)) - no_collide_item2 = ("no collide 2", Rect(2, 2, 2, 0)) - no_collide_item3 = ("no collide 3", Rect(2, 2, 0, 2)) - - # Dict to check collisions with values. - rect_values = dict((no_collide_item1, no_collide_item2, no_collide_item3)) - - # Dict to check collisions with keys. 
- rect_keys = {tuple(v): k for k, v in rect_values.items()} - - expected_items = [] - - for use_values in (True, False): - d = rect_values if use_values else rect_keys - - collide_items = rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__negative_sized_rects(self): - """Ensures collidedictall works correctly with negative sized rects.""" - neg_rect = Rect(2, 2, -2, -2) - - collide_item1 = ("collide 1", neg_rect.copy()) - collide_item2 = ("collide 2", Rect(0, 0, 20, 20)) - no_collide_item1 = ("no collide 1", Rect(2, 2, 20, 20)) - - # Dict to check collisions with values. - rect_values = dict((collide_item1, collide_item2, no_collide_item1)) - value_collide_items = [collide_item1, collide_item2] - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - key_collide_items = [(tuple(v), k) for k, v in value_collide_items] - - for use_values in (True, False): - if use_values: - expected_items = value_collide_items - d = rect_values - else: - expected_items = key_collide_items - d = rect_keys - - collide_items = neg_rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__negative_sized_rects_as_args(self): - """Ensures collidedictall works correctly with negative sized rect - args. - """ - rect = Rect(0, 0, 10, 10) - - collide_item1 = ("collide 1", Rect(1, 1, -1, -1)) - no_collide_item1 = ("no collide 1", Rect(1, 1, -1, 0)) - no_collide_item2 = ("no collide 2", Rect(1, 1, 0, -1)) - - # Dict to check collisions with values. - rect_values = dict((collide_item1, no_collide_item1, no_collide_item2)) - value_collide_items = [collide_item1] - - # Dict to check collisions with keys. - rect_keys = {tuple(v): k for k, v in rect_values.items()} - key_collide_items = [(tuple(v), k) for k, v in value_collide_items] - - for use_values in (True, False): - if use_values: - expected_items = value_collide_items - d = rect_values - else: - expected_items = key_collide_items - d = rect_keys - - collide_items = rect.collidedictall(d, use_values) - - self._assertCountEqual(collide_items, expected_items) - - def test_collidedictall__invalid_dict_format(self): - """Ensures collidedictall correctly handles invalid dict parameters.""" - rect = Rect(0, 0, 10, 10) - - invalid_value_dict = ("collide", rect.copy()) - invalid_key_dict = tuple(invalid_value_dict[1]), invalid_value_dict[0] - - for use_values in (True, False): - d = invalid_value_dict if use_values else invalid_key_dict - - with self.assertRaises(TypeError): - collide_item = rect.collidedictall(d, use_values) - - def test_collidedictall__invalid_dict_value_format(self): - """Ensures collidedictall correctly handles dicts with invalid values.""" - rect = Rect(0, 0, 10, 10) - rect_keys = {tuple(rect): "collide"} - - with self.assertRaises(TypeError): - collide_items = rect.collidedictall(rect_keys, 1) - - def test_collidedictall__invalid_dict_key_format(self): - """Ensures collidedictall correctly handles dicts with invalid keys.""" - rect = Rect(0, 0, 10, 10) - rect_values = {"collide": rect.copy()} - - with self.assertRaises(TypeError): - collide_items = rect.collidedictall(rect_values) - - def test_collidedictall__invalid_use_values_format(self): - """Ensures collidedictall correctly handles invalid use_values - parameters. 
- """ - rect = Rect(0, 0, 1, 1) - d = {} - - for invalid_param in (None, d, 1.1): - with self.assertRaises(TypeError): - collide_items = rect.collidedictall(d, invalid_param) - - def test_collidelist(self): - - # __doc__ (as of 2008-08-02) for pygame.rect.Rect.collidelist: - - # Rect.collidelist(list): return index - # test if one rectangle in a list intersects - # - # Test whether the rectangle collides with any in a sequence of - # rectangles. The index of the first collision found is returned. If - # no collisions are found an index of -1 is returned. - - r = Rect(1, 1, 10, 10) - l = [Rect(50, 50, 1, 1), Rect(5, 5, 10, 10), Rect(15, 15, 1, 1)] - - self.assertEqual(r.collidelist(l), 1) - - f = [Rect(50, 50, 1, 1), (100, 100, 4, 4)] - self.assertEqual(r.collidelist(f), -1) - - def test_collidelistall(self): - - # __doc__ (as of 2008-08-02) for pygame.rect.Rect.collidelistall: - - # Rect.collidelistall(list): return indices - # test if all rectangles in a list intersect - # - # Returns a list of all the indices that contain rectangles that - # collide with the Rect. If no intersecting rectangles are found, an - # empty list is returned. - - r = Rect(1, 1, 10, 10) - - l = [ - Rect(1, 1, 10, 10), - Rect(5, 5, 10, 10), - Rect(15, 15, 1, 1), - Rect(2, 2, 1, 1), - ] - self.assertEqual(r.collidelistall(l), [0, 1, 3]) - - f = [Rect(50, 50, 1, 1), Rect(20, 20, 5, 5)] - self.assertFalse(r.collidelistall(f)) - - def test_fit(self): - - # __doc__ (as of 2008-08-02) for pygame.rect.Rect.fit: - - # Rect.fit(Rect): return Rect - # resize and move a rectangle with aspect ratio - # - # Returns a new rectangle that is moved and resized to fit another. - # The aspect ratio of the original Rect is preserved, so the new - # rectangle may be smaller than the target in either width or height. - - r = Rect(10, 10, 30, 30) - - r2 = Rect(30, 30, 15, 10) - - f = r.fit(r2) - self.assertTrue(r2.contains(f)) - - f2 = r2.fit(r) - self.assertTrue(r.contains(f2)) - - def test_copy(self): - r = Rect(1, 2, 10, 20) - c = r.copy() - self.assertEqual(c, r) - - def test_subscript(self): - r = Rect(1, 2, 3, 4) - self.assertEqual(r[0], 1) - self.assertEqual(r[1], 2) - self.assertEqual(r[2], 3) - self.assertEqual(r[3], 4) - self.assertEqual(r[-1], 4) - self.assertEqual(r[-2], 3) - self.assertEqual(r[-4], 1) - self.assertRaises(IndexError, r.__getitem__, 5) - self.assertRaises(IndexError, r.__getitem__, -5) - self.assertEqual(r[0:2], [1, 2]) - self.assertEqual(r[0:4], [1, 2, 3, 4]) - self.assertEqual(r[0:-1], [1, 2, 3]) - self.assertEqual(r[:], [1, 2, 3, 4]) - self.assertEqual(r[...], [1, 2, 3, 4]) - self.assertEqual(r[0:4:2], [1, 3]) - self.assertEqual(r[0:4:3], [1, 4]) - self.assertEqual(r[3::-1], [4, 3, 2, 1]) - self.assertRaises(TypeError, r.__getitem__, None) - - def test_ass_subscript(self): - r = Rect(0, 0, 0, 0) - r[...] = 1, 2, 3, 4 - self.assertEqual(r, [1, 2, 3, 4]) - self.assertRaises(TypeError, r.__setitem__, None, 0) - self.assertEqual(r, [1, 2, 3, 4]) - self.assertRaises(TypeError, r.__setitem__, 0, "") - self.assertEqual(r, [1, 2, 3, 4]) - self.assertRaises(IndexError, r.__setitem__, 4, 0) - self.assertEqual(r, [1, 2, 3, 4]) - self.assertRaises(IndexError, r.__setitem__, -5, 0) - self.assertEqual(r, [1, 2, 3, 4]) - r[0] = 10 - self.assertEqual(r, [10, 2, 3, 4]) - r[3] = 40 - self.assertEqual(r, [10, 2, 3, 40]) - r[-1] = 400 - self.assertEqual(r, [10, 2, 3, 400]) - r[-4] = 100 - self.assertEqual(r, [100, 2, 3, 400]) - r[1:3] = 0 - self.assertEqual(r, [100, 0, 0, 400]) - r[...] 
= 0 - self.assertEqual(r, [0, 0, 0, 0]) - r[:] = 9 - self.assertEqual(r, [9, 9, 9, 9]) - r[:] = 11, 12, 13, 14 - self.assertEqual(r, [11, 12, 13, 14]) - r[::-1] = r - self.assertEqual(r, [14, 13, 12, 11]) - - -@unittest.skipIf(IS_PYPY, "fails on pypy") -class SubclassTest(unittest.TestCase): - class MyRect(Rect): - def __init__(self, *args, **kwds): - super(SubclassTest.MyRect, self).__init__(*args, **kwds) - self.an_attribute = True - - def test_copy(self): - mr1 = self.MyRect(1, 2, 10, 20) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.copy() - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_move(self): - mr1 = self.MyRect(1, 2, 10, 20) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.move(1, 2) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_inflate(self): - mr1 = self.MyRect(1, 2, 10, 20) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.inflate(2, 4) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_clamp(self): - mr1 = self.MyRect(19, 12, 5, 5) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.clamp(Rect(10, 10, 10, 10)) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_clip(self): - mr1 = self.MyRect(1, 2, 3, 4) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.clip(Rect(0, 0, 3, 4)) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_union(self): - mr1 = self.MyRect(1, 1, 1, 2) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.union(Rect(-2, -2, 1, 2)) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_unionall(self): - mr1 = self.MyRect(0, 0, 1, 1) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.unionall([Rect(-2, -2, 1, 1), Rect(2, 2, 1, 1)]) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - def test_fit(self): - mr1 = self.MyRect(10, 10, 30, 30) - self.assertTrue(mr1.an_attribute) - mr2 = mr1.fit(Rect(30, 30, 15, 10)) - self.assertTrue(isinstance(mr2, self.MyRect)) - self.assertRaises(AttributeError, getattr, mr2, "an_attribute") - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 824cc7b..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/__pycache__/run_tests__test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/__pycache__/run_tests__test.cpython-39.pyc deleted file mode 100644 index ed7dd30..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/__pycache__/run_tests__test.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index dec15cb..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 39c80ca..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_3_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_3_test.cpython-39.pyc deleted file mode 100644 index 8e9c8a3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_3_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_4_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_4_test.cpython-39.pyc deleted file mode 100644 index a9bd985..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_4_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_5_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_5_test.cpython-39.pyc deleted file mode 100644 index b73dbf4..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_5_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_6_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_6_test.cpython-39.pyc deleted file mode 100644 index 6ae1085..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/fake_6_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/no_assertions__ret_code_of_1__test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/no_assertions__ret_code_of_1__test.cpython-39.pyc deleted file mode 100644 index b67ac97..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/no_assertions__ret_code_of_1__test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/zero_tests_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/zero_tests_test.cpython-39.pyc deleted file mode 100644 index 213aa88..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/__pycache__/zero_tests_test.cpython-39.pyc and /dev/null differ 
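[Editor's note — not part of the diff above.] For readers skimming this removal of the vendored pygame test suite, a minimal usage sketch of the Rect collision helpers documented in the deleted rect_test.py docstrings (collidelist, collidelistall, fit). The rect values below mirror the deleted tests' examples; they are illustrative only and carry no significance for this changeset.

    from pygame import Rect

    r = Rect(1, 1, 10, 10)
    others = [Rect(50, 50, 1, 1), Rect(5, 5, 10, 10), Rect(15, 15, 1, 1)]

    # collidelist returns the index of the first colliding rect, or -1 if none collide.
    first_hit = r.collidelist(others)    # 1 (only the 10x10 rect at (5, 5) overlaps)

    # collidelistall returns the indices of every colliding rect (empty list if none).
    all_hits = r.collidelistall(others)  # [1]

    # fit moves and resizes r to fit inside another rect, preserving aspect ratio.
    target = Rect(30, 30, 15, 10)
    fitted = r.fit(target)
    assert target.contains(fitted)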
diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_3_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_3_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_3_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_4_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_4_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_4_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git 
a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_5_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_5_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_5_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_6_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_6_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/fake_6_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/no_assertions__ret_code_of_1__test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/no_assertions__ret_code_of_1__test.py deleted file mode 100644 index 0ba0e94..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/no_assertions__ret_code_of_1__test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - pass - - def test_get_mods(self): - pass - - def test_get_pressed(self): - pass - - def test_name(self): - pass - - def test_set_mods(self): - pass - - def test_set_repeat(self): - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/zero_tests_test.py 
b/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/zero_tests_test.py deleted file mode 100644 index 649055a..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/all_ok/zero_tests_test.py +++ /dev/null @@ -1,23 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - pass - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 06f843d..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 73a93e1..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/incomplete_todo_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/incomplete_todo_test.cpython-39.pyc deleted file mode 100644 index 66106e7..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/incomplete_todo_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/magic_tag_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/magic_tag_test.cpython-39.pyc deleted file mode 100644 index 9c9bcc1..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/magic_tag_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/sleep_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/sleep_test.cpython-39.pyc deleted file mode 100644 index aa23670..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/__pycache__/sleep_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import 
os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/incomplete_todo_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/incomplete_todo_test.py deleted file mode 100644 index bdd8a3b..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/incomplete_todo_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def todo_test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def todo_test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/magic_tag_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/magic_tag_test.py deleted file mode 100644 index 126bc2b..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/magic_tag_test.py +++ /dev/null @@ -1,38 +0,0 @@ -__tags__ = ["magic"] - -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/sleep_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/sleep_test.py deleted file mode 100644 index 468c75f..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/everything/sleep_test.py +++ /dev/null @@ -1,29 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = 
os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - -import time - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - stop_time = time.time() + 10.0 - while time.time() < stop_time: - time.sleep(1) - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 38aefa9..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 1f55808..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/invisible_tag_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/invisible_tag_test.cpython-39.pyc deleted file mode 100644 index 8f4ea0a..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/invisible_tag_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/magic_tag_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/magic_tag_test.cpython-39.pyc deleted file mode 100644 index 4ef0dcd..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/__pycache__/magic_tag_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - 
def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/invisible_tag_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/invisible_tag_test.py deleted file mode 100644 index 3ef959a..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/invisible_tag_test.py +++ /dev/null @@ -1,41 +0,0 @@ -__tags__ = ["invisible"] - -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/magic_tag_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/magic_tag_test.py deleted file mode 100644 index 126bc2b..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/exclude/magic_tag_test.py +++ /dev/null @@ -1,38 +0,0 @@ -__tags__ = ["magic"] - -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index d3bfae0..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index b362531..0000000 Binary files 
a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_3_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_3_test.cpython-39.pyc deleted file mode 100644 index c020795..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_3_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_4_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_4_test.cpython-39.pyc deleted file mode 100644 index 82c1f8d..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/__pycache__/fake_4_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_3_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_3_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_3_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_4_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_4_test.py deleted file mode 100644 index 1e75fea..0000000 --- 
a/venv/Lib/site-packages/pygame/tests/run_tests__tests/failures1/fake_4_test.py +++ /dev/null @@ -1,41 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(False, "Some Jibberish") - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - if 1: - if 1: - assert False - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 12b6ed3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 7642ff0..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/fake_3_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/fake_3_test.cpython-39.pyc deleted file mode 100644 index b01dc65..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/__pycache__/fake_3_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/fake_2_test.py deleted file mode 100644 index b88f1ae..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def todo_test_get_pressed(self): - self.fail() - - def test_name(self): - self.assertTrue(True) - - def 
todo_test_set_mods(self): - self.fail() - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/fake_3_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/fake_3_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete/fake_3_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 9b18738..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index ade0cff..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/fake_3_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/fake_3_test.cpython-39.pyc deleted file mode 100644 index b77c5c0..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/__pycache__/fake_3_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/fake_2_test.py deleted file mode 100644 index bdd8a3b..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == 
"pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def todo_test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def todo_test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/fake_3_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/fake_3_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/incomplete_todo/fake_3_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index d2be196..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/fake_1_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/fake_1_test.cpython-39.pyc deleted file mode 100644 index 9e72de3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/fake_1_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 46a2b0a..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_1_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_1_test.py deleted file mode 100644 
index 3e9e936..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_1_test.py +++ /dev/null @@ -1,40 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - while True: - pass - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/infinite_loop/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 5148ae3..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 93e3950..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_3_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_3_test.cpython-39.pyc deleted 
file mode 100644 index 19ee82e..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_3_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_4_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_4_test.cpython-39.pyc deleted file mode 100644 index 07a9e27..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/__pycache__/fake_4_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_3_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_3_test.py deleted file mode 100644 index f59ad40..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_3_test.py +++ /dev/null @@ -1,41 +0,0 @@ -import sys - -if __name__ == "__main__": - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - sys.stderr.write("jibberish messes things up\n") - self.assertTrue(False) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_4_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_4_test.py deleted file mode 100644 index 1e75fea..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stderr/fake_4_test.py +++ /dev/null @@ -1,41 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - 
is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(False, "Some Jibberish") - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - if 1: - if 1: - assert False - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 4f38bfd..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index 615c194..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_3_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_3_test.cpython-39.pyc deleted file mode 100644 index 118e166..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_3_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_4_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_4_test.cpython-39.pyc deleted file mode 100644 index d71deee..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/__pycache__/fake_4_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - 
self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_3_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_3_test.py deleted file mode 100644 index 467c725..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_3_test.py +++ /dev/null @@ -1,42 +0,0 @@ -import sys - -if __name__ == "__main__": - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - sys.stdout.write("jibberish ruins everything\n") - self.assertTrue(False) - - def test_name(self): - sys.stdout.write("forgot to remove debug crap\n") - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_4_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_4_test.py deleted file mode 100644 index 1e75fea..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/print_stdout/fake_4_test.py +++ /dev/null @@ -1,41 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(False, "Some Jibberish") - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - if 1: - if 1: - assert False - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/run_tests__test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/run_tests__test.py deleted file mode 100644 index 533f7a0..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/run_tests__test.py +++ /dev/null @@ -1,145 +0,0 @@ -################################################################################ - -import subprocess, os, sys, re, difflib - -################################################################################ - -IGNORE = (".svn", "infinite_loop") -NORMALIZERS = ( - (r"Ran (\d+) tests in (\d+\.\d+)s", "Ran \\1 tests in X.XXXs"), - (r'File ".*?([^/\\.]+\.py)"', 'File "\\1"'), -) - 
-################################################################################ - - -def norm_result(result): - "normalize differences, such as timing between output" - for normalizer, replacement in NORMALIZERS: - if hasattr(normalizer, "__call__"): - result = normalizer(result) - else: - result = re.sub(normalizer, replacement, result) - - return result - - -def call_proc(cmd, cd=None): - proc = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=cd, - universal_newlines=True, - ) - if proc.wait(): - print("%s %s" % (cmd, proc.wait())) - raise Exception(proc.stdout.read()) - - return proc.stdout.read() - - -################################################################################ - -unnormed_diff = "-u" in sys.argv -verbose = "-v" in sys.argv or unnormed_diff -if "-h" in sys.argv or "--help" in sys.argv: - sys.exit( - "\nCOMPARES OUTPUT OF SINGLE VS SUBPROCESS MODE OF RUN_TESTS.PY\n\n" - "-v, to output diffs even on success\n" - "-u, to output diffs of unnormalized tests\n\n" - "Each line of a Differ delta begins with a two-letter code:\n\n" - " '- ' line unique to sequence 1\n" - " '+ ' line unique to sequence 2\n" - " ' ' line common to both sequences\n" - " '? ' line not present in either input sequence\n" - ) - -main_dir = os.path.split(os.path.abspath(sys.argv[0]))[0] -trunk_dir = os.path.normpath(os.path.join(main_dir, "../../")) - -test_suite_dirs = [ - x - for x in os.listdir(main_dir) - if os.path.isdir(os.path.join(main_dir, x)) and x not in IGNORE -] - - -################################################################################ - - -def assert_on_results(suite, single, sub): - test = globals().get("%s_test" % suite) - if hasattr(test, "__call_"): - test(suite, single, sub) - print("assertions on %s OK" % (suite,)) - - -# Don't modify tests in suites below. 
These assertions are in place to make sure -# that tests are actually being ran - - -def all_ok_test(uite, *args): - for results in args: - assert "Ran 36 tests" in results # some tests are runing - assert "OK" in results # OK - - -def failures1_test(suite, *args): - for results in args: - assert "FAILED (failures=2)" in results - assert "Ran 18 tests" in results - - -################################################################################ -# Test that output is the same in single process and subprocess modes -# - -base_cmd = [sys.executable, "run_tests.py", "-i"] - -cmd = base_cmd + ["-n", "-f"] -sub_cmd = base_cmd + ["-f"] -time_out_cmd = base_cmd + ["-t", "4", "-f", "infinite_loop"] - -passes = 0 -failed = False - -for suite in test_suite_dirs: - single = call_proc(cmd + [suite], trunk_dir) - subs = call_proc(sub_cmd + [suite], trunk_dir) - - normed_single, normed_subs = map(norm_result, (single, subs)) - - failed = normed_single != normed_subs - if failed: - print("%s suite comparison FAILED\n" % (suite,)) - else: - passes += 1 - print("%s suite comparison OK" % (suite,)) - - assert_on_results(suite, single, subs) - - if verbose or failed: - print("difflib.Differ().compare(single, suprocessed):\n") - print( - "".join( - list( - difflib.Differ().compare( - (unnormed_diff and single or normed_single).splitlines(1), - (unnormed_diff and subs or normed_subs).splitlines(1), - ) - ) - ) - ) - -sys.stdout.write("infinite_loop suite (subprocess mode timeout) ") -loop_test = call_proc(time_out_cmd, trunk_dir) -assert "successfully terminated" in loop_test -passes += 1 -print("OK") - -print("\n%s/%s suites pass" % (passes, len(test_suite_dirs) + 1)) - -print("\n-h for help") - -################################################################################ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__init__.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__init__.py deleted file mode 100644 index 1bb8bf6..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 7d87f29..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/fake_2_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/fake_2_test.cpython-39.pyc deleted file mode 100644 index b260332..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/fake_2_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/sleep_test.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/sleep_test.cpython-39.pyc deleted file mode 100644 index 692e348..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/__pycache__/sleep_test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/fake_2_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/fake_2_test.py deleted file mode 100644 index 3be92e1..0000000 --- 
a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/fake_2_test.py +++ /dev/null @@ -1,39 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - self.assertTrue(True) - - def test_get_mods(self): - self.assertTrue(True) - - def test_get_pressed(self): - self.assertTrue(True) - - def test_name(self): - self.assertTrue(True) - - def test_set_mods(self): - self.assertTrue(True) - - def test_set_repeat(self): - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/sleep_test.py b/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/sleep_test.py deleted file mode 100644 index bab528a..0000000 --- a/venv/Lib/site-packages/pygame/tests/run_tests__tests/timeout/sleep_test.py +++ /dev/null @@ -1,30 +0,0 @@ -if __name__ == "__main__": - import sys - import os - - pkg_dir = os.path.split( - os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - )[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest - -import time - - -class KeyModuleTest(unittest.TestCase): - def test_get_focused(self): - stop_time = time.time() + 10.0 - while time.time() < stop_time: - time.sleep(1) - - self.assertTrue(True) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/rwobject_test.py b/venv/Lib/site-packages/pygame/tests/rwobject_test.py deleted file mode 100644 index 31723ae..0000000 --- a/venv/Lib/site-packages/pygame/tests/rwobject_test.py +++ /dev/null @@ -1,139 +0,0 @@ -import pathlib -import unittest - -from pygame import encode_string, encode_file_path - - -class RWopsEncodeStringTest(unittest.TestCase): - global getrefcount - - def test_obj_None(self): - encoded_string = encode_string(None) - - self.assertIsNone(encoded_string) - - def test_returns_bytes(self): - u = "Hello" - encoded_string = encode_string(u) - - self.assertIsInstance(encoded_string, bytes) - - def test_obj_bytes(self): - b = b"encyclop\xE6dia" - encoded_string = encode_string(b, "ascii", "strict") - - self.assertIs(encoded_string, b) - - def test_encode_unicode(self): - u = "\u00DEe Olde Komp\u00FCter Shoppe" - b = u.encode("utf-8") - self.assertEqual(encode_string(u, "utf-8"), b) - - def test_error_fowarding(self): - self.assertRaises(SyntaxError, encode_string) - - def test_errors(self): - u = "abc\u0109defg\u011Dh\u0125ij\u0135klmnoprs\u015Dtu\u016Dvz" - b = u.encode("ascii", "ignore") - self.assertEqual(encode_string(u, "ascii", "ignore"), b) - - def test_encoding_error(self): - u = "a\x80b" - encoded_string = encode_string(u, "ascii", "strict") - - self.assertIsNone(encoded_string) - - def test_check_defaults(self): - u = "a\u01F7b" - b = u.encode("unicode_escape", "backslashreplace") - encoded_string = encode_string(u) - - self.assertEqual(encoded_string, b) - - def test_etype(self): - u = "a\x80b" - 
self.assertRaises(SyntaxError, encode_string, u, "ascii", "strict", SyntaxError) - - def test_etype__invalid(self): - """Ensures invalid etypes are properly handled.""" - - for etype in ("SyntaxError", self): - self.assertRaises(TypeError, encode_string, "test", etype=etype) - - def test_string_with_null_bytes(self): - b = b"a\x00b\x00c" - encoded_string = encode_string(b, etype=SyntaxError) - encoded_decode_string = encode_string(b.decode(), "ascii", "strict") - - self.assertIs(encoded_string, b) - self.assertEqual(encoded_decode_string, b) - - try: - from sys import getrefcount as _g - - getrefcount = _g # This nonsense is for Python 3.x - except ImportError: - pass - else: - - def test_refcount(self): - bpath = b" This is a string that is not cached."[1:] - upath = bpath.decode("ascii") - before = getrefcount(bpath) - bpath = encode_string(bpath) - self.assertEqual(getrefcount(bpath), before) - bpath = encode_string(upath) - self.assertEqual(getrefcount(bpath), before) - - def test_smp(self): - utf_8 = b"a\xF0\x93\x82\xA7b" - u = "a\U000130A7b" - b = encode_string(u, "utf-8", "strict", AssertionError) - self.assertEqual(b, utf_8) - - def test_pathlib_obj(self): - """Test loading string representation of pathlib object""" - """ - We do this because pygame functions internally use pg_EncodeString - to decode the filenames passed to them. So if we test that here, we - can safely assume that all those functions do not have any issues - with pathlib objects - """ - encoded = encode_string(pathlib.PurePath("foo"), "utf-8") - self.assertEqual(encoded, b"foo") - - encoded = encode_string(pathlib.Path("baz")) - self.assertEqual(encoded, b"baz") - - -class RWopsEncodeFilePathTest(unittest.TestCase): - # Most tests can be skipped since RWopsEncodeFilePath wraps - # RWopsEncodeString - def test_encoding(self): - u = "Hello" - encoded_file_path = encode_file_path(u) - - self.assertIsInstance(encoded_file_path, bytes) - - def test_error_fowarding(self): - self.assertRaises(SyntaxError, encode_file_path) - - def test_path_with_null_bytes(self): - b = b"a\x00b\x00c" - encoded_file_path = encode_file_path(b) - - self.assertIsNone(encoded_file_path) - - def test_etype(self): - b = b"a\x00b\x00c" - self.assertRaises(TypeError, encode_file_path, b, TypeError) - - def test_etype__invalid(self): - """Ensures invalid etypes are properly handled.""" - - for etype in ("SyntaxError", self): - self.assertRaises(TypeError, encode_file_path, "test", etype) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/scrap_tags.py b/venv/Lib/site-packages/pygame/tests/scrap_tags.py deleted file mode 100644 index 17a82ff..0000000 --- a/venv/Lib/site-packages/pygame/tests/scrap_tags.py +++ /dev/null @@ -1,26 +0,0 @@ -__tags__ = ["ignore", "subprocess_ignore"] - -# TODO: make scrap_test.py work -# This test used to work only on linux and windows. -# Currently it only work in windows, and in linux it throws: -# `pygame.error: content could not be placed in clipboard.` -# The old test and tags kept here for reference when fixing. 
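These *_tags.py companion modules steer test selection through a module-level __tags__ list, and the commented-out block kept below for reference shows the conditional form of that pattern. A minimal sketch of the same idea, using a hypothetical example_tags.py module name and treating the tag meanings ("ignore" skips the module, "subprocess_ignore" skips it in subprocess mode) as assumptions drawn from the surrounding files:

# example_tags.py -- hypothetical companion for an example_test.py module
import sys

__tags__ = []

# Exclude the matching test module on platforms where the feature under
# test is not expected to work (mirrors the commented-out scrap logic).
exclude = not (sys.platform == "win32" or sys.platform.startswith("linux"))

if exclude:
    __tags__.extend(["ignore", "subprocess_ignore"])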
- -# import sys -# -# exclude = False -# -# if sys.platform == "win32" or sys.platform.startswith("linux"): -# try: -# import pygame -# -# pygame.scrap._NOT_IMPLEMENTED_ -# except AttributeError: -# pass -# else: -# exclude = True -# else: -# exclude = True -# -# if exclude: -# __tags__.extend(["ignore", "subprocess_ignore"]) diff --git a/venv/Lib/site-packages/pygame/tests/scrap_test.py b/venv/Lib/site-packages/pygame/tests/scrap_test.py deleted file mode 100644 index 6b7f6fa..0000000 --- a/venv/Lib/site-packages/pygame/tests/scrap_test.py +++ /dev/null @@ -1,301 +0,0 @@ -import os -import sys - -if os.environ.get("SDL_VIDEODRIVER") == "dummy": - __tags__ = ("ignore", "subprocess_ignore") -import unittest -from pygame.tests.test_utils import trunk_relative_path - -import pygame -from pygame import scrap - - -class ScrapModuleTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - pygame.display.init() - pygame.display.set_mode((1, 1)) - scrap.init() - - @classmethod - def tearDownClass(cls): - # scrap.quit() # Does not exist! - pygame.display.quit() - - def test_init(self): - """Ensures scrap module still initialized after multiple init calls.""" - scrap.init() - scrap.init() - - self.assertTrue(scrap.get_init()) - - def test_init__reinit(self): - """Ensures reinitializing the scrap module doesn't clear its data.""" - data_type = pygame.SCRAP_TEXT - expected_data = b"test_init__reinit" - scrap.put(data_type, expected_data) - - scrap.init() - - self.assertEqual(scrap.get(data_type), expected_data) - - def test_get_init(self): - """Ensures get_init gets the init state.""" - self.assertTrue(scrap.get_init()) - - def todo_test_contains(self): - """Ensures contains works as expected.""" - self.fail() - - def todo_test_get(self): - """Ensures get works as expected.""" - self.fail() - - def test_get__owned_empty_type(self): - """Ensures get works when there is no data of the requested type - in the clipboard and the clipboard is owned by the pygame application. - """ - # Use a unique data type identifier to ensure there is no preexisting - # data. - DATA_TYPE = "test_get__owned_empty_type" - - if scrap.lost(): - # Try to acquire the clipboard. 
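As setUpClass above shows, scrap needs the display initialised and a window open before it can itself be initialised; only then do put/get/lost make sense. A condensed sketch of that flow, assuming a real (non-dummy) video driver is available, so it is illustrative rather than something to run headless:

import pygame
from pygame import scrap

pygame.display.init()
pygame.display.set_mode((1, 1))        # scrap needs an open window
scrap.init()

# Place text on the clipboard and read it back.
scrap.put(pygame.SCRAP_TEXT, b"Hello world")
assert scrap.get(pygame.SCRAP_TEXT) == b"Hello world"

# lost() reports whether another application has taken clipboard ownership.
if scrap.lost():
    print("clipboard now owned by another application")

pygame.display.quit()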
- scrap.put(pygame.SCRAP_TEXT, b"text to clipboard") - - if scrap.lost(): - self.skipTest("requires the pygame application to own the clipboard") - - data = scrap.get(DATA_TYPE) - - self.assertIsNone(data) - - def todo_test_get_types(self): - """Ensures get_types works as expected.""" - self.fail() - - def todo_test_lost(self): - """Ensures lost works as expected.""" - self.fail() - - def test_set_mode(self): - """Ensures set_mode works as expected.""" - scrap.set_mode(pygame.SCRAP_SELECTION) - scrap.set_mode(pygame.SCRAP_CLIPBOARD) - - self.assertRaises(ValueError, scrap.set_mode, 1099) - - def test_put__text(self): - """Ensures put can place text into the clipboard.""" - scrap.put(pygame.SCRAP_TEXT, b"Hello world") - - self.assertEqual(scrap.get(pygame.SCRAP_TEXT), b"Hello world") - - scrap.put(pygame.SCRAP_TEXT, b"Another String") - - self.assertEqual(scrap.get(pygame.SCRAP_TEXT), b"Another String") - - @unittest.skipIf("pygame.image" not in sys.modules, "requires pygame.image module") - def test_put__bmp_image(self): - """Ensures put can place a BMP image into the clipboard.""" - sf = pygame.image.load(trunk_relative_path("examples/data/asprite.bmp")) - expected_string = pygame.image.tostring(sf, "RGBA") - scrap.put(pygame.SCRAP_BMP, expected_string) - - self.assertEqual(scrap.get(pygame.SCRAP_BMP), expected_string) - - def test_put(self): - """Ensures put can place data into the clipboard - when using a user defined type identifier. - """ - DATA_TYPE = "arbitrary buffer" - - scrap.put(DATA_TYPE, b"buf") - r = scrap.get(DATA_TYPE) - - self.assertEqual(r, b"buf") - - -class ScrapModuleClipboardNotOwnedTest(unittest.TestCase): - """Test the scrap module's functionality when the pygame application is - not the current owner of the clipboard. - - A separate class is used to prevent tests that acquire the clipboard from - interfering with these tests. - """ - - @classmethod - def setUpClass(cls): - pygame.display.init() - pygame.display.set_mode((1, 1)) - scrap.init() - - @classmethod - def tearDownClass(cls): - # scrap.quit() # Does not exist! - pygame.quit() - pygame.display.quit() - - def _skip_if_clipboard_owned(self): - # Skip test if the pygame application owns the clipboard. Currently, - # there is no way to give up ownership. - if not scrap.lost(): - self.skipTest("requires the pygame application to not own the clipboard") - - def test_get__not_owned(self): - """Ensures get works when there is no data of the requested type - in the clipboard and the clipboard is not owned by the pygame - application. - """ - self._skip_if_clipboard_owned() - - # Use a unique data type identifier to ensure there is no preexisting - # data. - DATA_TYPE = "test_get__not_owned" - - data = scrap.get(DATA_TYPE) - - self.assertIsNone(data) - - def test_get_types__not_owned(self): - """Ensures get_types works when the clipboard is not owned - by the pygame application. - """ - self._skip_if_clipboard_owned() - - data_types = scrap.get_types() - - self.assertIsInstance(data_types, list) - - def test_contains__not_owned(self): - """Ensures contains works when the clipboard is not owned - by the pygame application. - """ - self._skip_if_clipboard_owned() - - # Use a unique data type identifier to ensure there is no preexisting - # data. - DATA_TYPE = "test_contains__not_owned" - - contains = scrap.contains(DATA_TYPE) - - self.assertFalse(contains) - - def test_lost__not_owned(self): - """Ensures lost works when the clipboard is not owned - by the pygame application. 
- """ - self._skip_if_clipboard_owned() - - lost = scrap.lost() - - self.assertTrue(lost) - - -class X11InteractiveTest(unittest.TestCase): - __tags__ = ["ignore", "subprocess_ignore"] - try: - pygame.display.init() - except Exception: - pass - else: - if pygame.display.get_driver() == "x11": - __tags__ = ["interactive"] - pygame.display.quit() - - def test_issue_208(self): - """PATCH: pygame.scrap on X11, fix copying into PRIMARY selection - - Copying into theX11 PRIMARY selection (mouse copy/paste) would not - work due to a confusion between content type and clipboard type. - - """ - - from pygame import display, event, freetype - from pygame.locals import SCRAP_SELECTION, SCRAP_TEXT - from pygame.locals import KEYDOWN, K_y, QUIT - - success = False - freetype.init() - font = freetype.Font(None, 24) - display.init() - display.set_caption("Interactive X11 Paste Test") - screen = display.set_mode((600, 200)) - screen.fill(pygame.Color("white")) - text = "Scrap put() succeeded." - msg = ( - "Some text has been placed into the X11 clipboard." - " Please click the center mouse button in an open" - " text window to retrieve it." - '\n\nDid you get "{}"? (y/n)' - ).format(text) - word_wrap(screen, msg, font, 6) - display.flip() - event.pump() - scrap.init() - scrap.set_mode(SCRAP_SELECTION) - scrap.put(SCRAP_TEXT, text.encode("UTF-8")) - while True: - e = event.wait() - if e.type == QUIT: - break - if e.type == KEYDOWN: - success = e.key == K_y - break - pygame.display.quit() - self.assertTrue(success) - - -def word_wrap(surf, text, font, margin=0, color=(0, 0, 0)): - font.origin = True - surf_width, surf_height = surf.get_size() - width = surf_width - 2 * margin - height = surf_height - 2 * margin - line_spacing = int(1.25 * font.get_sized_height()) - x, y = margin, margin + line_spacing - space = font.get_rect(" ") - for word in iwords(text): - if word == "\n": - x, y = margin, y + line_spacing - else: - bounds = font.get_rect(word) - if x + bounds.width + bounds.x >= width: - x, y = margin, y + line_spacing - if x + bounds.width + bounds.x >= width: - raise ValueError("word too wide for the surface") - if y + bounds.height - bounds.y >= height: - raise ValueError("text to long for the surface") - font.render_to(surf, (x, y), None, color) - x += bounds.width + space.width - return x, y - - -def iwords(text): - # r"\n|[^ ]+" - # - head = 0 - tail = head - end = len(text) - while head < end: - if text[head] == " ": - head += 1 - tail = head + 1 - elif text[head] == "\n": - head += 1 - yield "\n" - tail = head + 1 - elif tail == end: - yield text[head:] - head = end - elif text[tail] == "\n": - yield text[head:tail] - head = tail - elif text[tail] == " ": - yield text[head:tail] - head = tail - else: - tail += 1 - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/sndarray_tags.py b/venv/Lib/site-packages/pygame/tests/sndarray_tags.py deleted file mode 100644 index 68fa7a5..0000000 --- a/venv/Lib/site-packages/pygame/tests/sndarray_tags.py +++ /dev/null @@ -1,12 +0,0 @@ -__tags__ = ["array"] - -exclude = False - -try: - import pygame.mixer - import numpy -except ImportError: - exclude = True - -if exclude: - __tags__.extend(("ignore", "subprocess_ignore")) diff --git a/venv/Lib/site-packages/pygame/tests/sndarray_test.py b/venv/Lib/site-packages/pygame/tests/sndarray_test.py deleted file mode 100644 index afa94ec..0000000 --- a/venv/Lib/site-packages/pygame/tests/sndarray_test.py +++ /dev/null @@ -1,155 +0,0 @@ -import unittest - -from numpy import 
int8, int16, uint8, uint16, float32, array, alltrue - -import pygame -import pygame.sndarray - - -class SndarrayTest(unittest.TestCase): - array_dtypes = {8: uint8, -8: int8, 16: uint16, -16: int16, 32: float32} - - def _assert_compatible(self, arr, size): - dtype = self.array_dtypes[size] - self.assertEqual(arr.dtype, dtype) - - def test_array(self): - def check_array(size, channels, test_data): - try: - pygame.mixer.init(22050, size, channels, allowedchanges=0) - except pygame.error: - # Not all sizes are supported on all systems. - return - try: - __, sz, __ = pygame.mixer.get_init() - if sz == size: - srcarr = array(test_data, self.array_dtypes[size]) - snd = pygame.sndarray.make_sound(srcarr) - arr = pygame.sndarray.array(snd) - self._assert_compatible(arr, size) - self.assertTrue( - alltrue(arr == srcarr), - "size: %i\n%s\n%s" % (size, arr, test_data), - ) - finally: - pygame.mixer.quit() - - check_array(8, 1, [0, 0x0F, 0xF0, 0xFF]) - check_array(8, 2, [[0, 0x80], [0x2D, 0x41], [0x64, 0xA1], [0xFF, 0x40]]) - check_array(16, 1, [0, 0x00FF, 0xFF00, 0xFFFF]) - check_array( - 16, 2, [[0, 0xFFFF], [0xFFFF, 0], [0x00FF, 0xFF00], [0x0F0F, 0xF0F0]] - ) - check_array(-8, 1, [0, -0x80, 0x7F, 0x64]) - check_array(-8, 2, [[0, -0x80], [-0x64, 0x64], [0x25, -0x50], [0xFF, 0]]) - check_array(-16, 1, [0, 0x7FFF, -0x7FFF, -1]) - check_array(-16, 2, [[0, -0x7FFF], [-0x7FFF, 0], [0x7FFF, 0], [0, 0x7FFF]]) - - def test_get_arraytype(self): - array_type = pygame.sndarray.get_arraytype() - - self.assertEqual(array_type, "numpy", "unknown array type %s" % array_type) - - def test_get_arraytypes(self): - arraytypes = pygame.sndarray.get_arraytypes() - self.assertIn("numpy", arraytypes) - - for atype in arraytypes: - self.assertEqual(atype, "numpy", "unknown array type %s" % atype) - - def test_make_sound(self): - def check_sound(size, channels, test_data): - try: - pygame.mixer.init(22050, size, channels, allowedchanges=0) - except pygame.error: - # Not all sizes are supported on all systems. - return - try: - __, sz, __ = pygame.mixer.get_init() - if sz == size: - srcarr = array(test_data, self.array_dtypes[size]) - snd = pygame.sndarray.make_sound(srcarr) - arr = pygame.sndarray.samples(snd) - self.assertTrue( - alltrue(arr == srcarr), - "size: %i\n%s\n%s" % (size, arr, test_data), - ) - finally: - pygame.mixer.quit() - - check_sound(8, 1, [0, 0x0F, 0xF0, 0xFF]) - check_sound(8, 2, [[0, 0x80], [0x2D, 0x41], [0x64, 0xA1], [0xFF, 0x40]]) - check_sound(16, 1, [0, 0x00FF, 0xFF00, 0xFFFF]) - check_sound( - 16, 2, [[0, 0xFFFF], [0xFFFF, 0], [0x00FF, 0xFF00], [0x0F0F, 0xF0F0]] - ) - check_sound(-8, 1, [0, -0x80, 0x7F, 0x64]) - check_sound(-8, 2, [[0, -0x80], [-0x64, 0x64], [0x25, -0x50], [0xFF, 0]]) - check_sound(-16, 1, [0, 0x7FFF, -0x7FFF, -1]) - check_sound(-16, 2, [[0, -0x7FFF], [-0x7FFF, 0], [0x7FFF, 0], [0, 0x7FFF]]) - check_sound(32, 2, [[0.0, -1.0], [-1.0, 0], [1.0, 0], [0, 1.0]]) - - def test_samples(self): - - null_byte = b"\x00" - - def check_sample(size, channels, test_data): - try: - pygame.mixer.init(22050, size, channels, allowedchanges=0) - except pygame.error: - # Not all sizes are supported on all systems. - return - try: - __, sz, __ = pygame.mixer.get_init() - if sz == size: - zeroed = null_byte * ((abs(size) // 8) * len(test_data) * channels) - snd = pygame.mixer.Sound(buffer=zeroed) - samples = pygame.sndarray.samples(snd) - self._assert_compatible(samples, size) - ##print ('X %s' % (samples.shape,)) - ##print ('Y %s' % (test_data,)) - samples[...] 
= test_data - arr = pygame.sndarray.array(snd) - self.assertTrue( - alltrue(samples == arr), - "size: %i\n%s\n%s" % (size, arr, test_data), - ) - finally: - pygame.mixer.quit() - - check_sample(8, 1, [0, 0x0F, 0xF0, 0xFF]) - check_sample(8, 2, [[0, 0x80], [0x2D, 0x41], [0x64, 0xA1], [0xFF, 0x40]]) - check_sample(16, 1, [0, 0x00FF, 0xFF00, 0xFFFF]) - check_sample( - 16, 2, [[0, 0xFFFF], [0xFFFF, 0], [0x00FF, 0xFF00], [0x0F0F, 0xF0F0]] - ) - check_sample(-8, 1, [0, -0x80, 0x7F, 0x64]) - check_sample(-8, 2, [[0, -0x80], [-0x64, 0x64], [0x25, -0x50], [0xFF, 0]]) - check_sample(-16, 1, [0, 0x7FFF, -0x7FFF, -1]) - check_sample(-16, 2, [[0, -0x7FFF], [-0x7FFF, 0], [0x7FFF, 0], [0, 0x7FFF]]) - check_sample(32, 2, [[0.0, -1.0], [-1.0, 0], [1.0, 0], [0, 1.0]]) - - def test_use_arraytype(self): - def do_use_arraytype(atype): - pygame.sndarray.use_arraytype(atype) - - pygame.sndarray.use_arraytype("numpy") - self.assertEqual(pygame.sndarray.get_arraytype(), "numpy") - - self.assertRaises(ValueError, do_use_arraytype, "not an option") - - def test_float32(self): - """sized arrays work with Sounds and 32bit float arrays.""" - try: - pygame.mixer.init(22050, 32, 2, allowedchanges=0) - except pygame.error: - # Not all sizes are supported on all systems. - self.skipTest("unsupported mixer configuration") - - arr = array([[0.0, -1.0], [-1.0, 0], [1.0, 0], [0, 1.0]], float32) - newsound = pygame.mixer.Sound(array=arr) - pygame.mixer.quit() - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/sprite_test.py b/venv/Lib/site-packages/pygame/tests/sprite_test.py deleted file mode 100644 index b0b099a..0000000 --- a/venv/Lib/site-packages/pygame/tests/sprite_test.py +++ /dev/null @@ -1,1403 +0,0 @@ -#################################### IMPORTS ################################### -# -*- encoding: utf-8 -*- - - -import unittest - -import pygame -from pygame import sprite - - -################################# MODULE LEVEL ################################# - - -class SpriteModuleTest(unittest.TestCase): - pass - - -######################### SPRITECOLLIDE FUNCTIONS TEST ######################### - - -class SpriteCollideTest(unittest.TestCase): - def setUp(self): - self.ag = sprite.AbstractGroup() - self.ag2 = sprite.AbstractGroup() - self.s1 = sprite.Sprite(self.ag) - self.s2 = sprite.Sprite(self.ag2) - self.s3 = sprite.Sprite(self.ag2) - - self.s1.image = pygame.Surface((50, 10), pygame.SRCALPHA, 32) - self.s2.image = pygame.Surface((10, 10), pygame.SRCALPHA, 32) - self.s3.image = pygame.Surface((10, 10), pygame.SRCALPHA, 32) - - self.s1.rect = self.s1.image.get_rect() - self.s2.rect = self.s2.image.get_rect() - self.s3.rect = self.s3.image.get_rect() - self.s2.rect.move_ip(40, 0) - self.s3.rect.move_ip(100, 100) - - def test_spritecollide__works_if_collided_cb_is_None(self): - # Test that sprites collide without collided function. - self.assertEqual( - sprite.spritecollide(self.s1, self.ag2, dokill=False, collided=None), - [self.s2], - ) - - def test_spritecollide__works_if_collided_cb_not_passed(self): - # Should also work when collided function isn't passed at all. - self.assertEqual( - sprite.spritecollide(self.s1, self.ag2, dokill=False), [self.s2] - ) - - def test_spritecollide__collided_must_be_a_callable(self): - # Need to pass a callable. - self.assertRaises( - TypeError, sprite.spritecollide, self.s1, self.ag2, dokill=False, collided=1 - ) - - def test_spritecollide__collided_defaults_to_collide_rect(self): - # collide_rect should behave the same as default. 
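The setUp fixture above (one 50x10 sprite, plus a 10x10 sprite shifted onto its right edge and another moved well away) is what all of these assertions lean on. A condensed sketch of the same spritecollide call outside unittest, with illustrative variable names; by default the sprites' rect attributes are compared, and collide_rect is the explicit spelling of that test:

import pygame
from pygame import sprite

group = sprite.Group()

player = sprite.Sprite()
player.image = pygame.Surface((50, 10), pygame.SRCALPHA, 32)
player.rect = player.image.get_rect()

other = sprite.Sprite(group)
other.image = pygame.Surface((10, 10), pygame.SRCALPHA, 32)
other.rect = other.image.get_rect().move(40, 0)   # overlaps player's right edge

# collided omitted (or None): plain rect overlap is used.
assert sprite.spritecollide(player, group, dokill=False) == [other]

# The explicit equivalent.
assert sprite.spritecollide(player, group, dokill=False,
                            collided=sprite.collide_rect) == [other]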
- self.assertEqual( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_rect - ), - [self.s2], - ) - - def test_collide_rect_ratio__ratio_of_one_like_default(self): - # collide_rect_ratio should behave the same as default at a 1.0 ratio. - self.assertEqual( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_rect_ratio(1.0) - ), - [self.s2], - ) - - def test_collide_rect_ratio__collides_all_at_ratio_of_twenty(self): - # collide_rect_ratio should collide all at a 20.0 ratio. - collided_func = sprite.collide_rect_ratio(20.0) - expected_sprites = sorted(self.ag2.sprites(), key=id) - - collided_sprites = sorted( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=collided_func - ), - key=id, - ) - - self.assertListEqual(collided_sprites, expected_sprites) - - def test_collide_circle__no_radius_set(self): - # collide_circle with no radius set. - self.assertEqual( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_circle - ), - [self.s2], - ) - - def test_collide_circle_ratio__no_radius_and_ratio_of_one(self): - # collide_circle_ratio with no radius set, at a 1.0 ratio. - self.assertEqual( - sprite.spritecollide( - self.s1, - self.ag2, - dokill=False, - collided=sprite.collide_circle_ratio(1.0), - ), - [self.s2], - ) - - def test_collide_circle_ratio__no_radius_and_ratio_of_twenty(self): - # collide_circle_ratio with no radius set, at a 20.0 ratio. - collided_func = sprite.collide_circle_ratio(20.0) - expected_sprites = sorted(self.ag2.sprites(), key=id) - - collided_sprites = sorted( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=collided_func - ), - key=id, - ) - - self.assertListEqual(expected_sprites, collided_sprites) - - def test_collide_circle__radius_set_by_collide_circle_ratio(self): - # Call collide_circle_ratio with no radius set, at a 20.0 ratio. - # That should return group ag2 AND set the radius attribute of the - # sprites in such a way that collide_circle would give same result as - # if it had been called without the radius being set. - collided_func = sprite.collide_circle_ratio(20.0) - - sprite.spritecollide(self.s1, self.ag2, dokill=False, collided=collided_func) - - self.assertEqual( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_circle - ), - [self.s2], - ) - - def test_collide_circle_ratio__no_radius_and_ratio_of_two_twice(self): - # collide_circle_ratio with no radius set, at a 2.0 ratio, - # called twice to check if the bug where the calculated radius - # is not stored correctly in the radius attribute of each sprite. - collided_func = sprite.collide_circle_ratio(2.0) - - # Calling collide_circle_ratio will set the radius attribute of the - # sprites. If an incorrect value is stored then we will not get the - # same result next time it is called: - expected_sprites = sorted( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=collided_func - ), - key=id, - ) - collided_sprites = sorted( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=collided_func - ), - key=id, - ) - - self.assertListEqual(expected_sprites, collided_sprites) - - def test_collide_circle__with_radii_set(self): - # collide_circle with a radius set. 
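collide_circle and collide_circle_ratio, exercised above, treat each sprite as a circle: an explicit radius attribute is used when present, otherwise a radius is derived from the sprite's rect, and the ratio variant scales those radii (the tests above also check that it caches a radius on sprites that had none). A small sketch under the same fixture assumptions:

import pygame
from pygame import sprite

a = sprite.Sprite()
a.image = pygame.Surface((50, 10))
a.rect = a.image.get_rect()

b = sprite.Sprite()
b.image = pygame.Surface((10, 10))
b.rect = b.image.get_rect().move(40, 0)

# No radius attributes yet: radii are derived from the rects.
sprite.collide_circle(a, b)

# Explicit radii take precedence over the derived values.
a.radius, b.radius = 50, 10
assert sprite.collide_circle(a, b)

# collide_circle_ratio(r) builds a collided-style callback with scaled radii.
assert sprite.collide_circle_ratio(20.0)(a, b)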
- self.s1.radius = 50 - self.s2.radius = 10 - self.s3.radius = 400 - collided_func = sprite.collide_circle - expected_sprites = sorted(self.ag2.sprites(), key=id) - - collided_sprites = sorted( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=collided_func - ), - key=id, - ) - - self.assertListEqual(expected_sprites, collided_sprites) - - def test_collide_circle_ratio__with_radii_set(self): - # collide_circle_ratio with a radius set. - self.s1.radius = 50 - self.s2.radius = 10 - self.s3.radius = 400 - collided_func = sprite.collide_circle_ratio(0.5) - expected_sprites = sorted(self.ag2.sprites(), key=id) - - collided_sprites = sorted( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=collided_func - ), - key=id, - ) - - self.assertListEqual(expected_sprites, collided_sprites) - - def test_collide_mask__opaque(self): - # make some fully opaque sprites that will collide with masks. - self.s1.image.fill((255, 255, 255, 255)) - self.s2.image.fill((255, 255, 255, 255)) - self.s3.image.fill((255, 255, 255, 255)) - - # masks should be autogenerated from image if they don't exist. - self.assertEqual( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_mask - ), - [self.s2], - ) - - self.s1.mask = pygame.mask.from_surface(self.s1.image) - self.s2.mask = pygame.mask.from_surface(self.s2.image) - self.s3.mask = pygame.mask.from_surface(self.s3.image) - - # with set masks. - self.assertEqual( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_mask - ), - [self.s2], - ) - - def test_collide_mask__transparent(self): - # make some sprites that are fully transparent, so they won't collide. - self.s1.image.fill((255, 255, 255, 0)) - self.s2.image.fill((255, 255, 255, 0)) - self.s3.image.fill((255, 255, 255, 0)) - - self.s1.mask = pygame.mask.from_surface(self.s1.image, 255) - self.s2.mask = pygame.mask.from_surface(self.s2.image, 255) - self.s3.mask = pygame.mask.from_surface(self.s3.image, 255) - - self.assertFalse( - sprite.spritecollide( - self.s1, self.ag2, dokill=False, collided=sprite.collide_mask - ) - ) - - def test_spritecollideany__without_collided_callback(self): - - # pygame.sprite.spritecollideany(sprite, group) -> sprite - # finds any sprites that collide - - # if collided is not passed, all - # sprites must have a "rect" value, which is a - # rectangle of the sprite area, which will be used - # to calculate the collision. - - # s2 in, s3 out - expected_sprite = self.s2 - collided_sprite = sprite.spritecollideany(self.s1, self.ag2) - - self.assertEqual(collided_sprite, expected_sprite) - - # s2 and s3 out - self.s2.rect.move_ip(0, 10) - collided_sprite = sprite.spritecollideany(self.s1, self.ag2) - - self.assertIsNone(collided_sprite) - - # s2 out, s3 in - self.s3.rect.move_ip(-105, -105) - expected_sprite = self.s3 - collided_sprite = sprite.spritecollideany(self.s1, self.ag2) - - self.assertEqual(collided_sprite, expected_sprite) - - # s2 and s3 in - self.s2.rect.move_ip(0, -10) - expected_sprite_choices = self.ag2.sprites() - collided_sprite = sprite.spritecollideany(self.s1, self.ag2) - - self.assertIn(collided_sprite, expected_sprite_choices) - - def test_spritecollideany__with_collided_callback(self): - - # pygame.sprite.spritecollideany(sprite, group) -> sprite - # finds any sprites that collide - - # collided is a callback function used to calculate if - # two sprites are colliding. it should take two sprites - # as values, and return a bool value indicating if - # they are colliding. 
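The comment block above spells out the collided contract for spritecollideany: the callback receives two sprites and returns a truthy value when they should count as a collision, and the function returns the first colliding sprite it finds (or None). A short sketch with illustrative names, falling back to the stock rect check inside the callback:

import pygame
from pygame import sprite

group = sprite.Group()

probe = sprite.Sprite()
probe.image = pygame.Surface((50, 10))
probe.rect = probe.image.get_rect()

near = sprite.Sprite(group)
near.image = pygame.Surface((10, 10))
near.rect = near.image.get_rect().move(40, 0)

def collided(left, right):
    # Any two-sprite predicate works; here it is just the rect test.
    return sprite.collide_rect(left, right)

assert sprite.spritecollideany(probe, group, collided) is near
assert sprite.spritecollideany(probe, sprite.Group()) is None   # empty group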
- - # This collision test can be faster than pygame.sprite.spritecollide() - # since it has less work to do. - - arg_dict_a = {} - arg_dict_b = {} - return_container = [True] - - # This function is configurable using the mutable default arguments! - def collided_callback( - spr_a, - spr_b, - arg_dict_a=arg_dict_a, - arg_dict_b=arg_dict_b, - return_container=return_container, - ): - - count = arg_dict_a.get(spr_a, 0) - arg_dict_a[spr_a] = 1 + count - - count = arg_dict_b.get(spr_b, 0) - arg_dict_b[spr_b] = 1 + count - - return return_container[0] - - # This should return a sprite from self.ag2 because the callback - # function (collided_callback()) currently returns True. - expected_sprite_choices = self.ag2.sprites() - collided_sprite = sprite.spritecollideany(self.s1, self.ag2, collided_callback) - - self.assertIn(collided_sprite, expected_sprite_choices) - - # The callback function should have been called only once, so self.s1 - # should have only been passed as an argument once - self.assertEqual(len(arg_dict_a), 1) - self.assertEqual(arg_dict_a[self.s1], 1) - - # The callback function should have been called only once, so self.s2 - # exclusive-or self.s3 should have only been passed as an argument - # once - self.assertEqual(len(arg_dict_b), 1) - self.assertEqual(list(arg_dict_b.values())[0], 1) - self.assertTrue(self.s2 in arg_dict_b or self.s3 in arg_dict_b) - - arg_dict_a.clear() - arg_dict_b.clear() - return_container[0] = False - - # This should return None because the callback function - # (collided_callback()) currently returns False. - collided_sprite = sprite.spritecollideany(self.s1, self.ag2, collided_callback) - - self.assertIsNone(collided_sprite) - - # The callback function should have been called as many times as - # there are sprites in self.ag2 - self.assertEqual(len(arg_dict_a), 1) - self.assertEqual(arg_dict_a[self.s1], len(self.ag2)) - self.assertEqual(len(arg_dict_b), len(self.ag2)) - - # Each sprite in self.ag2 should be called once. - for s in self.ag2: - self.assertEqual(arg_dict_b[s], 1) - - def test_groupcollide__without_collided_callback(self): - - # pygame.sprite.groupcollide(groupa, groupb, dokilla, dokillb) -> dict - # collision detection between group and group - - # test no kill - expected_dict = {self.s1: [self.s2]} - crashed = pygame.sprite.groupcollide(self.ag, self.ag2, False, False) - - self.assertDictEqual(expected_dict, crashed) - - crashed = pygame.sprite.groupcollide(self.ag, self.ag2, False, False) - - self.assertDictEqual(expected_dict, crashed) - - # Test dokill2=True (kill colliding sprites in second group). - crashed = pygame.sprite.groupcollide(self.ag, self.ag2, False, True) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {} - crashed = pygame.sprite.groupcollide(self.ag, self.ag2, False, False) - - self.assertDictEqual(expected_dict, crashed) - - # Test dokill1=True (kill colliding sprites in first group). 
- self.s3.rect.move_ip(-100, -100) - expected_dict = {self.s1: [self.s3]} - crashed = pygame.sprite.groupcollide(self.ag, self.ag2, True, False) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {} - crashed = pygame.sprite.groupcollide(self.ag, self.ag2, False, False) - - self.assertDictEqual(expected_dict, crashed) - - def test_groupcollide__with_collided_callback(self): - - collided_callback_true = lambda spr_a, spr_b: True - collided_callback_false = lambda spr_a, spr_b: False - - # test no kill - expected_dict = {} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, False, False, collided_callback_false - ) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {self.s1: sorted(self.ag2.sprites(), key=id)} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, False, False, collided_callback_true - ) - for value in crashed.values(): - value.sort(key=id) - - self.assertDictEqual(expected_dict, crashed) - - # expected_dict is the same again for this collide - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, False, False, collided_callback_true - ) - for value in crashed.values(): - value.sort(key=id) - - self.assertDictEqual(expected_dict, crashed) - - # Test dokill2=True (kill colliding sprites in second group). - expected_dict = {} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, False, True, collided_callback_false - ) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {self.s1: sorted(self.ag2.sprites(), key=id)} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, False, True, collided_callback_true - ) - for value in crashed.values(): - value.sort(key=id) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, False, True, collided_callback_true - ) - - self.assertDictEqual(expected_dict, crashed) - - # Test dokill1=True (kill colliding sprites in first group). 
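groupcollide, exercised in the two tests around this point, returns a dict that maps each sprite of the first group to the list of second-group sprites it overlaps, and the two dokill flags remove the colliding sprites from their groups. A compact sketch mirroring the fixture these tests use, with shortened variable names:

import pygame
from pygame import sprite

group_a, group_b = sprite.Group(), sprite.Group()

s1 = sprite.Sprite(group_a)
s1.image = pygame.Surface((50, 10))
s1.rect = s1.image.get_rect()

s2 = sprite.Sprite(group_b)
s2.image = pygame.Surface((10, 10))
s2.rect = s2.image.get_rect().move(40, 0)      # overlaps s1

s3 = sprite.Sprite(group_b)
s3.image = pygame.Surface((10, 10))
s3.rect = s3.image.get_rect().move(100, 100)   # far away from s1

# No kills: only the s1/s2 overlap is reported.
assert sprite.groupcollide(group_a, group_b, False, False) == {s1: [s2]}

# dokillb=True removes the colliding second-group sprites.
sprite.groupcollide(group_a, group_b, False, True)
assert s2 not in group_b and s3 in group_b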
- self.ag.add(self.s2) - self.ag2.add(self.s3) - expected_dict = {} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, True, False, collided_callback_false - ) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {self.s1: [self.s3], self.s2: [self.s3]} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, True, False, collided_callback_true - ) - - self.assertDictEqual(expected_dict, crashed) - - expected_dict = {} - crashed = pygame.sprite.groupcollide( - self.ag, self.ag2, True, False, collided_callback_true - ) - - self.assertDictEqual(expected_dict, crashed) - - def test_collide_rect(self): - # Test colliding - some edges touching - self.assertTrue(pygame.sprite.collide_rect(self.s1, self.s2)) - self.assertTrue(pygame.sprite.collide_rect(self.s2, self.s1)) - - # Test colliding - all edges touching - self.s2.rect.center = self.s3.rect.center - - self.assertTrue(pygame.sprite.collide_rect(self.s2, self.s3)) - self.assertTrue(pygame.sprite.collide_rect(self.s3, self.s2)) - - # Test colliding - no edges touching - self.s2.rect.inflate_ip(10, 10) - - self.assertTrue(pygame.sprite.collide_rect(self.s2, self.s3)) - self.assertTrue(pygame.sprite.collide_rect(self.s3, self.s2)) - - # Test colliding - some edges intersecting - self.s2.rect.center = (self.s1.rect.right, self.s1.rect.bottom) - - self.assertTrue(pygame.sprite.collide_rect(self.s1, self.s2)) - self.assertTrue(pygame.sprite.collide_rect(self.s2, self.s1)) - - # Test not colliding - self.assertFalse(pygame.sprite.collide_rect(self.s1, self.s3)) - self.assertFalse(pygame.sprite.collide_rect(self.s3, self.s1)) - - -################################################################################ - - -class AbstractGroupTypeTest(unittest.TestCase): - def setUp(self): - self.ag = sprite.AbstractGroup() - self.ag2 = sprite.AbstractGroup() - self.s1 = sprite.Sprite(self.ag) - self.s2 = sprite.Sprite(self.ag) - self.s3 = sprite.Sprite(self.ag2) - self.s4 = sprite.Sprite(self.ag2) - - self.s1.image = pygame.Surface((10, 10)) - self.s1.image.fill(pygame.Color("red")) - self.s1.rect = self.s1.image.get_rect() - - self.s2.image = pygame.Surface((10, 10)) - self.s2.image.fill(pygame.Color("green")) - self.s2.rect = self.s2.image.get_rect() - self.s2.rect.left = 10 - - self.s3.image = pygame.Surface((10, 10)) - self.s3.image.fill(pygame.Color("blue")) - self.s3.rect = self.s3.image.get_rect() - self.s3.rect.top = 10 - - self.s4.image = pygame.Surface((10, 10)) - self.s4.image.fill(pygame.Color("white")) - self.s4.rect = self.s4.image.get_rect() - self.s4.rect.left = 10 - self.s4.rect.top = 10 - - self.bg = pygame.Surface((20, 20)) - self.scr = pygame.Surface((20, 20)) - self.scr.fill(pygame.Color("grey")) - - def test_has(self): - "See if AbstractGroup.has() works as expected." - - self.assertEqual(True, self.s1 in self.ag) - - self.assertEqual(True, self.ag.has(self.s1)) - - self.assertEqual(True, self.ag.has([self.s1, self.s2])) - - # see if one of them not being in there. - self.assertNotEqual(True, self.ag.has([self.s1, self.s2, self.s3])) - self.assertNotEqual(True, self.ag.has(self.s1, self.s2, self.s3)) - self.assertNotEqual(True, self.ag.has(self.s1, sprite.Group(self.s2, self.s3))) - self.assertNotEqual(True, self.ag.has(self.s1, [self.s2, self.s3])) - - # test empty list processing - self.assertFalse(self.ag.has(*[])) - self.assertFalse(self.ag.has([])) - self.assertFalse(self.ag.has([[]])) - - # see if a second AbstractGroup works. 
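test_has above documents the membership API: `sprite in group`, and group.has(...) accepting a single sprite, several sprites, iterables, or other groups, returning True only when every listed sprite is present and False for empty containers. A small illustration of those rules:

import pygame
from pygame import sprite

group = sprite.Group()
s1 = sprite.Sprite(group)
s2 = sprite.Sprite(group)
s3 = sprite.Sprite()                 # deliberately left out of the group

assert s1 in group
assert group.has(s1)
assert group.has([s1, s2])           # an iterable of sprites
assert group.has(s1, s2)             # the same sprites as varargs
assert not group.has(s1, s2, s3)     # False once any sprite is missing
assert not group.has([])             # empty containers never match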
- self.assertEqual(True, self.ag2.has(self.s3)) - - def test_add(self): - ag3 = sprite.AbstractGroup() - sprites = (self.s1, self.s2, self.s3, self.s4) - - for s in sprites: - self.assertNotIn(s, ag3) - - ag3.add(self.s1, [self.s2], self.ag2) - - for s in sprites: - self.assertIn(s, ag3) - - def test_add_internal(self): - self.assertNotIn(self.s1, self.ag2) - - self.ag2.add_internal(self.s1) - - self.assertIn(self.s1, self.ag2) - - def test_clear(self): - - self.ag.draw(self.scr) - self.ag.clear(self.scr, self.bg) - self.assertEqual((0, 0, 0, 255), self.scr.get_at((5, 5))) - self.assertEqual((0, 0, 0, 255), self.scr.get_at((15, 5))) - - def test_draw(self): - - self.ag.draw(self.scr) - self.assertEqual((255, 0, 0, 255), self.scr.get_at((5, 5))) - self.assertEqual((0, 255, 0, 255), self.scr.get_at((15, 5))) - - self.assertEqual(self.ag.spritedict[self.s1], pygame.Rect(0, 0, 10, 10)) - self.assertEqual(self.ag.spritedict[self.s2], pygame.Rect(10, 0, 10, 10)) - - def test_empty(self): - - self.ag.empty() - self.assertFalse(self.s1 in self.ag) - self.assertFalse(self.s2 in self.ag) - - def test_has_internal(self): - self.assertTrue(self.ag.has_internal(self.s1)) - self.assertFalse(self.ag.has_internal(self.s3)) - - def test_remove(self): - - # Test removal of 1 sprite - self.ag.remove(self.s1) - self.assertFalse(self.ag in self.s1.groups()) - self.assertFalse(self.ag.has(self.s1)) - - # Test removal of 2 sprites as 2 arguments - self.ag2.remove(self.s3, self.s4) - self.assertFalse(self.ag2 in self.s3.groups()) - self.assertFalse(self.ag2 in self.s4.groups()) - self.assertFalse(self.ag2.has(self.s3, self.s4)) - - # Test removal of 4 sprites as a list containing a sprite and a group - # containing a sprite and another group containing 2 sprites. - self.ag.add(self.s1, self.s3, self.s4) - self.ag2.add(self.s3, self.s4) - g = sprite.Group(self.s2) - self.ag.remove([self.s1, g], self.ag2) - self.assertFalse(self.ag in self.s1.groups()) - self.assertFalse(self.ag in self.s2.groups()) - self.assertFalse(self.ag in self.s3.groups()) - self.assertFalse(self.ag in self.s4.groups()) - self.assertFalse(self.ag.has(self.s1, self.s2, self.s3, self.s4)) - - def test_remove_internal(self): - - self.ag.remove_internal(self.s1) - self.assertFalse(self.ag.has_internal(self.s1)) - - def test_sprites(self): - expected_sprites = sorted((self.s1, self.s2), key=id) - sprite_list = sorted(self.ag.sprites(), key=id) - - self.assertListEqual(sprite_list, expected_sprites) - - def test_update(self): - class test_sprite(pygame.sprite.Sprite): - sink = [] - - def __init__(self, *groups): - pygame.sprite.Sprite.__init__(self, *groups) - - def update(self, *args): - self.sink += args - - s = test_sprite(self.ag) - self.ag.update(1, 2, 3) - - self.assertEqual(test_sprite.sink, [1, 2, 3]) - - def test_update_with_kwargs(self): - class test_sprite(pygame.sprite.Sprite): - sink = [] - sink_kwargs = {} - - def __init__(self, *groups): - pygame.sprite.Sprite.__init__(self, *groups) - - def update(self, *args, **kwargs): - self.sink += args - self.sink_kwargs.update(kwargs) - - s = test_sprite(self.ag) - self.ag.update(1, 2, 3, foo=4, bar=5) - - self.assertEqual(test_sprite.sink, [1, 2, 3]) - self.assertEqual(test_sprite.sink_kwargs, {"foo": 4, "bar": 5}) - - -################################################################################ - -# A base class to share tests between similar classes - - -class LayeredGroupBase: - def test_get_layer_of_sprite(self): - expected_layer = 666 - spr = self.sprite() - self.LG.add(spr, 
layer=expected_layer) - layer = self.LG.get_layer_of_sprite(spr) - - self.assertEqual(len(self.LG._spritelist), 1) - self.assertEqual(layer, self.LG.get_layer_of_sprite(spr)) - self.assertEqual(layer, expected_layer) - self.assertEqual(layer, self.LG._spritelayers[spr]) - - def test_add(self): - expected_layer = self.LG._default_layer - spr = self.sprite() - self.LG.add(spr) - layer = self.LG.get_layer_of_sprite(spr) - - self.assertEqual(len(self.LG._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__sprite_with_layer_attribute(self): - expected_layer = 100 - spr = self.sprite() - spr._layer = expected_layer - self.LG.add(spr) - layer = self.LG.get_layer_of_sprite(spr) - - self.assertEqual(len(self.LG._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__passing_layer_keyword(self): - expected_layer = 100 - spr = self.sprite() - self.LG.add(spr, layer=expected_layer) - layer = self.LG.get_layer_of_sprite(spr) - - self.assertEqual(len(self.LG._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__overriding_sprite_layer_attr(self): - expected_layer = 200 - spr = self.sprite() - spr._layer = 100 - self.LG.add(spr, layer=expected_layer) - layer = self.LG.get_layer_of_sprite(spr) - - self.assertEqual(len(self.LG._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__adding_sprite_on_init(self): - spr = self.sprite() - lrg2 = sprite.LayeredUpdates(spr) - expected_layer = lrg2._default_layer - layer = lrg2._spritelayers[spr] - - self.assertEqual(len(lrg2._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__sprite_init_layer_attr(self): - expected_layer = 20 - spr = self.sprite() - spr._layer = expected_layer - lrg2 = sprite.LayeredUpdates(spr) - layer = lrg2._spritelayers[spr] - - self.assertEqual(len(lrg2._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__sprite_init_passing_layer(self): - expected_layer = 33 - spr = self.sprite() - lrg2 = sprite.LayeredUpdates(spr, layer=expected_layer) - layer = lrg2._spritelayers[spr] - - self.assertEqual(len(lrg2._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__sprite_init_overiding_layer(self): - expected_layer = 33 - spr = self.sprite() - spr._layer = 55 - lrg2 = sprite.LayeredUpdates(spr, layer=expected_layer) - layer = lrg2._spritelayers[spr] - - self.assertEqual(len(lrg2._spritelist), 1) - self.assertEqual(layer, expected_layer) - - def test_add__spritelist(self): - expected_layer = self.LG._default_layer - sprite_count = 10 - sprites = [self.sprite() for _ in range(sprite_count)] - - self.LG.add(sprites) - - self.assertEqual(len(self.LG._spritelist), sprite_count) - - for i in range(sprite_count): - layer = self.LG.get_layer_of_sprite(sprites[i]) - - self.assertEqual(layer, expected_layer) - - def test_add__spritelist_with_layer_attr(self): - sprites = [] - sprite_and_layer_count = 10 - for i in range(sprite_and_layer_count): - sprites.append(self.sprite()) - sprites[-1]._layer = i - - self.LG.add(sprites) - - self.assertEqual(len(self.LG._spritelist), sprite_and_layer_count) - - for i in range(sprite_and_layer_count): - layer = self.LG.get_layer_of_sprite(sprites[i]) - - self.assertEqual(layer, i) - - def test_add__spritelist_passing_layer(self): - expected_layer = 33 - sprite_count = 10 - sprites = [self.sprite() for _ in range(sprite_count)] - - self.LG.add(sprites, layer=expected_layer) - - self.assertEqual(len(self.LG._spritelist), sprite_count) - - for i in range(sprite_count): 
- layer = self.LG.get_layer_of_sprite(sprites[i]) - - self.assertEqual(layer, expected_layer) - - def test_add__spritelist_overriding_layer(self): - expected_layer = 33 - sprites = [] - sprite_and_layer_count = 10 - for i in range(sprite_and_layer_count): - sprites.append(self.sprite()) - sprites[-1].layer = i - - self.LG.add(sprites, layer=expected_layer) - - self.assertEqual(len(self.LG._spritelist), sprite_and_layer_count) - - for i in range(sprite_and_layer_count): - layer = self.LG.get_layer_of_sprite(sprites[i]) - - self.assertEqual(layer, expected_layer) - - def test_add__spritelist_init(self): - sprite_count = 10 - sprites = [self.sprite() for _ in range(sprite_count)] - - lrg2 = sprite.LayeredUpdates(sprites) - expected_layer = lrg2._default_layer - - self.assertEqual(len(lrg2._spritelist), sprite_count) - - for i in range(sprite_count): - layer = lrg2.get_layer_of_sprite(sprites[i]) - - self.assertEqual(layer, expected_layer) - - def test_remove__sprite(self): - sprites = [] - sprite_count = 10 - for i in range(sprite_count): - sprites.append(self.sprite()) - sprites[-1].rect = pygame.Rect((0, 0, 0, 0)) - - self.LG.add(sprites) - - self.assertEqual(len(self.LG._spritelist), sprite_count) - - for i in range(sprite_count): - self.LG.remove(sprites[i]) - - self.assertEqual(len(self.LG._spritelist), 0) - - def test_sprites(self): - sprites = [] - sprite_and_layer_count = 10 - for i in range(sprite_and_layer_count, 0, -1): - sprites.append(self.sprite()) - sprites[-1]._layer = i - - self.LG.add(sprites) - - self.assertEqual(len(self.LG._spritelist), sprite_and_layer_count) - - # Sprites should be ordered based on their layer (bottom to top), - # which is the reverse order of the sprites list. - expected_sprites = list(reversed(sprites)) - actual_sprites = self.LG.sprites() - - self.assertListEqual(actual_sprites, expected_sprites) - - def test_layers(self): - sprites = [] - expected_layers = [] - layer_count = 10 - for i in range(layer_count): - expected_layers.append(i) - for j in range(5): - sprites.append(self.sprite()) - sprites[-1]._layer = i - self.LG.add(sprites) - - layers = self.LG.layers() - - self.assertListEqual(layers, expected_layers) - - def test_add__layers_are_correct(self): - layers = [1, 4, 6, 8, 3, 6, 2, 6, 4, 5, 6, 1, 0, 9, 7, 6, 54, 8, 2, 43, 6, 1] - for lay in layers: - self.LG.add(self.sprite(), layer=lay) - layers.sort() - - for idx, spr in enumerate(self.LG.sprites()): - layer = self.LG.get_layer_of_sprite(spr) - - self.assertEqual(layer, layers[idx]) - - def test_change_layer(self): - expected_layer = 99 - spr = self.sprite() - self.LG.add(spr, layer=expected_layer) - - self.assertEqual(self.LG._spritelayers[spr], expected_layer) - - expected_layer = 44 - self.LG.change_layer(spr, expected_layer) - - self.assertEqual(self.LG._spritelayers[spr], expected_layer) - - expected_layer = 77 - spr2 = self.sprite() - spr2.layer = 55 - self.LG.add(spr2) - self.LG.change_layer(spr2, expected_layer) - - self.assertEqual(spr2.layer, expected_layer) - - def test_get_sprites_at(self): - sprites = [] - expected_sprites = [] - for i in range(3): - spr = self.sprite() - spr.rect = pygame.Rect(i * 50, i * 50, 100, 100) - sprites.append(spr) - if i < 2: - expected_sprites.append(spr) - self.LG.add(sprites) - result = self.LG.get_sprites_at((50, 50)) - self.assertEqual(result, expected_sprites) - - def test_get_top_layer(self): - layers = [1, 5, 2, 8, 4, 5, 3, 88, 23, 0] - for i in layers: - self.LG.add(self.sprite(), layer=i) - top_layer = self.LG.get_top_layer() - - 
self.assertEqual(top_layer, self.LG.get_top_layer()) - self.assertEqual(top_layer, max(layers)) - self.assertEqual(top_layer, max(self.LG._spritelayers.values())) - self.assertEqual(top_layer, self.LG._spritelayers[self.LG._spritelist[-1]]) - - def test_get_bottom_layer(self): - layers = [1, 5, 2, 8, 4, 5, 3, 88, 23, 0] - for i in layers: - self.LG.add(self.sprite(), layer=i) - bottom_layer = self.LG.get_bottom_layer() - - self.assertEqual(bottom_layer, self.LG.get_bottom_layer()) - self.assertEqual(bottom_layer, min(layers)) - self.assertEqual(bottom_layer, min(self.LG._spritelayers.values())) - self.assertEqual(bottom_layer, self.LG._spritelayers[self.LG._spritelist[0]]) - - def test_move_to_front(self): - layers = [1, 5, 2, 8, 4, 5, 3, 88, 23, 0] - for i in layers: - self.LG.add(self.sprite(), layer=i) - spr = self.sprite() - self.LG.add(spr, layer=3) - - self.assertNotEqual(spr, self.LG._spritelist[-1]) - - self.LG.move_to_front(spr) - - self.assertEqual(spr, self.LG._spritelist[-1]) - - def test_move_to_back(self): - layers = [1, 5, 2, 8, 4, 5, 3, 88, 23, 0] - for i in layers: - self.LG.add(self.sprite(), layer=i) - spr = self.sprite() - self.LG.add(spr, layer=55) - - self.assertNotEqual(spr, self.LG._spritelist[0]) - - self.LG.move_to_back(spr) - - self.assertEqual(spr, self.LG._spritelist[0]) - - def test_get_top_sprite(self): - layers = [1, 5, 2, 8, 4, 5, 3, 88, 23, 0] - for i in layers: - self.LG.add(self.sprite(), layer=i) - expected_layer = self.LG.get_top_layer() - layer = self.LG.get_layer_of_sprite(self.LG.get_top_sprite()) - - self.assertEqual(layer, expected_layer) - - def test_get_sprites_from_layer(self): - sprites = {} - layers = [ - 1, - 4, - 5, - 6, - 3, - 7, - 8, - 2, - 1, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 0, - 1, - 6, - 5, - 4, - 3, - 2, - ] - for lay in layers: - spr = self.sprite() - spr._layer = lay - self.LG.add(spr) - if lay not in sprites: - sprites[lay] = [] - sprites[lay].append(spr) - - for lay in self.LG.layers(): - for spr in self.LG.get_sprites_from_layer(lay): - self.assertIn(spr, sprites[lay]) - - sprites[lay].remove(spr) - if len(sprites[lay]) == 0: - del sprites[lay] - - self.assertEqual(len(sprites.values()), 0) - - def test_switch_layer(self): - sprites1 = [] - sprites2 = [] - layers = [3, 2, 3, 2, 3, 3, 2, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 2, 2, 3, 2, 3] - for lay in layers: - spr = self.sprite() - spr._layer = lay - self.LG.add(spr) - if lay == 2: - sprites1.append(spr) - else: - sprites2.append(spr) - - sprites1.sort(key=id) - sprites2.sort(key=id) - layer2_sprites = sorted(self.LG.get_sprites_from_layer(2), key=id) - layer3_sprites = sorted(self.LG.get_sprites_from_layer(3), key=id) - - self.assertListEqual(sprites1, layer2_sprites) - self.assertListEqual(sprites2, layer3_sprites) - self.assertEqual(len(self.LG), len(sprites1) + len(sprites2)) - - self.LG.switch_layer(2, 3) - layer2_sprites = sorted(self.LG.get_sprites_from_layer(2), key=id) - layer3_sprites = sorted(self.LG.get_sprites_from_layer(3), key=id) - - self.assertListEqual(sprites1, layer3_sprites) - self.assertListEqual(sprites2, layer2_sprites) - self.assertEqual(len(self.LG), len(sprites1) + len(sprites2)) - - def test_copy(self): - self.LG.add(self.sprite()) - spr = self.LG.sprites()[0] - lg_copy = self.LG.copy() - - self.assertIsInstance(lg_copy, type(self.LG)) - self.assertIn(spr, lg_copy) - self.assertIn(lg_copy, spr.groups()) - - -########################## LAYERED RENDER GROUP TESTS ########################## - - -class 
LayeredUpdatesTypeTest__SpriteTest(LayeredGroupBase, unittest.TestCase): - sprite = sprite.Sprite - - def setUp(self): - self.LG = sprite.LayeredUpdates() - - -class LayeredUpdatesTypeTest__DirtySprite(LayeredGroupBase, unittest.TestCase): - sprite = sprite.DirtySprite - - def setUp(self): - self.LG = sprite.LayeredUpdates() - - -class LayeredDirtyTypeTest__DirtySprite(LayeredGroupBase, unittest.TestCase): - sprite = sprite.DirtySprite - - def setUp(self): - self.LG = sprite.LayeredDirty() - - def test_repaint_rect(self): - group = self.LG - surface = pygame.Surface((100, 100)) - - group.repaint_rect(pygame.Rect(0, 0, 100, 100)) - group.draw(surface) - - def test_repaint_rect_with_clip(self): - group = self.LG - surface = pygame.Surface((100, 100)) - - group.set_clip(pygame.Rect(0, 0, 100, 100)) - group.repaint_rect(pygame.Rect(0, 0, 100, 100)) - group.draw(surface) - - def _nondirty_intersections_redrawn(self, use_source_rect=False): - # Helper method to ensure non-dirty sprites are redrawn correctly. - # - # Parameters: - # use_source_rect - allows non-dirty sprites to be tested - # with (True) and without (False) a source_rect - # - # This test was written to reproduce the behavior seen in issue #898. - # A non-dirty sprite (using source_rect) was being redrawn incorrectly - # after a dirty sprite intersected with it. - # - # This test does the following. - # 1. Creates a surface filled with white. Also creates an image_source - # with a default fill color of yellow and adds 2 images to it - # (red and blue rectangles). - # 2. Creates 2 DirtySprites (red_sprite and blue_sprite) using the - # image_source and adds them to a LayeredDirty group. - # 3. Moves the red_sprite and calls LayeredDirty.draw(surface) a few - # times. - # 4. Checks to make sure the sprites were redrawn correctly. - RED = pygame.Color("red") - BLUE = pygame.Color("blue") - WHITE = pygame.Color("white") - YELLOW = pygame.Color("yellow") - - surface = pygame.Surface((60, 80)) - surface.fill(WHITE) - start_pos = (10, 10) - - # These rects define each sprite's image area in the image_source. - red_sprite_source = pygame.Rect((45, 0), (5, 4)) - blue_sprite_source = pygame.Rect((0, 40), (20, 10)) - - # Create a source image/surface. - image_source = pygame.Surface((50, 50)) - image_source.fill(YELLOW) - image_source.fill(RED, red_sprite_source) - image_source.fill(BLUE, blue_sprite_source) - - # The blue_sprite is stationary and will not reset its dirty flag. It - # will be the non-dirty sprite in this test. Its values are dependent - # on the use_source_rect flag. - blue_sprite = pygame.sprite.DirtySprite(self.LG) - - if use_source_rect: - blue_sprite.image = image_source - # The rect is a bit smaller than the source_rect to make sure - # LayeredDirty.draw() is using the correct dimensions. - blue_sprite.rect = pygame.Rect( - start_pos, (blue_sprite_source.w - 7, blue_sprite_source.h - 7) - ) - blue_sprite.source_rect = blue_sprite_source - start_x, start_y = blue_sprite.rect.topleft - end_x = start_x + blue_sprite.source_rect.w - end_y = start_y + blue_sprite.source_rect.h - else: - blue_sprite.image = image_source.subsurface(blue_sprite_source) - blue_sprite.rect = pygame.Rect(start_pos, blue_sprite_source.size) - start_x, start_y = blue_sprite.rect.topleft - end_x, end_y = blue_sprite.rect.bottomright - - # The red_sprite is moving and will always be dirty. 
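The helper above builds its scene from DirtySprites in a LayeredDirty group: dirty controls repainting (0 clean, 1 repaint once, 2 repaint every frame) and source_rect can select a sub-area of image. A stripped-down sketch of those ingredients with illustrative sizes and colours, drawing onto an ordinary Surface:

import pygame

surface = pygame.Surface((60, 80))
surface.fill(pygame.Color("white"))

group = pygame.sprite.LayeredDirty()

block = pygame.sprite.DirtySprite(group)
block.image = pygame.Surface((20, 10))
block.image.fill(pygame.Color("blue"))
block.rect = block.image.get_rect(topleft=(10, 10))
# dirty defaults to 1: painted on the next draw, then treated as clean.

mover = pygame.sprite.DirtySprite(group)
mover.image = pygame.Surface((5, 4))
mover.image.fill(pygame.Color("red"))
mover.rect = mover.image.get_rect(topleft=(10, 10))
mover.dirty = 2                      # always repainted

for _ in range(4):
    mover.rect.move_ip(2, 1)
    changed = group.draw(surface)    # returns the rects that were updated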
- red_sprite = pygame.sprite.DirtySprite(self.LG) - red_sprite.image = image_source - red_sprite.rect = pygame.Rect(start_pos, red_sprite_source.size) - red_sprite.source_rect = red_sprite_source - red_sprite.dirty = 2 - - # Draw the red_sprite as it moves a few steps. - for _ in range(4): - red_sprite.rect.move_ip(2, 1) - - # This is the method being tested. - self.LG.draw(surface) - - # Check colors where the blue_sprite is drawn. We expect red where the - # red_sprite is drawn over the blue_sprite, but the rest should be - # blue. - surface.lock() # Lock surface for possible speed up. - try: - for y in range(start_y, end_y): - for x in range(start_x, end_x): - if red_sprite.rect.collidepoint(x, y): - expected_color = RED - else: - expected_color = BLUE - - color = surface.get_at((x, y)) - - self.assertEqual(color, expected_color, "pos=({}, {})".format(x, y)) - finally: - surface.unlock() - - def test_nondirty_intersections_redrawn(self): - """Ensure non-dirty sprites are correctly redrawn - when dirty sprites intersect with them. - """ - self._nondirty_intersections_redrawn() - - def test_nondirty_intersections_redrawn__with_source_rect(self): - """Ensure non-dirty sprites using source_rects are correctly redrawn - when dirty sprites intersect with them. - - Related to issue #898. - """ - self._nondirty_intersections_redrawn(True) - - -############################### SPRITE BASE CLASS ############################## -# -# tests common between sprite classes - - -class SpriteBase: - def setUp(self): - self.groups = [] - for Group in self.Groups: - self.groups.append(Group()) - - self.sprite = self.Sprite() - - def test_add_internal(self): - - for g in self.groups: - self.sprite.add_internal(g) - - for g in self.groups: - self.assertIn(g, self.sprite.groups()) - - def test_remove_internal(self): - - for g in self.groups: - self.sprite.add_internal(g) - - for g in self.groups: - self.sprite.remove_internal(g) - - for g in self.groups: - self.assertFalse(g in self.sprite.groups()) - - def test_update(self): - class test_sprite(pygame.sprite.Sprite): - sink = [] - - def __init__(self, *groups): - pygame.sprite.Sprite.__init__(self, *groups) - - def update(self, *args): - self.sink += args - - s = test_sprite() - s.update(1, 2, 3) - - self.assertEqual(test_sprite.sink, [1, 2, 3]) - - def test_update_with_kwargs(self): - class test_sprite(pygame.sprite.Sprite): - sink = [] - sink_dict = {} - - def __init__(self, *groups): - pygame.sprite.Sprite.__init__(self, *groups) - - def update(self, *args, **kwargs): - self.sink += args - self.sink_dict.update(kwargs) - - s = test_sprite() - s.update(1, 2, 3, foo=4, bar=5) - - self.assertEqual(test_sprite.sink, [1, 2, 3]) - self.assertEqual(test_sprite.sink_dict, {"foo": 4, "bar": 5}) - - def test___init____added_to_groups_passed(self): - expected_groups = sorted(self.groups, key=id) - sprite = self.Sprite(self.groups) - groups = sorted(sprite.groups(), key=id) - - self.assertListEqual(groups, expected_groups) - - def test_add(self): - expected_groups = sorted(self.groups, key=id) - self.sprite.add(self.groups) - groups = sorted(self.sprite.groups(), key=id) - - self.assertListEqual(groups, expected_groups) - - def test_alive(self): - self.assertFalse( - self.sprite.alive(), "Sprite should not be alive if in no groups" - ) - - self.sprite.add(self.groups) - - self.assertTrue(self.sprite.alive()) - - def test_groups(self): - for i, g in enumerate(self.groups): - expected_groups = sorted(self.groups[: i + 1], key=id) - self.sprite.add(g) - groups = 
sorted(self.sprite.groups(), key=id) - - self.assertListEqual(groups, expected_groups) - - def test_kill(self): - self.sprite.add(self.groups) - - self.assertTrue(self.sprite.alive()) - - self.sprite.kill() - - self.assertListEqual(self.sprite.groups(), []) - self.assertFalse(self.sprite.alive()) - - def test_remove(self): - self.sprite.add(self.groups) - self.sprite.remove(self.groups) - - self.assertListEqual(self.sprite.groups(), []) - - -############################## SPRITE CLASS TESTS ############################## - - -class SpriteTypeTest(SpriteBase, unittest.TestCase): - Sprite = sprite.Sprite - - Groups = [ - sprite.Group, - sprite.LayeredUpdates, - sprite.RenderUpdates, - sprite.OrderedUpdates, - ] - - -class DirtySpriteTypeTest(SpriteBase, unittest.TestCase): - Sprite = sprite.DirtySprite - - Groups = [ - sprite.Group, - sprite.LayeredUpdates, - sprite.RenderUpdates, - sprite.OrderedUpdates, - sprite.LayeredDirty, - ] - - -############################## BUG TESTS ####################################### - - -class SingleGroupBugsTest(unittest.TestCase): - def test_memoryleak_bug(self): - # For memoryleak bug posted to mailing list by Tobias Steinrücken on 16/11/10. - # Fixed in revision 2953. - - import weakref - import gc - - class MySprite(sprite.Sprite): - def __init__(self, *args, **kwargs): - sprite.Sprite.__init__(self, *args, **kwargs) - self.image = pygame.Surface((2, 4), 0, 24) - self.rect = self.image.get_rect() - - g = sprite.GroupSingle() - screen = pygame.Surface((4, 8), 0, 24) - s = MySprite() - r = weakref.ref(s) - g.sprite = s - del s - gc.collect() - - self.assertIsNotNone(r()) - - g.update() - g.draw(screen) - g.sprite = MySprite() - gc.collect() - - self.assertIsNone(r()) - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/surface_test.py b/venv/Lib/site-packages/pygame/tests/surface_test.py deleted file mode 100644 index ab9b0e6..0000000 --- a/venv/Lib/site-packages/pygame/tests/surface_test.py +++ /dev/null @@ -1,4015 +0,0 @@ -import os -import unittest -from pygame.tests import test_utils -from pygame.tests.test_utils import ( - example_path, - AssertRaisesRegexMixin, - SurfaceSubclass, -) - -try: - from pygame.tests.test_utils.arrinter import * -except (ImportError, NameError): - pass -import pygame -from pygame.locals import * -from pygame.bufferproxy import BufferProxy - -import platform -import gc -import weakref -import ctypes - -IS_PYPY = "PyPy" == platform.python_implementation() - - -class SurfaceTypeTest(AssertRaisesRegexMixin, unittest.TestCase): - def test_surface__pixel_format_as_surface_subclass(self): - """Ensure a subclassed surface can be used for pixel format - when creating a new surface.""" - expected_depth = 16 - expected_flags = SRCALPHA - expected_size = (13, 37) - depth_surface = SurfaceSubclass((11, 21), expected_flags, expected_depth) - - surface = pygame.Surface(expected_size, expected_flags, depth_surface) - - self.assertIsNot(surface, depth_surface) - self.assertIsInstance(surface, pygame.Surface) - self.assertNotIsInstance(surface, SurfaceSubclass) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_flags(), expected_flags) - self.assertEqual(surface.get_bitsize(), expected_depth) - - def test_set_clip(self): - """see if surface.set_clip(None) works correctly.""" - s = pygame.Surface((800, 600)) - r = pygame.Rect(10, 10, 10, 10) - s.set_clip(r) - r.move_ip(10, 0) - 
s.set_clip(None) - res = s.get_clip() - # this was garbled before. - self.assertEqual(res[0], 0) - self.assertEqual(res[2], 800) - - def test_print(self): - surf = pygame.Surface((70, 70), 0, 32) - self.assertEqual(repr(surf), "") - - def test_keyword_arguments(self): - surf = pygame.Surface((70, 70), flags=SRCALPHA, depth=32) - self.assertEqual(surf.get_flags() & SRCALPHA, SRCALPHA) - self.assertEqual(surf.get_bitsize(), 32) - - # sanity check to make sure the check below is valid - surf_16 = pygame.Surface((70, 70), 0, 16) - self.assertEqual(surf_16.get_bytesize(), 2) - - # try again with an argument list - surf_16 = pygame.Surface((70, 70), depth=16) - self.assertEqual(surf_16.get_bytesize(), 2) - - def test_set_at(self): - - # 24bit surfaces - s = pygame.Surface((100, 100), 0, 24) - s.fill((0, 0, 0)) - - # set it with a tuple. - s.set_at((0, 0), (10, 10, 10, 255)) - r = s.get_at((0, 0)) - self.assertIsInstance(r, pygame.Color) - self.assertEqual(r, (10, 10, 10, 255)) - - # try setting a color with a single integer. - s.fill((0, 0, 0, 255)) - s.set_at((10, 1), 0x0000FF) - r = s.get_at((10, 1)) - self.assertEqual(r, (0, 0, 255, 255)) - - def test_set_at__big_endian(self): - """png files are loaded in big endian format (BGR rather than RGB)""" - pygame.display.init() - try: - image = pygame.image.load(example_path(os.path.join("data", "BGR.png"))) - # Check they start red, green and blue - self.assertEqual(image.get_at((10, 10)), pygame.Color(255, 0, 0)) - self.assertEqual(image.get_at((10, 20)), pygame.Color(0, 255, 0)) - self.assertEqual(image.get_at((10, 40)), pygame.Color(0, 0, 255)) - # Set three pixels that are already red, green, blue - # to red, green and, blue with set_at: - image.set_at((10, 10), pygame.Color(255, 0, 0)) - image.set_at((10, 20), pygame.Color(0, 255, 0)) - image.set_at((10, 40), pygame.Color(0, 0, 255)) - - # Check they still are - self.assertEqual(image.get_at((10, 10)), pygame.Color(255, 0, 0)) - self.assertEqual(image.get_at((10, 20)), pygame.Color(0, 255, 0)) - self.assertEqual(image.get_at((10, 40)), pygame.Color(0, 0, 255)) - - finally: - pygame.display.quit() - - def test_SRCALPHA(self): - # has the flag been passed in ok? - surf = pygame.Surface((70, 70), SRCALPHA, 32) - self.assertEqual(surf.get_flags() & SRCALPHA, SRCALPHA) - - # 24bit surfaces can not have SRCALPHA. - self.assertRaises(ValueError, pygame.Surface, (100, 100), pygame.SRCALPHA, 24) - - # if we have a 32 bit surface, the SRCALPHA should have worked too. - surf2 = pygame.Surface((70, 70), SRCALPHA) - if surf2.get_bitsize() == 32: - self.assertEqual(surf2.get_flags() & SRCALPHA, SRCALPHA) - - def test_flags_default0_nodisplay(self): - """is set to zero, and SRCALPH is not set by default with no display initialized.""" - pygame.display.quit() - surf = pygame.Surface((70, 70)) - self.assertEqual(surf.get_flags() & SRCALPHA, 0) - - def test_flags_default0_display(self): - """is set to zero, and SRCALPH is not set by default even when the display is initialized.""" - pygame.display.set_mode((320, 200)) - try: - surf = pygame.Surface((70, 70)) - self.assertEqual(surf.get_flags() & SRCALPHA, 0) - finally: - pygame.display.quit() - - def test_masks(self): - def make_surf(bpp, flags, masks): - pygame.Surface((10, 10), flags, bpp, masks) - - # With some masks SDL_CreateRGBSurface does not work properly. - masks = (0xFF000000, 0xFF0000, 0xFF00, 0) - self.assertEqual(make_surf(32, 0, masks), None) - # For 24 and 32 bit surfaces Pygame assumes no losses. 
- masks = (0x7F0000, 0xFF00, 0xFF, 0) - self.assertRaises(ValueError, make_surf, 24, 0, masks) - self.assertRaises(ValueError, make_surf, 32, 0, masks) - # What contiguous bits in a mask. - masks = (0x6F0000, 0xFF00, 0xFF, 0) - self.assertRaises(ValueError, make_surf, 32, 0, masks) - - def test_get_bounding_rect(self): - surf = pygame.Surface((70, 70), SRCALPHA, 32) - surf.fill((0, 0, 0, 0)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.width, 0) - self.assertEqual(bound_rect.height, 0) - surf.set_at((30, 30), (255, 255, 255, 1)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.left, 30) - self.assertEqual(bound_rect.top, 30) - self.assertEqual(bound_rect.width, 1) - self.assertEqual(bound_rect.height, 1) - surf.set_at((29, 29), (255, 255, 255, 1)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.left, 29) - self.assertEqual(bound_rect.top, 29) - self.assertEqual(bound_rect.width, 2) - self.assertEqual(bound_rect.height, 2) - - surf = pygame.Surface((70, 70), 0, 24) - surf.fill((0, 0, 0)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.width, surf.get_width()) - self.assertEqual(bound_rect.height, surf.get_height()) - - surf.set_colorkey((0, 0, 0)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.width, 0) - self.assertEqual(bound_rect.height, 0) - surf.set_at((30, 30), (255, 255, 255)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.left, 30) - self.assertEqual(bound_rect.top, 30) - self.assertEqual(bound_rect.width, 1) - self.assertEqual(bound_rect.height, 1) - surf.set_at((60, 60), (255, 255, 255)) - bound_rect = surf.get_bounding_rect() - self.assertEqual(bound_rect.left, 30) - self.assertEqual(bound_rect.top, 30) - self.assertEqual(bound_rect.width, 31) - self.assertEqual(bound_rect.height, 31) - - # Issue #180 - pygame.display.init() - try: - surf = pygame.Surface((4, 1), 0, 8) - surf.fill((255, 255, 255)) - surf.get_bounding_rect() # Segfault. 
- finally: - pygame.display.quit() - - def test_copy(self): - """Ensure a surface can be copied.""" - color = (25, 25, 25, 25) - s1 = pygame.Surface((32, 32), pygame.SRCALPHA, 32) - s1.fill(color) - - s2 = s1.copy() - - s1rect = s1.get_rect() - s2rect = s2.get_rect() - - self.assertEqual(s1rect.size, s2rect.size) - self.assertEqual(s2.get_at((10, 10)), color) - - def test_fill(self): - """Ensure a surface can be filled.""" - color = (25, 25, 25, 25) - fill_rect = pygame.Rect(0, 0, 16, 16) - s1 = pygame.Surface((32, 32), pygame.SRCALPHA, 32) - s1.fill(color, fill_rect) - - for pt in test_utils.rect_area_pts(fill_rect): - self.assertEqual(s1.get_at(pt), color) - - for pt in test_utils.rect_outer_bounds(fill_rect): - self.assertNotEqual(s1.get_at(pt), color) - - def test_fill_rle(self): - """Test RLEACCEL flag with fill()""" - color = (250, 25, 25, 255) - - surf = pygame.Surface((32, 32)) - blit_surf = pygame.Surface((32, 32)) - - blit_surf.set_colorkey((255, 0, 255), pygame.RLEACCEL) - self.assertTrue(blit_surf.get_flags() & pygame.RLEACCELOK) - surf.blit(blit_surf, (0, 0)) - blit_surf.fill(color) - self.assertEqual( - blit_surf.mustlock(), (blit_surf.get_flags() & pygame.RLEACCEL) != 0 - ) - self.assertTrue(blit_surf.get_flags() & pygame.RLEACCEL) - - def test_mustlock_rle(self): - """Test RLEACCEL flag with mustlock()""" - surf = pygame.Surface((100, 100)) - blit_surf = pygame.Surface((100, 100)) - blit_surf.set_colorkey((0, 0, 255), pygame.RLEACCEL) - self.assertTrue(blit_surf.get_flags() & pygame.RLEACCELOK) - surf.blit(blit_surf, (0, 0)) - self.assertTrue(blit_surf.get_flags() & pygame.RLEACCEL) - self.assertTrue(blit_surf.mustlock()) - - def test_mustlock_surf_alpha_rle(self): - """Test RLEACCEL flag with mustlock() on a surface - with per pixel alpha - new feature in SDL2""" - surf = pygame.Surface((100, 100)) - blit_surf = pygame.Surface((100, 100), depth=32, flags=pygame.SRCALPHA) - blit_surf.set_colorkey((192, 191, 192, 255), pygame.RLEACCEL) - self.assertTrue(blit_surf.get_flags() & pygame.RLEACCELOK) - surf.blit(blit_surf, (0, 0)) - self.assertTrue(blit_surf.get_flags() & pygame.RLEACCEL) - self.assertTrue(blit_surf.get_flags() & pygame.SRCALPHA) - self.assertTrue(blit_surf.mustlock()) - - def test_copy_rle(self): - """Test copying a surface set to use run length encoding""" - s1 = pygame.Surface((32, 32), 24) - s1.set_colorkey((255, 0, 255), pygame.RLEACCEL) - self.assertTrue(s1.get_flags() & pygame.RLEACCELOK) - - newsurf = s1.copy() - self.assertTrue(s1.get_flags() & pygame.RLEACCELOK) - self.assertTrue(newsurf.get_flags() & pygame.RLEACCELOK) - - def test_subsurface_rle(self): - """Ensure an RLE sub-surface works independently of its parent.""" - color = (250, 25, 25, 255) - color2 = (200, 200, 250, 255) - sub_rect = pygame.Rect(16, 16, 16, 16) - s0 = pygame.Surface((32, 32), 24) - s1 = pygame.Surface((32, 32), 24) - s1.set_colorkey((255, 0, 255), pygame.RLEACCEL) - s1.fill(color) - s2 = s1.subsurface(sub_rect) - s2.fill(color2) - s0.blit(s1, (0, 0)) - self.assertTrue(s1.get_flags() & pygame.RLEACCEL) - self.assertTrue(not s2.get_flags() & pygame.RLEACCEL) - - def test_subsurface_rle2(self): - """Ensure an RLE sub-surface works independently of its parent.""" - color = (250, 25, 25, 255) - color2 = (200, 200, 250, 255) - sub_rect = pygame.Rect(16, 16, 16, 16) - - s0 = pygame.Surface((32, 32), 24) - s1 = pygame.Surface((32, 32), 24) - s1.set_colorkey((255, 0, 255), pygame.RLEACCEL) - s1.fill(color) - s2 = s1.subsurface(sub_rect) - s2.fill(color2) - s0.blit(s2, (0, 0)) - 
self.assertTrue(s1.get_flags() & pygame.RLEACCELOK) - self.assertTrue(not s2.get_flags() & pygame.RLEACCELOK) - - def test_solarwolf_rle_usage(self): - """Test for error/crash when calling set_colorkey() followed - by convert twice in succession. Code originally taken - from solarwolf.""" - - def optimize(img): - clear = img.get_colorkey() - img.set_colorkey(clear, RLEACCEL) - self.assertEqual(img.get_colorkey(), clear) - return img.convert() - - pygame.display.init() - try: - pygame.display.set_mode((640, 480)) - - image = pygame.image.load(example_path(os.path.join("data", "alien1.png"))) - image = image.convert() - orig_colorkey = image.get_colorkey() - - image = optimize(image) - image = optimize(image) - self.assertTrue(image.get_flags() & pygame.RLEACCELOK) - self.assertTrue(not image.get_flags() & pygame.RLEACCEL) - self.assertEqual(image.get_colorkey(), orig_colorkey) - self.assertTrue(isinstance(image, pygame.Surface)) - finally: - pygame.display.quit() - - def test_solarwolf_rle_usage_2(self): - """Test for RLE status after setting alpha""" - - pygame.display.init() - try: - pygame.display.set_mode((640, 480), depth=32) - blit_to_surf = pygame.Surface((100, 100)) - - image = pygame.image.load(example_path(os.path.join("data", "alien1.png"))) - image = image.convert() - orig_colorkey = image.get_colorkey() - - # set the colorkey with RLEACCEL, should add the RLEACCELOK flag - image.set_colorkey(orig_colorkey, RLEACCEL) - self.assertTrue(image.get_flags() & pygame.RLEACCELOK) - self.assertTrue(not image.get_flags() & pygame.RLEACCEL) - - # now blit the surface - should add the RLEACCEL flag - blit_to_surf.blit(image, (0, 0)) - self.assertTrue(image.get_flags() & pygame.RLEACCELOK) - self.assertTrue(image.get_flags() & pygame.RLEACCEL) - - # Now set the alpha, without RLE acceleration - should strip all - # RLE flags - image.set_alpha(90) - self.assertTrue(not image.get_flags() & pygame.RLEACCELOK) - self.assertTrue(not image.get_flags() & pygame.RLEACCEL) - - finally: - pygame.display.quit() - - def test_set_alpha__set_colorkey_rle(self): - pygame.display.init() - try: - pygame.display.set_mode((640, 480)) - blit_to_surf = pygame.Surface((80, 71)) - blit_to_surf.fill((255, 255, 255)) - - image = pygame.image.load(example_path(os.path.join("data", "alien1.png"))) - image = image.convert() - orig_colorkey = image.get_colorkey() - - # Add the RLE flag while setting alpha for the whole surface - image.set_alpha(90, RLEACCEL) - blit_to_surf.blit(image, (0, 0)) - sample_pixel_rle = blit_to_surf.get_at((50, 50)) - - # Now reset the colorkey to the original value with RLE - self.assertEqual(image.get_colorkey(), orig_colorkey) - image.set_colorkey(orig_colorkey, RLEACCEL) - blit_to_surf.fill((255, 255, 255)) - blit_to_surf.blit(image, (0, 0)) - sample_pixel_no_rle = blit_to_surf.get_at((50, 50)) - - self.assertAlmostEqual(sample_pixel_rle.r, sample_pixel_no_rle.r, delta=2) - self.assertAlmostEqual(sample_pixel_rle.g, sample_pixel_no_rle.g, delta=2) - self.assertAlmostEqual(sample_pixel_rle.b, sample_pixel_no_rle.b, delta=2) - - finally: - pygame.display.quit() - - def test_fill_negative_coordinates(self): - - # negative coordinates should be clipped by fill, and not draw outside the surface. 
- color = (25, 25, 25, 25) - color2 = (20, 20, 20, 25) - fill_rect = pygame.Rect(-10, -10, 16, 16) - - s1 = pygame.Surface((32, 32), pygame.SRCALPHA, 32) - r1 = s1.fill(color, fill_rect) - c = s1.get_at((0, 0)) - self.assertEqual(c, color) - - # make subsurface in the middle to test it doesn't over write. - s2 = s1.subsurface((5, 5, 5, 5)) - r2 = s2.fill(color2, (-3, -3, 5, 5)) - c2 = s1.get_at((4, 4)) - self.assertEqual(c, color) - - # rect returns the area we actually fill. - r3 = s2.fill(color2, (-30, -30, 5, 5)) - # since we are using negative coords, it should be an zero sized rect. - self.assertEqual(tuple(r3), (0, 0, 0, 0)) - - def test_fill_keyword_args(self): - """Ensure fill() accepts keyword arguments.""" - color = (1, 2, 3, 255) - area = (1, 1, 2, 2) - s1 = pygame.Surface((4, 4), 0, 32) - s1.fill(special_flags=pygame.BLEND_ADD, color=color, rect=area) - - self.assertEqual(s1.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(s1.get_at((1, 1)), color) - - ######################################################################## - - def test_get_alpha(self): - """Ensure a surface's alpha value can be retrieved.""" - s1 = pygame.Surface((32, 32), pygame.SRCALPHA, 32) - - self.assertEqual(s1.get_alpha(), 255) - - for alpha in (0, 32, 127, 255): - s1.set_alpha(alpha) - for t in range(4): - s1.set_alpha(s1.get_alpha()) - - self.assertEqual(s1.get_alpha(), alpha) - - ######################################################################## - - def test_get_bytesize(self): - """Ensure a surface's bit and byte sizes can be retrieved.""" - pygame.display.init() - try: - depth = 32 - depth_bytes = 4 - s1 = pygame.Surface((32, 32), pygame.SRCALPHA, depth) - - self.assertEqual(s1.get_bytesize(), depth_bytes) - self.assertEqual(s1.get_bitsize(), depth) - - depth = 15 - depth_bytes = 2 - s1 = pygame.Surface((32, 32), 0, depth) - - self.assertEqual(s1.get_bytesize(), depth_bytes) - self.assertEqual(s1.get_bitsize(), depth) - - depth = 12 - depth_bytes = 2 - s1 = pygame.Surface((32, 32), 0, depth) - - self.assertEqual(s1.get_bytesize(), depth_bytes) - self.assertEqual(s1.get_bitsize(), depth) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.get_bytesize() - finally: - pygame.display.quit() - - ######################################################################## - - def test_get_flags(self): - """Ensure a surface's flags can be retrieved.""" - s1 = pygame.Surface((32, 32), pygame.SRCALPHA, 32) - - self.assertEqual(s1.get_flags(), pygame.SRCALPHA) - - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", - 'requires a non-"dummy" SDL_VIDEODRIVER', - ) - def test_get_flags__display_surf(self): - pygame.display.init() - try: - # FULLSCREEN - screen_surf = pygame.display.set_mode((600, 400), flags=0) - self.assertFalse(screen_surf.get_flags() & pygame.FULLSCREEN) - - screen_surf = pygame.display.set_mode((600, 400), flags=pygame.FULLSCREEN) - self.assertTrue(screen_surf.get_flags() & pygame.FULLSCREEN) - - # NOFRAME - screen_surf = pygame.display.set_mode((600, 400), flags=0) - self.assertFalse(screen_surf.get_flags() & pygame.NOFRAME) - - screen_surf = pygame.display.set_mode((600, 400), flags=pygame.NOFRAME) - self.assertTrue(screen_surf.get_flags() & pygame.NOFRAME) - - # RESIZABLE - screen_surf = pygame.display.set_mode((600, 400), flags=0) - self.assertFalse(screen_surf.get_flags() & pygame.RESIZABLE) - - screen_surf = pygame.display.set_mode((600, 400), flags=pygame.RESIZABLE) - 
self.assertTrue(screen_surf.get_flags() & pygame.RESIZABLE) - - # OPENGL - screen_surf = pygame.display.set_mode((600, 400), flags=0) - # it can have an OPENGL flag by default on Macos? - if not (screen_surf.get_flags() & pygame.OPENGL): - self.assertFalse(screen_surf.get_flags() & pygame.OPENGL) - - try: - pygame.display.set_mode((200, 200), pygame.OPENGL, 32) - except pygame.error: - pass # If we can't create OPENGL surface don't try this test - else: - self.assertTrue(screen_surf.get_flags() & pygame.OPENGL) - finally: - pygame.display.quit() - - ######################################################################## - - def test_get_parent(self): - """Ensure a surface's parent can be retrieved.""" - pygame.display.init() - try: - parent = pygame.Surface((16, 16)) - child = parent.subsurface((0, 0, 5, 5)) - - self.assertIs(child.get_parent(), parent) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.get_parent() - finally: - pygame.display.quit() - - ######################################################################## - - def test_get_rect(self): - """Ensure a surface's rect can be retrieved.""" - size = (16, 16) - surf = pygame.Surface(size) - rect = surf.get_rect() - - self.assertEqual(rect.size, size) - - ######################################################################## - - def test_get_width__size_and_height(self): - """Ensure a surface's size, width and height can be retrieved.""" - for w in range(0, 255, 32): - for h in range(0, 127, 15): - s = pygame.Surface((w, h)) - self.assertEqual(s.get_width(), w) - self.assertEqual(s.get_height(), h) - self.assertEqual(s.get_size(), (w, h)) - - def test_get_view(self): - """Ensure a buffer view of the surface's pixels can be retrieved.""" - # Check that BufferProxys are returned when array depth is supported, - # ValueErrors returned otherwise. 
- Error = ValueError - s = pygame.Surface((5, 7), 0, 8) - v2 = s.get_view("2") - - self.assertRaises(Error, s.get_view, "0") - self.assertRaises(Error, s.get_view, "1") - self.assertIsInstance(v2, BufferProxy) - self.assertRaises(Error, s.get_view, "3") - - s = pygame.Surface((8, 7), 0, 8) - length = s.get_bytesize() * s.get_width() * s.get_height() - v0 = s.get_view("0") - v1 = s.get_view("1") - - self.assertIsInstance(v0, BufferProxy) - self.assertEqual(v0.length, length) - self.assertIsInstance(v1, BufferProxy) - self.assertEqual(v1.length, length) - - s = pygame.Surface((5, 7), 0, 16) - v2 = s.get_view("2") - - self.assertRaises(Error, s.get_view, "0") - self.assertRaises(Error, s.get_view, "1") - self.assertIsInstance(v2, BufferProxy) - self.assertRaises(Error, s.get_view, "3") - - s = pygame.Surface((8, 7), 0, 16) - length = s.get_bytesize() * s.get_width() * s.get_height() - v0 = s.get_view("0") - v1 = s.get_view("1") - - self.assertIsInstance(v0, BufferProxy) - self.assertEqual(v0.length, length) - self.assertIsInstance(v1, BufferProxy) - self.assertEqual(v1.length, length) - - s = pygame.Surface((5, 7), pygame.SRCALPHA, 16) - v2 = s.get_view("2") - - self.assertIsInstance(v2, BufferProxy) - self.assertRaises(Error, s.get_view, "3") - - s = pygame.Surface((5, 7), 0, 24) - v2 = s.get_view("2") - v3 = s.get_view("3") - - self.assertRaises(Error, s.get_view, "0") - self.assertRaises(Error, s.get_view, "1") - self.assertIsInstance(v2, BufferProxy) - self.assertIsInstance(v3, BufferProxy) - - s = pygame.Surface((8, 7), 0, 24) - length = s.get_bytesize() * s.get_width() * s.get_height() - v0 = s.get_view("0") - v1 = s.get_view("1") - - self.assertIsInstance(v0, BufferProxy) - self.assertEqual(v0.length, length) - self.assertIsInstance(v1, BufferProxy) - self.assertEqual(v1.length, length) - - s = pygame.Surface((5, 7), 0, 32) - length = s.get_bytesize() * s.get_width() * s.get_height() - v0 = s.get_view("0") - v1 = s.get_view("1") - v2 = s.get_view("2") - v3 = s.get_view("3") - - self.assertIsInstance(v0, BufferProxy) - self.assertEqual(v0.length, length) - self.assertIsInstance(v1, BufferProxy) - self.assertEqual(v1.length, length) - self.assertIsInstance(v2, BufferProxy) - self.assertIsInstance(v3, BufferProxy) - - s2 = s.subsurface((0, 0, 4, 7)) - - self.assertRaises(Error, s2.get_view, "0") - self.assertRaises(Error, s2.get_view, "1") - - s2 = None - s = pygame.Surface((5, 7), pygame.SRCALPHA, 32) - - for kind in ("2", "3", "a", "A", "r", "R", "g", "G", "b", "B"): - self.assertIsInstance(s.get_view(kind), BufferProxy) - - # Check default argument value: '2' - s = pygame.Surface((2, 4), 0, 32) - v = s.get_view() - if not IS_PYPY: - ai = ArrayInterface(v) - self.assertEqual(ai.nd, 2) - - # Check locking. - s = pygame.Surface((2, 4), 0, 32) - - self.assertFalse(s.get_locked()) - - v = s.get_view("2") - - self.assertFalse(s.get_locked()) - - c = v.__array_interface__ - - self.assertTrue(s.get_locked()) - - c = None - gc.collect() - - self.assertTrue(s.get_locked()) - - v = None - gc.collect() - - self.assertFalse(s.get_locked()) - - # Check invalid view kind values. - s = pygame.Surface((2, 4), pygame.SRCALPHA, 32) - self.assertRaises(TypeError, s.get_view, "") - self.assertRaises(TypeError, s.get_view, "9") - self.assertRaises(TypeError, s.get_view, "RGBA") - self.assertRaises(TypeError, s.get_view, 2) - - # Both unicode and bytes strings are allowed for kind. 
- s = pygame.Surface((2, 4), 0, 32) - s.get_view("2") - s.get_view(b"2") - - # Garbage collection - s = pygame.Surface((2, 4), 0, 32) - weak_s = weakref.ref(s) - v = s.get_view("3") - weak_v = weakref.ref(v) - gc.collect() - self.assertTrue(weak_s() is s) - self.assertTrue(weak_v() is v) - del v - gc.collect() - self.assertTrue(weak_s() is s) - self.assertTrue(weak_v() is None) - del s - gc.collect() - self.assertTrue(weak_s() is None) - - def test_get_buffer(self): - # Check that get_buffer works for all pixel sizes and for a subsurface. - - # Check for all pixel sizes - for bitsize in [8, 16, 24, 32]: - s = pygame.Surface((5, 7), 0, bitsize) - length = s.get_pitch() * s.get_height() - v = s.get_buffer() - - self.assertIsInstance(v, BufferProxy) - self.assertEqual(v.length, length) - self.assertEqual(repr(v), "") - - # Check for a subsurface (not contiguous) - s = pygame.Surface((7, 10), 0, 32) - s2 = s.subsurface((1, 2, 5, 7)) - length = s2.get_pitch() * s2.get_height() - v = s2.get_buffer() - - self.assertIsInstance(v, BufferProxy) - self.assertEqual(v.length, length) - - # Check locking. - s = pygame.Surface((2, 4), 0, 32) - v = s.get_buffer() - self.assertTrue(s.get_locked()) - v = None - gc.collect() - self.assertFalse(s.get_locked()) - - OLDBUF = hasattr(pygame.bufferproxy, "get_segcount") - - @unittest.skipIf(not OLDBUF, "old buffer not available") - def test_get_buffer_oldbuf(self): - from pygame.bufferproxy import get_segcount, get_write_buffer - - s = pygame.Surface((2, 4), pygame.SRCALPHA, 32) - v = s.get_buffer() - segcount, buflen = get_segcount(v) - self.assertEqual(segcount, 1) - self.assertEqual(buflen, s.get_pitch() * s.get_height()) - seglen, segaddr = get_write_buffer(v, 0) - self.assertEqual(segaddr, s._pixels_address) - self.assertEqual(seglen, buflen) - - @unittest.skipIf(not OLDBUF, "old buffer not available") - def test_get_view_oldbuf(self): - from pygame.bufferproxy import get_segcount, get_write_buffer - - s = pygame.Surface((2, 4), pygame.SRCALPHA, 32) - v = s.get_view("1") - segcount, buflen = get_segcount(v) - self.assertEqual(segcount, 8) - self.assertEqual(buflen, s.get_pitch() * s.get_height()) - seglen, segaddr = get_write_buffer(v, 7) - self.assertEqual(segaddr, s._pixels_address + s.get_bytesize() * 7) - self.assertEqual(seglen, s.get_bytesize()) - - def test_set_colorkey(self): - - # __doc__ (as of 2008-06-25) for pygame.surface.Surface.set_colorkey: - - # Surface.set_colorkey(Color, flags=0): return None - # Surface.set_colorkey(None): return None - # Set the transparent colorkey - - s = pygame.Surface((16, 16), pygame.SRCALPHA, 32) - - colorkeys = ((20, 189, 20, 255), (128, 50, 50, 255), (23, 21, 255, 255)) - - for colorkey in colorkeys: - s.set_colorkey(colorkey) - - for t in range(4): - s.set_colorkey(s.get_colorkey()) - - self.assertEqual(s.get_colorkey(), colorkey) - - def test_set_masks(self): - s = pygame.Surface((32, 32)) - r, g, b, a = s.get_masks() - self.assertRaises(TypeError, s.set_masks, (b, g, r, a)) - - def test_set_shifts(self): - s = pygame.Surface((32, 32)) - r, g, b, a = s.get_shifts() - self.assertRaises(TypeError, s.set_shifts, (b, g, r, a)) - - def test_blit_keyword_args(self): - color = (1, 2, 3, 255) - s1 = pygame.Surface((4, 4), 0, 32) - s2 = pygame.Surface((2, 2), 0, 32) - s2.fill((1, 2, 3)) - s1.blit(special_flags=BLEND_ADD, source=s2, dest=(1, 1), area=s2.get_rect()) - self.assertEqual(s1.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(s1.get_at((1, 1)), color) - - def test_blit_big_rects(self): - """SDL2 can have more 
than 16 bits for x, y, width, height.""" - big_surf = pygame.Surface((100, 68000), 0, 32) - big_surf_color = (255, 0, 0) - big_surf.fill(big_surf_color) - - background = pygame.Surface((500, 500), 0, 32) - background_color = (0, 255, 0) - background.fill(background_color) - - # copy parts of the big_surf using more than 16bit parts. - background.blit(big_surf, (100, 100), area=(0, 16000, 100, 100)) - background.blit(big_surf, (200, 200), area=(0, 32000, 100, 100)) - background.blit(big_surf, (300, 300), area=(0, 66000, 100, 100)) - - # check that all three areas are drawn. - self.assertEqual(background.get_at((101, 101)), big_surf_color) - self.assertEqual(background.get_at((201, 201)), big_surf_color) - self.assertEqual(background.get_at((301, 301)), big_surf_color) - - # areas outside the 3 blitted areas not covered by those blits. - self.assertEqual(background.get_at((400, 301)), background_color) - self.assertEqual(background.get_at((400, 201)), background_color) - self.assertEqual(background.get_at((100, 201)), background_color) - self.assertEqual(background.get_at((99, 99)), background_color) - self.assertEqual(background.get_at((450, 450)), background_color) - - -class TestSurfaceBlit(unittest.TestCase): - """Tests basic blitting functionality and options.""" - - # __doc__ (as of 2008-08-02) for pygame.surface.Surface.blit: - - # Surface.blit(source, dest, area=None, special_flags = 0): return Rect - # draw one image onto another - # - # Draws a source Surface onto this Surface. The draw can be positioned - # with the dest argument. Dest can either be pair of coordinates - # representing the upper left corner of the source. A Rect can also be - # passed as the destination and the topleft corner of the rectangle - # will be used as the position for the blit. The size of the - # destination rectangle does not effect the blit. - # - # An optional area rectangle can be passed as well. This represents a - # smaller portion of the source Surface to draw. - # - # An optional special flags is for passing in new in 1.8.0: BLEND_ADD, - # BLEND_SUB, BLEND_MULT, BLEND_MIN, BLEND_MAX new in 1.8.1: - # BLEND_RGBA_ADD, BLEND_RGBA_SUB, BLEND_RGBA_MULT, BLEND_RGBA_MIN, - # BLEND_RGBA_MAX BLEND_RGB_ADD, BLEND_RGB_SUB, BLEND_RGB_MULT, - # BLEND_RGB_MIN, BLEND_RGB_MAX With other special blitting flags - # perhaps added in the future. - # - # The return rectangle is the area of the affected pixels, excluding - # any pixels outside the destination Surface, or outside the clipping - # area. - # - # Pixel alphas will be ignored when blitting to an 8 bit Surface. - # special_flags new in pygame 1.8. 
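    # --- Illustrative sketch (editor's addition, not part of the removed pygame test file) ---
    # The quoted documentation above describes Surface.blit(source, dest, area=None,
    # special_flags=0). The commented snippet below is a minimal, hedged usage example
    # using only the standard pygame 2 API; names like `dst`/`src` are made up for
    # illustration. If uncommented, it should run as-is:
    #
    #   import pygame
    #
    #   pygame.init()
    #   dst = pygame.Surface((64, 64))        # new surfaces start out black
    #   src = pygame.Surface((16, 16))
    #   src.fill((255, 0, 0))                 # opaque red source
    #
    #   # dest may be a coordinate pair or a Rect (only its topleft is used);
    #   # area selects a sub-rectangle of the source to draw.
    #   dirty = dst.blit(src, dest=(10, 10), area=pygame.Rect(0, 0, 8, 8))
    #   assert dirty.size == (8, 8)           # returned Rect covers the affected pixels
    #
    #   # special_flags selects a blend mode, e.g. additive blending:
    #   dst.blit(src, (10, 10), special_flags=pygame.BLEND_ADD)
    # --- end of editorial sketch ---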
- - def setUp(self): - """Resets starting surfaces.""" - self.src_surface = pygame.Surface((256, 256), 32) - self.src_surface.fill(pygame.Color(255, 255, 255)) - self.dst_surface = pygame.Surface((64, 64), 32) - self.dst_surface.fill(pygame.Color(0, 0, 0)) - - def test_blit_overflow_coord(self): - """Full coverage w/ overflow, specified with Coordinate""" - result = self.dst_surface.blit(self.src_surface, (0, 0)) - self.assertIsInstance(result, pygame.Rect) - self.assertEqual(result.size, (64, 64)) - for k in [(x, x) for x in range(64)]: - self.assertEqual(self.dst_surface.get_at(k), (255, 255, 255)) - - def test_blit_overflow_rect(self): - """Full coverage w/ overflow, specified with a Rect""" - result = self.dst_surface.blit(self.src_surface, pygame.Rect(-1, -1, 300, 300)) - self.assertIsInstance(result, pygame.Rect) - self.assertEqual(result.size, (64, 64)) - for k in [(x, x) for x in range(64)]: - self.assertEqual(self.dst_surface.get_at(k), (255, 255, 255)) - - def test_blit_overflow_nonorigin(self): - """Test Rectange Dest, with overflow but with starting rect with top-left at (1,1)""" - result = self.dst_surface.blit(self.src_surface, dest=pygame.Rect((1, 1, 1, 1))) - self.assertIsInstance(result, pygame.Rect) - self.assertEqual(result.size, (63, 63)) - self.assertEqual(self.dst_surface.get_at((0, 0)), (0, 0, 0)) - self.assertEqual(self.dst_surface.get_at((63, 0)), (0, 0, 0)) - self.assertEqual(self.dst_surface.get_at((0, 63)), (0, 0, 0)) - self.assertEqual(self.dst_surface.get_at((1, 1)), (255, 255, 255)) - self.assertEqual(self.dst_surface.get_at((63, 63)), (255, 255, 255)) - - def test_blit_area_contraint(self): - """Testing area constraint""" - result = self.dst_surface.blit( - self.src_surface, - dest=pygame.Rect((1, 1, 1, 1)), - area=pygame.Rect((2, 2, 2, 2)), - ) - self.assertIsInstance(result, pygame.Rect) - self.assertEqual(result.size, (2, 2)) - self.assertEqual(self.dst_surface.get_at((0, 0)), (0, 0, 0)) # Corners - self.assertEqual(self.dst_surface.get_at((63, 0)), (0, 0, 0)) - self.assertEqual(self.dst_surface.get_at((0, 63)), (0, 0, 0)) - self.assertEqual(self.dst_surface.get_at((63, 63)), (0, 0, 0)) - self.assertEqual( - self.dst_surface.get_at((1, 1)), (255, 255, 255) - ) # Blitted Area - self.assertEqual(self.dst_surface.get_at((2, 2)), (255, 255, 255)) - self.assertEqual(self.dst_surface.get_at((3, 3)), (0, 0, 0)) - # Should stop short of filling in (3,3) - - def test_blit_zero_overlap(self): - """Testing zero-overlap condition.""" - result = self.dst_surface.blit( - self.src_surface, - dest=pygame.Rect((-256, -256, 1, 1)), - area=pygame.Rect((2, 2, 256, 256)), - ) - self.assertIsInstance(result, pygame.Rect) - self.assertEqual(result.size, (0, 0)) # No blitting expected - for k in [(x, x) for x in range(64)]: - self.assertEqual(self.dst_surface.get_at(k), (0, 0, 0)) # Diagonal - self.assertEqual( - self.dst_surface.get_at((63, 0)), (0, 0, 0) - ) # Remaining corners - self.assertEqual(self.dst_surface.get_at((0, 63)), (0, 0, 0)) - - def test_blit__SRCALPHA_opaque_source(self): - src = pygame.Surface((256, 256), SRCALPHA, 32) - dst = src.copy() - - for i, j in test_utils.rect_area_pts(src.get_rect()): - dst.set_at((i, j), (i, 0, 0, j)) - src.set_at((i, j), (0, i, 0, 255)) - - dst.blit(src, (0, 0)) - - for pt in test_utils.rect_area_pts(src.get_rect()): - self.assertEqual(dst.get_at(pt)[1], src.get_at(pt)[1]) - - def test_blit__blit_to_self(self): - """Test that blit operation works on self, alpha value is - correct, and that no RGB distortion occurs.""" - 
test_surface = pygame.Surface((128, 128), SRCALPHA, 32) - area = test_surface.get_rect() - - for pt, test_color in test_utils.gradient(area.width, area.height): - test_surface.set_at(pt, test_color) - - reference_surface = test_surface.copy() - - test_surface.blit(test_surface, (0, 0)) - - for x in range(area.width): - for y in range(area.height): - (r, g, b, a) = reference_color = reference_surface.get_at((x, y)) - expected_color = (r, g, b, (a + (a * ((256 - a) // 256)))) - self.assertEqual(reference_color, expected_color) - - self.assertEqual(reference_surface.get_rect(), test_surface.get_rect()) - - def test_blit__SRCALPHA_to_SRCALPHA_non_zero(self): - """Tests blitting a nonzero alpha surface to another nonzero alpha surface - both straight alpha compositing method. Test is fuzzy (+/- 1/256) to account for - different implementations in SDL1 and SDL2. - """ - - size = (32, 32) - - def check_color_diff(color1, color2): - """Returns True if two colors are within (1, 1, 1, 1) of each other.""" - for val in color1 - color2: - if abs(val) > 1: - return False - return True - - def high_a_onto_low(high, low): - """Tests straight alpha case. Source is low alpha, destination is high alpha""" - high_alpha_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - low_alpha_surface = high_alpha_surface.copy() - high_alpha_color = Color( - (high, high, low, high) - ) # Injecting some RGB variance. - low_alpha_color = Color((high, low, low, low)) - high_alpha_surface.fill(high_alpha_color) - low_alpha_surface.fill(low_alpha_color) - - high_alpha_surface.blit(low_alpha_surface, (0, 0)) - - expected_color = low_alpha_color + Color( - tuple( - ((x * (255 - low_alpha_color.a)) // 255) for x in high_alpha_color - ) - ) - self.assertTrue( - check_color_diff(high_alpha_surface.get_at((0, 0)), expected_color) - ) - - def low_a_onto_high(high, low): - """Tests straight alpha case. Source is high alpha, destination is low alpha""" - high_alpha_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - low_alpha_surface = high_alpha_surface.copy() - high_alpha_color = Color( - (high, high, low, high) - ) # Injecting some RGB variance. - low_alpha_color = Color((high, low, low, low)) - high_alpha_surface.fill(high_alpha_color) - low_alpha_surface.fill(low_alpha_color) - - low_alpha_surface.blit(high_alpha_surface, (0, 0)) - - expected_color = high_alpha_color + Color( - tuple( - ((x * (255 - high_alpha_color.a)) // 255) for x in low_alpha_color - ) - ) - self.assertTrue( - check_color_diff(low_alpha_surface.get_at((0, 0)), expected_color) - ) - - for low_a in range(0, 128): - for high_a in range(128, 256): - high_a_onto_low(high_a, low_a) - low_a_onto_high(high_a, low_a) - - def test_blit__SRCALPHA32_to_8(self): - # Bug: fatal - # SDL_DisplayConvert segfaults when video is uninitialized. - target = pygame.Surface((11, 8), 0, 8) - test_color = target.get_palette_at(2) - source = pygame.Surface((1, 1), pygame.SRCALPHA, 32) - source.set_at((0, 0), test_color) - target.blit(source, (0, 0)) - - -class GeneralSurfaceTests(AssertRaisesRegexMixin, unittest.TestCase): - @unittest.skipIf( - os.environ.get("SDL_VIDEODRIVER") == "dummy", - 'requires a non-"dummy" SDL_VIDEODRIVER', - ) - def test_image_convert_bug_131(self): - # Bitbucket bug #131: Unable to Surface.convert(32) some 1-bit images. 
- # https://bitbucket.org/pygame/pygame/issue/131/unable-to-surfaceconvert-32-some-1-bit - - pygame.display.init() - try: - pygame.display.set_mode((640, 480)) - - im = pygame.image.load(example_path(os.path.join("data", "city.png"))) - im2 = pygame.image.load(example_path(os.path.join("data", "brick.png"))) - - self.assertEqual(im.get_palette(), ((0, 0, 0, 255), (255, 255, 255, 255))) - self.assertEqual(im2.get_palette(), ((0, 0, 0, 255), (0, 0, 0, 255))) - - self.assertEqual(repr(im.convert(32)), "") - self.assertEqual(repr(im2.convert(32)), "") - - # Ensure a palette format to palette format works. - im3 = im.convert(8) - self.assertEqual(repr(im3), "") - self.assertEqual(im3.get_palette(), im.get_palette()) - - finally: - pygame.display.quit() - - def test_convert_init(self): - """Ensure initialization exceptions are raised - for surf.convert().""" - pygame.display.quit() - surf = pygame.Surface((1, 1)) - - self.assertRaisesRegex(pygame.error, "display initialized", surf.convert) - - pygame.display.init() - try: - if os.environ.get("SDL_VIDEODRIVER") != "dummy": - try: - surf.convert(32) - surf.convert(pygame.Surface((1, 1))) - except pygame.error: - self.fail("convert() should not raise an exception here.") - - self.assertRaisesRegex(pygame.error, "No video mode", surf.convert) - - pygame.display.set_mode((640, 480)) - try: - surf.convert() - except pygame.error: - self.fail("convert() should not raise an exception here.") - finally: - pygame.display.quit() - - def test_convert_alpha_init(self): - """Ensure initialization exceptions are raised - for surf.convert_alpha().""" - pygame.display.quit() - surf = pygame.Surface((1, 1)) - - self.assertRaisesRegex(pygame.error, "display initialized", surf.convert_alpha) - - pygame.display.init() - try: - self.assertRaisesRegex(pygame.error, "No video mode", surf.convert_alpha) - - pygame.display.set_mode((640, 480)) - try: - surf.convert_alpha() - except pygame.error: - self.fail("convert_alpha() should not raise an exception here.") - finally: - pygame.display.quit() - - def test_convert_alpha_SRCALPHA(self): - """Ensure that the surface returned by surf.convert_alpha() - has alpha blending enabled""" - pygame.display.init() - try: - pygame.display.set_mode((640, 480)) - - s1 = pygame.Surface((100, 100), 0, 32) - # s2=pygame.Surface((100,100), pygame.SRCALPHA, 32) - s1_alpha = s1.convert_alpha() - self.assertEqual(s1_alpha.get_flags() & SRCALPHA, SRCALPHA) - self.assertEqual(s1_alpha.get_alpha(), 255) - finally: - pygame.display.quit() - - def test_src_alpha_issue_1289(self): - """blit should be white.""" - surf1 = pygame.Surface((1, 1), pygame.SRCALPHA, 32) - surf1.fill((255, 255, 255, 100)) - - surf2 = pygame.Surface((1, 1), pygame.SRCALPHA, 32) - self.assertEqual(surf2.get_at((0, 0)), (0, 0, 0, 0)) - surf2.blit(surf1, (0, 0)) - - self.assertEqual(surf1.get_at((0, 0)), (255, 255, 255, 100)) - self.assertEqual(surf2.get_at((0, 0)), (255, 255, 255, 100)) - - def test_src_alpha_compatible(self): - """ "What pygame 1.9.x did". Is the alpha blitter as before?""" - - # The table below was generated with the SDL1 blit. 
- # def print_table(): - # nums = [0, 1, 65, 126, 127, 199, 254, 255] - # results = {} - # for dest_r, dest_b, dest_a in zip(nums, reversed(nums), reversed(nums)): - # for src_r, src_b, src_a in zip(nums, reversed(nums), nums): - # src_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - # src_surf.fill((src_r, 255, src_b, src_a)) - # dest_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - # dest_surf.fill((dest_r, 255, dest_b, dest_a)) - # dest_surf.blit(src_surf, (0, 0)) - # key = ((dest_r, dest_b, dest_a), (src_r, src_b, src_a)) - # results[key] = dest_surf.get_at((65, 33)) - # print("(dest_r, dest_b, dest_a), (src_r, src_b, src_a): color") - # pprint(results) - - results_expected = { - ((0, 255, 255), (0, 255, 0)): (0, 255, 255, 255), - ((0, 255, 255), (1, 254, 1)): (0, 255, 255, 255), - ((0, 255, 255), (65, 199, 65)): (16, 255, 241, 255), - ((0, 255, 255), (126, 127, 126)): (62, 255, 192, 255), - ((0, 255, 255), (127, 126, 127)): (63, 255, 191, 255), - ((0, 255, 255), (199, 65, 199)): (155, 255, 107, 255), - ((0, 255, 255), (254, 1, 254)): (253, 255, 2, 255), - ((0, 255, 255), (255, 0, 255)): (255, 255, 0, 255), - ((1, 254, 254), (0, 255, 0)): (1, 255, 254, 254), - ((1, 254, 254), (1, 254, 1)): (1, 255, 254, 255), - ((1, 254, 254), (65, 199, 65)): (17, 255, 240, 255), - ((1, 254, 254), (126, 127, 126)): (63, 255, 191, 255), - ((1, 254, 254), (127, 126, 127)): (64, 255, 190, 255), - ((1, 254, 254), (199, 65, 199)): (155, 255, 107, 255), - ((1, 254, 254), (254, 1, 254)): (253, 255, 2, 255), - ((1, 254, 254), (255, 0, 255)): (255, 255, 0, 255), - ((65, 199, 199), (0, 255, 0)): (65, 255, 199, 199), - ((65, 199, 199), (1, 254, 1)): (64, 255, 200, 200), - ((65, 199, 199), (65, 199, 65)): (65, 255, 199, 214), - ((65, 199, 199), (126, 127, 126)): (95, 255, 164, 227), - ((65, 199, 199), (127, 126, 127)): (96, 255, 163, 227), - ((65, 199, 199), (199, 65, 199)): (169, 255, 95, 243), - ((65, 199, 199), (254, 1, 254)): (253, 255, 2, 255), - ((65, 199, 199), (255, 0, 255)): (255, 255, 0, 255), - ((126, 127, 127), (0, 255, 0)): (126, 255, 127, 127), - ((126, 127, 127), (1, 254, 1)): (125, 255, 128, 128), - ((126, 127, 127), (65, 199, 65)): (110, 255, 146, 160), - ((126, 127, 127), (126, 127, 126)): (126, 255, 127, 191), - ((126, 127, 127), (127, 126, 127)): (126, 255, 126, 191), - ((126, 127, 127), (199, 65, 199)): (183, 255, 79, 227), - ((126, 127, 127), (254, 1, 254)): (253, 255, 1, 255), - ((126, 127, 127), (255, 0, 255)): (255, 255, 0, 255), - ((127, 126, 126), (0, 255, 0)): (127, 255, 126, 126), - ((127, 126, 126), (1, 254, 1)): (126, 255, 127, 127), - ((127, 126, 126), (65, 199, 65)): (111, 255, 145, 159), - ((127, 126, 126), (126, 127, 126)): (127, 255, 126, 190), - ((127, 126, 126), (127, 126, 127)): (127, 255, 126, 191), - ((127, 126, 126), (199, 65, 199)): (183, 255, 78, 227), - ((127, 126, 126), (254, 1, 254)): (254, 255, 1, 255), - ((127, 126, 126), (255, 0, 255)): (255, 255, 0, 255), - ((199, 65, 65), (0, 255, 0)): (199, 255, 65, 65), - ((199, 65, 65), (1, 254, 1)): (198, 255, 66, 66), - ((199, 65, 65), (65, 199, 65)): (165, 255, 99, 114), - ((199, 65, 65), (126, 127, 126)): (163, 255, 96, 159), - ((199, 65, 65), (127, 126, 127)): (163, 255, 95, 160), - ((199, 65, 65), (199, 65, 199)): (199, 255, 65, 214), - ((199, 65, 65), (254, 1, 254)): (254, 255, 1, 255), - ((199, 65, 65), (255, 0, 255)): (255, 255, 0, 255), - ((254, 1, 1), (0, 255, 0)): (254, 255, 1, 1), - ((254, 1, 1), (1, 254, 1)): (253, 255, 2, 2), - ((254, 1, 1), (65, 199, 65)): (206, 255, 52, 66), - ((254, 1, 1), (126, 
127, 126)): (191, 255, 63, 127), - ((254, 1, 1), (127, 126, 127)): (191, 255, 63, 128), - ((254, 1, 1), (199, 65, 199)): (212, 255, 51, 200), - ((254, 1, 1), (254, 1, 254)): (254, 255, 1, 255), - ((254, 1, 1), (255, 0, 255)): (255, 255, 0, 255), - ((255, 0, 0), (0, 255, 0)): (0, 255, 255, 0), - ((255, 0, 0), (1, 254, 1)): (1, 255, 254, 1), - ((255, 0, 0), (65, 199, 65)): (65, 255, 199, 65), - ((255, 0, 0), (126, 127, 126)): (126, 255, 127, 126), - ((255, 0, 0), (127, 126, 127)): (127, 255, 126, 127), - ((255, 0, 0), (199, 65, 199)): (199, 255, 65, 199), - ((255, 0, 0), (254, 1, 254)): (254, 255, 1, 254), - ((255, 0, 0), (255, 0, 255)): (255, 255, 0, 255), - } - - # chosen because they contain edge cases. - nums = [0, 1, 65, 126, 127, 199, 254, 255] - results = {} - for dst_r, dst_b, dst_a in zip(nums, reversed(nums), reversed(nums)): - for src_r, src_b, src_a in zip(nums, reversed(nums), nums): - with self.subTest( - src_r=src_r, - src_b=src_b, - src_a=src_a, - dest_r=dst_r, - dest_b=dst_b, - dest_a=dst_a, - ): - src_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - src_surf.fill((src_r, 255, src_b, src_a)) - dest_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - dest_surf.fill((dst_r, 255, dst_b, dst_a)) - - dest_surf.blit(src_surf, (0, 0)) - key = ((dst_r, dst_b, dst_a), (src_r, src_b, src_a)) - results[key] = dest_surf.get_at((65, 33)) - self.assertEqual(results[key], results_expected[key]) - - self.assertEqual(results, results_expected) - - def test_src_alpha_compatible_16bit(self): - """ "What pygame 1.9.x did". Is the alpha blitter as before?""" - - # The table below was generated with the SDL1 blit. - # def print_table(): - # nums = [0, 1, 65, 126, 127, 199, 254, 255] - # results = {} - # for dest_r, dest_b, dest_a in zip(nums, reversed(nums), reversed(nums)): - # for src_r, src_b, src_a in zip(nums, reversed(nums), nums): - # src_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 16) - # src_surf.fill((src_r, 255, src_b, src_a)) - # dest_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 16) - # dest_surf.fill((dest_r, 255, dest_b, dest_a)) - # dest_surf.blit(src_surf, (0, 0)) - # key = ((dest_r, dest_b, dest_a), (src_r, src_b, src_a)) - # results[key] = dest_surf.get_at((65, 33)) - # print("(dest_r, dest_b, dest_a), (src_r, src_b, src_a): color") - # pprint(results) - - results_expected = { - ((0, 255, 255), (0, 255, 0)): (0, 255, 255, 255), - ((0, 255, 255), (1, 254, 1)): (0, 255, 255, 255), - ((0, 255, 255), (65, 199, 65)): (17, 255, 255, 255), - ((0, 255, 255), (126, 127, 126)): (51, 255, 204, 255), - ((0, 255, 255), (127, 126, 127)): (51, 255, 204, 255), - ((0, 255, 255), (199, 65, 199)): (170, 255, 102, 255), - ((0, 255, 255), (254, 1, 254)): (255, 255, 0, 255), - ((0, 255, 255), (255, 0, 255)): (255, 255, 0, 255), - ((1, 254, 254), (0, 255, 0)): (0, 255, 255, 255), - ((1, 254, 254), (1, 254, 1)): (0, 255, 255, 255), - ((1, 254, 254), (65, 199, 65)): (17, 255, 255, 255), - ((1, 254, 254), (126, 127, 126)): (51, 255, 204, 255), - ((1, 254, 254), (127, 126, 127)): (51, 255, 204, 255), - ((1, 254, 254), (199, 65, 199)): (170, 255, 102, 255), - ((1, 254, 254), (254, 1, 254)): (255, 255, 0, 255), - ((1, 254, 254), (255, 0, 255)): (255, 255, 0, 255), - ((65, 199, 199), (0, 255, 0)): (68, 255, 204, 204), - ((65, 199, 199), (1, 254, 1)): (68, 255, 204, 204), - ((65, 199, 199), (65, 199, 65)): (68, 255, 204, 221), - ((65, 199, 199), (126, 127, 126)): (85, 255, 170, 238), - ((65, 199, 199), (127, 126, 127)): (85, 255, 170, 238), - ((65, 199, 199), (199, 65, 199)): (187, 255, 85, 
255), - ((65, 199, 199), (254, 1, 254)): (255, 255, 0, 255), - ((65, 199, 199), (255, 0, 255)): (255, 255, 0, 255), - ((126, 127, 127), (0, 255, 0)): (119, 255, 119, 119), - ((126, 127, 127), (1, 254, 1)): (119, 255, 119, 119), - ((126, 127, 127), (65, 199, 65)): (102, 255, 136, 153), - ((126, 127, 127), (126, 127, 126)): (119, 255, 119, 187), - ((126, 127, 127), (127, 126, 127)): (119, 255, 119, 187), - ((126, 127, 127), (199, 65, 199)): (187, 255, 68, 238), - ((126, 127, 127), (254, 1, 254)): (255, 255, 0, 255), - ((126, 127, 127), (255, 0, 255)): (255, 255, 0, 255), - ((127, 126, 126), (0, 255, 0)): (119, 255, 119, 119), - ((127, 126, 126), (1, 254, 1)): (119, 255, 119, 119), - ((127, 126, 126), (65, 199, 65)): (102, 255, 136, 153), - ((127, 126, 126), (126, 127, 126)): (119, 255, 119, 187), - ((127, 126, 126), (127, 126, 127)): (119, 255, 119, 187), - ((127, 126, 126), (199, 65, 199)): (187, 255, 68, 238), - ((127, 126, 126), (254, 1, 254)): (255, 255, 0, 255), - ((127, 126, 126), (255, 0, 255)): (255, 255, 0, 255), - ((199, 65, 65), (0, 255, 0)): (204, 255, 68, 68), - ((199, 65, 65), (1, 254, 1)): (204, 255, 68, 68), - ((199, 65, 65), (65, 199, 65)): (170, 255, 102, 119), - ((199, 65, 65), (126, 127, 126)): (170, 255, 85, 153), - ((199, 65, 65), (127, 126, 127)): (170, 255, 85, 153), - ((199, 65, 65), (199, 65, 199)): (204, 255, 68, 221), - ((199, 65, 65), (254, 1, 254)): (255, 255, 0, 255), - ((199, 65, 65), (255, 0, 255)): (255, 255, 0, 255), - ((254, 1, 1), (0, 255, 0)): (0, 255, 255, 0), - ((254, 1, 1), (1, 254, 1)): (0, 255, 255, 0), - ((254, 1, 1), (65, 199, 65)): (68, 255, 204, 68), - ((254, 1, 1), (126, 127, 126)): (119, 255, 119, 119), - ((254, 1, 1), (127, 126, 127)): (119, 255, 119, 119), - ((254, 1, 1), (199, 65, 199)): (204, 255, 68, 204), - ((254, 1, 1), (254, 1, 254)): (255, 255, 0, 255), - ((254, 1, 1), (255, 0, 255)): (255, 255, 0, 255), - ((255, 0, 0), (0, 255, 0)): (0, 255, 255, 0), - ((255, 0, 0), (1, 254, 1)): (0, 255, 255, 0), - ((255, 0, 0), (65, 199, 65)): (68, 255, 204, 68), - ((255, 0, 0), (126, 127, 126)): (119, 255, 119, 119), - ((255, 0, 0), (127, 126, 127)): (119, 255, 119, 119), - ((255, 0, 0), (199, 65, 199)): (204, 255, 68, 204), - ((255, 0, 0), (254, 1, 254)): (255, 255, 0, 255), - ((255, 0, 0), (255, 0, 255)): (255, 255, 0, 255), - } - - # chosen because they contain edge cases. 
- nums = [0, 1, 65, 126, 127, 199, 254, 255] - results = {} - for dst_r, dst_b, dst_a in zip(nums, reversed(nums), reversed(nums)): - for src_r, src_b, src_a in zip(nums, reversed(nums), nums): - with self.subTest( - src_r=src_r, - src_b=src_b, - src_a=src_a, - dest_r=dst_r, - dest_b=dst_b, - dest_a=dst_a, - ): - src_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 16) - src_surf.fill((src_r, 255, src_b, src_a)) - dest_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 16) - dest_surf.fill((dst_r, 255, dst_b, dst_a)) - - dest_surf.blit(src_surf, (0, 0)) - key = ((dst_r, dst_b, dst_a), (src_r, src_b, src_a)) - results[key] = dest_surf.get_at((65, 33)) - self.assertEqual(results[key], results_expected[key]) - - self.assertEqual(results, results_expected) - - def test_sdl1_mimic_blitter_with_set_alpha(self): - """does the SDL 1 style blitter in pygame 2 work with set_alpha(), - this feature only exists in pygame 2/SDL2 SDL1 did not support - combining surface and pixel alpha""" - - results_expected = { - ((0, 255, 255), (0, 255, 0)): (0, 255, 255, 255), - ((0, 255, 255), (1, 254, 1)): (0, 255, 255, 255), - ((0, 255, 255), (65, 199, 65)): (16, 255, 241, 255), - ((0, 255, 255), (126, 127, 126)): (62, 255, 192, 255), - ((0, 255, 255), (127, 126, 127)): (63, 255, 191, 255), - ((0, 255, 255), (199, 65, 199)): (155, 255, 107, 255), - ((0, 255, 255), (254, 1, 254)): (253, 255, 2, 255), - ((0, 255, 255), (255, 0, 255)): (255, 255, 0, 255), - ((1, 254, 254), (0, 255, 0)): (1, 255, 254, 254), - ((1, 254, 254), (1, 254, 1)): (1, 255, 254, 255), - ((1, 254, 254), (65, 199, 65)): (17, 255, 240, 255), - ((1, 254, 254), (126, 127, 126)): (63, 255, 191, 255), - ((1, 254, 254), (127, 126, 127)): (64, 255, 190, 255), - ((1, 254, 254), (199, 65, 199)): (155, 255, 107, 255), - ((1, 254, 254), (254, 1, 254)): (253, 255, 2, 255), - ((1, 254, 254), (255, 0, 255)): (255, 255, 0, 255), - ((65, 199, 199), (0, 255, 0)): (65, 255, 199, 199), - ((65, 199, 199), (1, 254, 1)): (64, 255, 200, 200), - ((65, 199, 199), (65, 199, 65)): (65, 255, 199, 214), - ((65, 199, 199), (126, 127, 126)): (95, 255, 164, 227), - ((65, 199, 199), (127, 126, 127)): (96, 255, 163, 227), - ((65, 199, 199), (199, 65, 199)): (169, 255, 95, 243), - ((65, 199, 199), (254, 1, 254)): (253, 255, 2, 255), - ((65, 199, 199), (255, 0, 255)): (255, 255, 0, 255), - ((126, 127, 127), (0, 255, 0)): (126, 255, 127, 127), - ((126, 127, 127), (1, 254, 1)): (125, 255, 128, 128), - ((126, 127, 127), (65, 199, 65)): (110, 255, 146, 160), - ((126, 127, 127), (126, 127, 126)): (126, 255, 127, 191), - ((126, 127, 127), (127, 126, 127)): (126, 255, 126, 191), - ((126, 127, 127), (199, 65, 199)): (183, 255, 79, 227), - ((126, 127, 127), (254, 1, 254)): (253, 255, 1, 255), - ((126, 127, 127), (255, 0, 255)): (255, 255, 0, 255), - ((127, 126, 126), (0, 255, 0)): (127, 255, 126, 126), - ((127, 126, 126), (1, 254, 1)): (126, 255, 127, 127), - ((127, 126, 126), (65, 199, 65)): (111, 255, 145, 159), - ((127, 126, 126), (126, 127, 126)): (127, 255, 126, 190), - ((127, 126, 126), (127, 126, 127)): (127, 255, 126, 191), - ((127, 126, 126), (199, 65, 199)): (183, 255, 78, 227), - ((127, 126, 126), (254, 1, 254)): (254, 255, 1, 255), - ((127, 126, 126), (255, 0, 255)): (255, 255, 0, 255), - ((199, 65, 65), (0, 255, 0)): (199, 255, 65, 65), - ((199, 65, 65), (1, 254, 1)): (198, 255, 66, 66), - ((199, 65, 65), (65, 199, 65)): (165, 255, 99, 114), - ((199, 65, 65), (126, 127, 126)): (163, 255, 96, 159), - ((199, 65, 65), (127, 126, 127)): (163, 255, 95, 160), - ((199, 65, 65), (199, 
65, 199)): (199, 255, 65, 214), - ((199, 65, 65), (254, 1, 254)): (254, 255, 1, 255), - ((199, 65, 65), (255, 0, 255)): (255, 255, 0, 255), - ((254, 1, 1), (0, 255, 0)): (254, 255, 1, 1), - ((254, 1, 1), (1, 254, 1)): (253, 255, 2, 2), - ((254, 1, 1), (65, 199, 65)): (206, 255, 52, 66), - ((254, 1, 1), (126, 127, 126)): (191, 255, 63, 127), - ((254, 1, 1), (127, 126, 127)): (191, 255, 63, 128), - ((254, 1, 1), (199, 65, 199)): (212, 255, 51, 200), - ((254, 1, 1), (254, 1, 254)): (254, 255, 1, 255), - ((254, 1, 1), (255, 0, 255)): (255, 255, 0, 255), - ((255, 0, 0), (0, 255, 0)): (0, 255, 255, 0), - ((255, 0, 0), (1, 254, 1)): (1, 255, 254, 1), - ((255, 0, 0), (65, 199, 65)): (65, 255, 199, 65), - ((255, 0, 0), (126, 127, 126)): (126, 255, 127, 126), - ((255, 0, 0), (127, 126, 127)): (127, 255, 126, 127), - ((255, 0, 0), (199, 65, 199)): (199, 255, 65, 199), - ((255, 0, 0), (254, 1, 254)): (254, 255, 1, 254), - ((255, 0, 0), (255, 0, 255)): (255, 255, 0, 255), - } - - # chosen because they contain edge cases. - nums = [0, 1, 65, 126, 127, 199, 254, 255] - results = {} - for dst_r, dst_b, dst_a in zip(nums, reversed(nums), reversed(nums)): - for src_r, src_b, src_a in zip(nums, reversed(nums), nums): - with self.subTest( - src_r=src_r, - src_b=src_b, - src_a=src_a, - dest_r=dst_r, - dest_b=dst_b, - dest_a=dst_a, - ): - src_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - src_surf.fill((src_r, 255, src_b, 255)) - src_surf.set_alpha(src_a) - dest_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - dest_surf.fill((dst_r, 255, dst_b, dst_a)) - - dest_surf.blit(src_surf, (0, 0)) - key = ((dst_r, dst_b, dst_a), (src_r, src_b, src_a)) - results[key] = dest_surf.get_at((65, 33)) - self.assertEqual(results[key], results_expected[key]) - - self.assertEqual(results, results_expected) - - @unittest.skipIf( - "arm" in platform.machine() or "aarch64" in platform.machine(), - "sdl2 blitter produces different results on arm", - ) - def test_src_alpha_sdl2_blitter(self): - """Checking that the BLEND_ALPHA_SDL2 flag works - this feature - only exists when using SDL2""" - - results_expected = { - ((0, 255, 255), (0, 255, 0)): (0, 255, 255, 255), - ((0, 255, 255), (1, 254, 1)): (0, 253, 253, 253), - ((0, 255, 255), (65, 199, 65)): (16, 253, 239, 253), - ((0, 255, 255), (126, 127, 126)): (62, 253, 190, 253), - ((0, 255, 255), (127, 126, 127)): (63, 253, 189, 253), - ((0, 255, 255), (199, 65, 199)): (154, 253, 105, 253), - ((0, 255, 255), (254, 1, 254)): (252, 253, 0, 253), - ((0, 255, 255), (255, 0, 255)): (255, 255, 0, 255), - ((1, 254, 254), (0, 255, 0)): (1, 255, 254, 254), - ((1, 254, 254), (1, 254, 1)): (0, 253, 252, 252), - ((1, 254, 254), (65, 199, 65)): (16, 253, 238, 252), - ((1, 254, 254), (126, 127, 126)): (62, 253, 189, 252), - ((1, 254, 254), (127, 126, 127)): (63, 253, 189, 253), - ((1, 254, 254), (199, 65, 199)): (154, 253, 105, 253), - ((1, 254, 254), (254, 1, 254)): (252, 253, 0, 253), - ((1, 254, 254), (255, 0, 255)): (255, 255, 0, 255), - ((65, 199, 199), (0, 255, 0)): (65, 255, 199, 199), - ((65, 199, 199), (1, 254, 1)): (64, 253, 197, 197), - ((65, 199, 199), (65, 199, 65)): (64, 253, 197, 211), - ((65, 199, 199), (126, 127, 126)): (94, 253, 162, 225), - ((65, 199, 199), (127, 126, 127)): (95, 253, 161, 225), - ((65, 199, 199), (199, 65, 199)): (168, 253, 93, 241), - ((65, 199, 199), (254, 1, 254)): (252, 253, 0, 253), - ((65, 199, 199), (255, 0, 255)): (255, 255, 0, 255), - ((126, 127, 127), (0, 255, 0)): (126, 255, 127, 127), - ((126, 127, 127), (1, 254, 1)): (125, 253, 126, 126), 
- ((126, 127, 127), (65, 199, 65)): (109, 253, 144, 158), - ((126, 127, 127), (126, 127, 126)): (125, 253, 125, 188), - ((126, 127, 127), (127, 126, 127)): (126, 253, 125, 189), - ((126, 127, 127), (199, 65, 199)): (181, 253, 77, 225), - ((126, 127, 127), (254, 1, 254)): (252, 253, 0, 253), - ((126, 127, 127), (255, 0, 255)): (255, 255, 0, 255), - ((127, 126, 126), (0, 255, 0)): (127, 255, 126, 126), - ((127, 126, 126), (1, 254, 1)): (126, 253, 125, 125), - ((127, 126, 126), (65, 199, 65)): (110, 253, 143, 157), - ((127, 126, 126), (126, 127, 126)): (125, 253, 125, 188), - ((127, 126, 126), (127, 126, 127)): (126, 253, 125, 189), - ((127, 126, 126), (199, 65, 199)): (181, 253, 77, 225), - ((127, 126, 126), (254, 1, 254)): (252, 253, 0, 253), - ((127, 126, 126), (255, 0, 255)): (255, 255, 0, 255), - ((199, 65, 65), (0, 255, 0)): (199, 255, 65, 65), - ((199, 65, 65), (1, 254, 1)): (197, 253, 64, 64), - ((199, 65, 65), (65, 199, 65)): (163, 253, 98, 112), - ((199, 65, 65), (126, 127, 126)): (162, 253, 94, 157), - ((199, 65, 65), (127, 126, 127)): (162, 253, 94, 158), - ((199, 65, 65), (199, 65, 199)): (197, 253, 64, 212), - ((199, 65, 65), (254, 1, 254)): (252, 253, 0, 253), - ((199, 65, 65), (255, 0, 255)): (255, 255, 0, 255), - ((254, 1, 1), (0, 255, 0)): (254, 255, 1, 1), - ((254, 1, 1), (1, 254, 1)): (252, 253, 0, 0), - ((254, 1, 1), (65, 199, 65)): (204, 253, 50, 64), - ((254, 1, 1), (126, 127, 126)): (189, 253, 62, 125), - ((254, 1, 1), (127, 126, 127)): (190, 253, 62, 126), - ((254, 1, 1), (199, 65, 199)): (209, 253, 50, 198), - ((254, 1, 1), (254, 1, 254)): (252, 253, 0, 253), - ((254, 1, 1), (255, 0, 255)): (255, 255, 0, 255), - ((255, 0, 0), (0, 255, 0)): (255, 255, 0, 0), - ((255, 0, 0), (1, 254, 1)): (253, 253, 0, 0), - ((255, 0, 0), (65, 199, 65)): (205, 253, 50, 64), - ((255, 0, 0), (126, 127, 126)): (190, 253, 62, 125), - ((255, 0, 0), (127, 126, 127)): (190, 253, 62, 126), - ((255, 0, 0), (199, 65, 199)): (209, 253, 50, 198), - ((255, 0, 0), (254, 1, 254)): (252, 253, 0, 253), - ((255, 0, 0), (255, 0, 255)): (255, 255, 0, 255), - } - - # chosen because they contain edge cases. 
- nums = [0, 1, 65, 126, 127, 199, 254, 255] - results = {} - for dst_r, dst_b, dst_a in zip(nums, reversed(nums), reversed(nums)): - for src_r, src_b, src_a in zip(nums, reversed(nums), nums): - with self.subTest( - src_r=src_r, - src_b=src_b, - src_a=src_a, - dest_r=dst_r, - dest_b=dst_b, - dest_a=dst_a, - ): - src_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - src_surf.fill((src_r, 255, src_b, src_a)) - dest_surf = pygame.Surface((66, 66), pygame.SRCALPHA, 32) - dest_surf.fill((dst_r, 255, dst_b, dst_a)) - - dest_surf.blit( - src_surf, (0, 0), special_flags=pygame.BLEND_ALPHA_SDL2 - ) - key = ((dst_r, dst_b, dst_a), (src_r, src_b, src_a)) - results[key] = dest_surf.get_at((65, 33)) - self.assertEqual(results[key], results_expected[key]) - - # print("(dest_r, dest_b, dest_a), (src_r, src_b, src_a): color") - # pprint(results) - self.assertEqual(results, results_expected) - - def test_opaque_destination_blit_with_set_alpha(self): - # no set_alpha() - src_surf = pygame.Surface((32, 32), pygame.SRCALPHA, 32) - src_surf.fill((255, 255, 255, 200)) - dest_surf = pygame.Surface((32, 32)) - dest_surf.fill((100, 100, 100)) - - dest_surf.blit(src_surf, (0, 0)) - - no_surf_alpha_col = dest_surf.get_at((0, 0)) - - dest_surf.fill((100, 100, 100)) - dest_surf.set_alpha(200) - dest_surf.blit(src_surf, (0, 0)) - - surf_alpha_col = dest_surf.get_at((0, 0)) - - self.assertEqual(no_surf_alpha_col, surf_alpha_col) - - def todo_test_convert(self): - - # __doc__ (as of 2008-08-02) for pygame.surface.Surface.convert: - - # Surface.convert(Surface): return Surface - # Surface.convert(depth, flags=0): return Surface - # Surface.convert(masks, flags=0): return Surface - # Surface.convert(): return Surface - # change the pixel format of an image - # - # Creates a new copy of the Surface with the pixel format changed. The - # new pixel format can be determined from another existing Surface. - # Otherwise depth, flags, and masks arguments can be used, similar to - # the pygame.Surface() call. - # - # If no arguments are passed the new Surface will have the same pixel - # format as the display Surface. This is always the fastest format for - # blitting. It is a good idea to convert all Surfaces before they are - # blitted many times. - # - # The converted Surface will have no pixel alphas. They will be - # stripped if the original had them. See Surface.convert_alpha() for - # preserving or creating per-pixel alphas. 
- # - - self.fail() - - def test_convert__pixel_format_as_surface_subclass(self): - """Ensure convert accepts a Surface subclass argument.""" - expected_size = (23, 17) - convert_surface = SurfaceSubclass(expected_size, 0, 32) - depth_surface = SurfaceSubclass((31, 61), 0, 32) - - pygame.display.init() - try: - surface = convert_surface.convert(depth_surface) - - self.assertIsNot(surface, depth_surface) - self.assertIsNot(surface, convert_surface) - self.assertIsInstance(surface, pygame.Surface) - self.assertIsInstance(surface, SurfaceSubclass) - self.assertEqual(surface.get_size(), expected_size) - finally: - pygame.display.quit() - - def test_convert_alpha(self): - """Ensure the surface returned by surf.convert_alpha - has alpha values added""" - pygame.display.init() - try: - pygame.display.set_mode((640, 480)) - - s1 = pygame.Surface((100, 100), 0, 32) - s1_alpha = pygame.Surface.convert_alpha(s1) - - s2 = pygame.Surface((100, 100), 0, 32) - s2_alpha = s2.convert_alpha() - - s3 = pygame.Surface((100, 100), 0, 8) - s3_alpha = s3.convert_alpha() - - s4 = pygame.Surface((100, 100), 0, 12) - s4_alpha = s4.convert_alpha() - - s5 = pygame.Surface((100, 100), 0, 15) - s5_alpha = s5.convert_alpha() - - s6 = pygame.Surface((100, 100), 0, 16) - s6_alpha = s6.convert_alpha() - - s7 = pygame.Surface((100, 100), 0, 24) - s7_alpha = s7.convert_alpha() - - self.assertEqual(s1_alpha.get_alpha(), 255) - self.assertEqual(s2_alpha.get_alpha(), 255) - self.assertEqual(s3_alpha.get_alpha(), 255) - self.assertEqual(s4_alpha.get_alpha(), 255) - self.assertEqual(s5_alpha.get_alpha(), 255) - self.assertEqual(s6_alpha.get_alpha(), 255) - self.assertEqual(s7_alpha.get_alpha(), 255) - - self.assertEqual(s1_alpha.get_bitsize(), 32) - self.assertEqual(s2_alpha.get_bitsize(), 32) - self.assertEqual(s3_alpha.get_bitsize(), 32) - self.assertEqual(s4_alpha.get_bitsize(), 32) - self.assertEqual(s5_alpha.get_bitsize(), 32) - self.assertEqual(s6_alpha.get_bitsize(), 32) - self.assertEqual(s6_alpha.get_bitsize(), 32) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.convert_alpha() - - finally: - pygame.display.quit() - - def test_convert_alpha__pixel_format_as_surface_subclass(self): - """Ensure convert_alpha accepts a Surface subclass argument.""" - expected_size = (23, 17) - convert_surface = SurfaceSubclass(expected_size, SRCALPHA, 32) - depth_surface = SurfaceSubclass((31, 57), SRCALPHA, 32) - - pygame.display.init() - try: - pygame.display.set_mode((60, 60)) - - # This is accepted as an argument, but its values are ignored. - # See issue #599. 
- surface = convert_surface.convert_alpha(depth_surface) - - self.assertIsNot(surface, depth_surface) - self.assertIsNot(surface, convert_surface) - self.assertIsInstance(surface, pygame.Surface) - self.assertIsInstance(surface, SurfaceSubclass) - self.assertEqual(surface.get_size(), expected_size) - finally: - pygame.display.quit() - - def test_get_abs_offset(self): - pygame.display.init() - try: - parent = pygame.Surface((64, 64), SRCALPHA, 32) - - # Stack bunch of subsurfaces - sub_level_1 = parent.subsurface((2, 2), (34, 37)) - sub_level_2 = sub_level_1.subsurface((0, 0), (30, 29)) - sub_level_3 = sub_level_2.subsurface((3, 7), (20, 21)) - sub_level_4 = sub_level_3.subsurface((6, 1), (14, 14)) - sub_level_5 = sub_level_4.subsurface((5, 6), (3, 4)) - - # Parent is always (0, 0) - self.assertEqual(parent.get_abs_offset(), (0, 0)) - # Total offset: (0+2, 0+2) = (2, 2) - self.assertEqual(sub_level_1.get_abs_offset(), (2, 2)) - # Total offset: (0+2+0, 0+2+0) = (2, 2) - self.assertEqual(sub_level_2.get_abs_offset(), (2, 2)) - # Total offset: (0+2+0+3, 0+2+0+7) = (5, 9) - self.assertEqual(sub_level_3.get_abs_offset(), (5, 9)) - # Total offset: (0+2+0+3+6, 0+2+0+7+1) = (11, 10) - self.assertEqual(sub_level_4.get_abs_offset(), (11, 10)) - # Total offset: (0+2+0+3+6+5, 0+2+0+7+1+6) = (16, 16) - self.assertEqual(sub_level_5.get_abs_offset(), (16, 16)) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.get_abs_offset() - finally: - pygame.display.quit() - - def test_get_abs_parent(self): - pygame.display.init() - try: - parent = pygame.Surface((32, 32), SRCALPHA, 32) - - # Stack bunch of subsurfaces - sub_level_1 = parent.subsurface((1, 1), (15, 15)) - sub_level_2 = sub_level_1.subsurface((1, 1), (12, 12)) - sub_level_3 = sub_level_2.subsurface((1, 1), (9, 9)) - sub_level_4 = sub_level_3.subsurface((1, 1), (8, 8)) - sub_level_5 = sub_level_4.subsurface((2, 2), (3, 4)) - sub_level_6 = sub_level_5.subsurface((0, 0), (2, 1)) - - # Can't have subsurfaces bigger than parents - self.assertRaises(ValueError, parent.subsurface, (5, 5), (100, 100)) - self.assertRaises(ValueError, sub_level_3.subsurface, (0, 0), (11, 5)) - self.assertRaises(ValueError, sub_level_6.subsurface, (0, 0), (5, 5)) - - # Calling get_abs_parent on parent should return itself - self.assertEqual(parent.get_abs_parent(), parent) - - # On subclass "depth" of 1, get_abs_parent and get_parent should return the same - self.assertEqual(sub_level_1.get_abs_parent(), sub_level_1.get_parent()) - self.assertEqual(sub_level_2.get_abs_parent(), parent) - self.assertEqual(sub_level_3.get_abs_parent(), parent) - self.assertEqual(sub_level_4.get_abs_parent(), parent) - self.assertEqual(sub_level_5.get_abs_parent(), parent) - self.assertEqual( - sub_level_6.get_abs_parent(), sub_level_6.get_parent().get_abs_parent() - ) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.get_abs_parent() - finally: - pygame.display.quit() - - def test_get_at(self): - surf = pygame.Surface((2, 2), 0, 24) - c00 = pygame.Color(1, 2, 3) - c01 = pygame.Color(5, 10, 15) - c10 = pygame.Color(100, 50, 0) - c11 = pygame.Color(4, 5, 6) - surf.set_at((0, 0), c00) - surf.set_at((0, 1), c01) - surf.set_at((1, 0), c10) - surf.set_at((1, 1), c11) - c = surf.get_at((0, 0)) - self.assertIsInstance(c, pygame.Color) - self.assertEqual(c, c00) - self.assertEqual(surf.get_at((0, 1)), c01) - self.assertEqual(surf.get_at((1, 0)), c10) - 
self.assertEqual(surf.get_at((1, 1)), c11) - for p in [(-1, 0), (0, -1), (2, 0), (0, 2)]: - self.assertRaises(IndexError, surf.get_at, p) - - def test_get_at_mapped(self): - color = pygame.Color(10, 20, 30) - for bitsize in [8, 16, 24, 32]: - surf = pygame.Surface((2, 2), 0, bitsize) - surf.fill(color) - pixel = surf.get_at_mapped((0, 0)) - self.assertEqual( - pixel, - surf.map_rgb(color), - "%i != %i, bitsize: %i" % (pixel, surf.map_rgb(color), bitsize), - ) - - def test_get_bitsize(self): - pygame.display.init() - try: - expected_size = (11, 21) - - # Check that get_bitsize returns passed depth - expected_depth = 32 - surface = pygame.Surface(expected_size, pygame.SRCALPHA, expected_depth) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_bitsize(), expected_depth) - - expected_depth = 16 - surface = pygame.Surface(expected_size, pygame.SRCALPHA, expected_depth) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_bitsize(), expected_depth) - - expected_depth = 15 - surface = pygame.Surface(expected_size, 0, expected_depth) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_bitsize(), expected_depth) - # Check for invalid depths - expected_depth = -1 - self.assertRaises( - ValueError, pygame.Surface, expected_size, 0, expected_depth - ) - expected_depth = 11 - self.assertRaises( - ValueError, pygame.Surface, expected_size, 0, expected_depth - ) - expected_depth = 1024 - self.assertRaises( - ValueError, pygame.Surface, expected_size, 0, expected_depth - ) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.get_bitsize() - finally: - pygame.display.quit() - - def test_get_clip(self): - s = pygame.Surface((800, 600)) - rectangle = s.get_clip() - self.assertEqual(rectangle, (0, 0, 800, 600)) - - def test_get_colorkey(self): - pygame.display.init() - try: - # if set_colorkey is not used - s = pygame.Surface((800, 600), 0, 32) - self.assertIsNone(s.get_colorkey()) - - # if set_colorkey is used - s.set_colorkey(None) - self.assertIsNone(s.get_colorkey()) - - # setting up remainder of tests... - r, g, b, a = 20, 40, 60, 12 - colorkey = pygame.Color(r, g, b) - s.set_colorkey(colorkey) - - # test for ideal case - self.assertEqual(s.get_colorkey(), (r, g, b, 255)) - - # test for if the color_key is set using pygame.RLEACCEL - s.set_colorkey(colorkey, pygame.RLEACCEL) - self.assertEqual(s.get_colorkey(), (r, g, b, 255)) - - # test for if the color key is not what's expected - s.set_colorkey(pygame.Color(r + 1, g + 1, b + 1)) - self.assertNotEqual(s.get_colorkey(), (r, g, b, 255)) - - s.set_colorkey(pygame.Color(r, g, b, a)) - # regardless of whether alpha is not 255 - # colorkey returned from surface is always 255 - self.assertEqual(s.get_colorkey(), (r, g, b, 255)) - finally: - # test for using method after display.quit() is called... 
- s = pygame.display.set_mode((200, 200), 0, 32) - pygame.display.quit() - with self.assertRaises(pygame.error): - s.get_colorkey() - - def test_get_height(self): - sizes = ((1, 1), (119, 10), (10, 119), (1, 1000), (1000, 1), (1000, 1000)) - for width, height in sizes: - surf = pygame.Surface((width, height)) - found_height = surf.get_height() - self.assertEqual(height, found_height) - - def test_get_locked(self): - def blit_locked_test(surface): - newSurf = pygame.Surface((10, 10)) - try: - newSurf.blit(surface, (0, 0)) - except pygame.error: - return True - else: - return False - - surf = pygame.Surface((100, 100)) - - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Unlocked - # Surface should lock - surf.lock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Locked - # Surface should unlock - surf.unlock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Unlocked - - # Check multiple locks - surf = pygame.Surface((100, 100)) - surf.lock() - surf.lock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Locked - surf.unlock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Locked - surf.unlock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Unlocked - - # Check many locks - surf = pygame.Surface((100, 100)) - for i in range(1000): - surf.lock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Locked - for i in range(1000): - surf.unlock() - self.assertFalse(surf.get_locked()) # Unlocked - - # Unlocking an unlocked surface - surf = pygame.Surface((100, 100)) - surf.unlock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Unlocked - surf.unlock() - self.assertIs(surf.get_locked(), blit_locked_test(surf)) # Unlocked - - def test_get_locks(self): - - # __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_locks: - - # Surface.get_locks(): return tuple - # Gets the locks for the Surface - # - # Returns the currently existing locks for the Surface. - - # test on a surface that is not initially locked - surface = pygame.Surface((100, 100)) - self.assertEqual(surface.get_locks(), ()) - - # test on the same surface after it has been locked - surface.lock() - self.assertEqual(surface.get_locks(), (surface,)) - - # test on the same surface after it has been unlocked - surface.unlock() - self.assertEqual(surface.get_locks(), ()) - - # test with PixelArray initialization: locks surface - pxarray = pygame.PixelArray(surface) - self.assertNotEqual(surface.get_locks(), ()) - - # closing the PixelArray releases the surface lock - pxarray.close() - self.assertEqual(surface.get_locks(), ()) - - # AttributeError raised when called on invalid object type (i.e. 
not a pygame.Surface object) - with self.assertRaises(AttributeError): - "DUMMY".get_locks() - - # test multiple locks and unlocks on the same surface - surface.lock() - surface.lock() - surface.lock() - self.assertEqual(surface.get_locks(), (surface, surface, surface)) - - surface.unlock() - surface.unlock() - self.assertEqual(surface.get_locks(), (surface,)) - surface.unlock() - self.assertEqual(surface.get_locks(), ()) - - def test_get_losses(self): - """Ensure a surface's losses can be retrieved""" - pygame.display.init() - try: - # Masks for different color component configurations - mask8 = (224, 28, 3, 0) - mask15 = (31744, 992, 31, 0) - mask16 = (63488, 2016, 31, 0) - mask24 = (4278190080, 16711680, 65280, 0) - mask32 = (4278190080, 16711680, 65280, 255) - - # Surfaces with standard depths and masks - display_surf = pygame.display.set_mode((100, 100)) - surf = pygame.Surface((100, 100)) - surf_8bit = pygame.Surface((100, 100), depth=8, masks=mask8) - surf_15bit = pygame.Surface((100, 100), depth=15, masks=mask15) - surf_16bit = pygame.Surface((100, 100), depth=16, masks=mask16) - surf_24bit = pygame.Surface((100, 100), depth=24, masks=mask24) - surf_32bit = pygame.Surface((100, 100), depth=32, masks=mask32) - - # Test output is correct type, length, and value range - losses = surf.get_losses() - self.assertIsInstance(losses, tuple) - self.assertEqual(len(losses), 4) - for loss in losses: - self.assertIsInstance(loss, int) - self.assertGreaterEqual(loss, 0) - self.assertLessEqual(loss, 8) - - # Test each surface for correct losses - # Display surface losses gives idea of default surface losses - if display_surf.get_losses() == (0, 0, 0, 8): - self.assertEqual(losses, (0, 0, 0, 8)) - elif display_surf.get_losses() == (8, 8, 8, 8): - self.assertEqual(losses, (8, 8, 8, 8)) - - self.assertEqual(surf_8bit.get_losses(), (5, 5, 6, 8)) - self.assertEqual(surf_15bit.get_losses(), (3, 3, 3, 8)) - self.assertEqual(surf_16bit.get_losses(), (3, 2, 3, 8)) - self.assertEqual(surf_24bit.get_losses(), (0, 0, 0, 8)) - self.assertEqual(surf_32bit.get_losses(), (0, 0, 0, 0)) - - # Method should fail when display is not initialized - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode((100, 100)) - pygame.display.quit() - surface.get_losses() - finally: - pygame.display.quit() - - def test_get_masks__rgba(self): - """ - Ensure that get_mask can return RGBA mask. - """ - masks = [ - (0x0F00, 0x00F0, 0x000F, 0xF000), - (0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000), - ] - depths = [16, 32] - for expected, depth in list(zip(masks, depths)): - surface = pygame.Surface((10, 10), pygame.SRCALPHA, depth) - self.assertEqual(expected, surface.get_masks()) - - def test_get_masks__rgb(self): - """ - Ensure that get_mask can return RGB mask. - """ - masks = [ - (0x60, 0x1C, 0x03, 0x00), - (0xF00, 0x0F0, 0x00F, 0x000), - (0x7C00, 0x03E0, 0x001F, 0x0000), - (0xF800, 0x07E0, 0x001F, 0x0000), - (0xFF0000, 0x00FF00, 0x0000FF, 0x000000), - (0xFF0000, 0x00FF00, 0x0000FF, 0x000000), - ] - depths = [8, 12, 15, 16, 24, 32] - for expected, depth in list(zip(masks, depths)): - surface = pygame.Surface((10, 10), 0, depth) - if depth == 8: - expected = (0x00, 0x00, 0x00, 0x00) - self.assertEqual(expected, surface.get_masks()) - - def test_get_masks__no_surface(self): - """ - Ensure that after display.quit, calling get_masks raises pygame.error. 
- """ - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode((10, 10)) - pygame.display.quit() - surface.get_masks() - - def test_get_offset(self): - """get_offset returns the (0,0) if surface is not a child - returns the position of child subsurface inside of parent - """ - pygame.display.init() - try: - surf = pygame.Surface((100, 100)) - self.assertEqual(surf.get_offset(), (0, 0)) - - # subsurface offset test - subsurf = surf.subsurface(1, 1, 10, 10) - self.assertEqual(subsurf.get_offset(), (1, 1)) - - with self.assertRaises(pygame.error): - surface = pygame.display.set_mode() - pygame.display.quit() - surface.get_offset() - finally: - pygame.display.quit() - - def test_get_palette(self): - pygame.display.init() - try: - palette = [Color(i, i, i) for i in range(256)] - pygame.display.set_mode((100, 50)) - surf = pygame.Surface((2, 2), 0, 8) - surf.set_palette(palette) - palette2 = surf.get_palette() - r, g, b = palette2[0] - - self.assertEqual(len(palette2), len(palette)) - for c2, c in zip(palette2, palette): - self.assertEqual(c2, c) - for c in palette2: - self.assertIsInstance(c, pygame.Color) - finally: - pygame.display.quit() - - def test_get_palette_at(self): - # See also test_get_palette - pygame.display.init() - try: - pygame.display.set_mode((100, 50)) - surf = pygame.Surface((2, 2), 0, 8) - color = pygame.Color(1, 2, 3, 255) - surf.set_palette_at(0, color) - color2 = surf.get_palette_at(0) - self.assertIsInstance(color2, pygame.Color) - self.assertEqual(color2, color) - self.assertRaises(IndexError, surf.get_palette_at, -1) - self.assertRaises(IndexError, surf.get_palette_at, 256) - finally: - pygame.display.quit() - - def test_get_pitch(self): - # Test get_pitch() on several surfaces of varying size/depth - sizes = ((2, 2), (7, 33), (33, 7), (2, 734), (734, 2), (734, 734)) - depths = [8, 24, 32] - for width, height in sizes: - for depth in depths: - # Test get_pitch() on parent surface - surf = pygame.Surface((width, height), depth=depth) - buff = surf.get_buffer() - pitch = buff.length / surf.get_height() - test_pitch = surf.get_pitch() - self.assertEqual(pitch, test_pitch) - # Test get_pitch() on subsurface with same rect as parent - rect1 = surf.get_rect() - subsurf1 = surf.subsurface(rect1) - sub_buff1 = subsurf1.get_buffer() - sub_pitch1 = sub_buff1.length / subsurf1.get_height() - test_sub_pitch1 = subsurf1.get_pitch() - self.assertEqual(sub_pitch1, test_sub_pitch1) - # Test get_pitch on subsurface with modified rect - rect2 = rect1.inflate(-width / 2, -height / 2) - subsurf2 = surf.subsurface(rect2) - sub_buff2 = subsurf2.get_buffer() - sub_pitch2 = sub_buff2.length / float(subsurf2.get_height()) - test_sub_pitch2 = subsurf2.get_pitch() - self.assertEqual(sub_pitch2, test_sub_pitch2) - - def test_get_shifts(self): - """ - Tests whether Surface.get_shifts returns proper - RGBA shifts under various conditions. - """ - # __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_shifts: - # Surface.get_shifts(): return (R, G, B, A) - # the bit shifts needed to convert between color and mapped integer. - # Returns the pixel shifts need to convert between each color and a - # mapped integer. - # This value is not needed for normal Pygame usage. 
- - # Test for SDL2 on surfaces with various depths and alpha on/off - depths = [8, 24, 32] - alpha = 128 - off = None - for bit_depth in depths: - surface = pygame.Surface((32, 32), depth=bit_depth) - surface.set_alpha(alpha) - r1, g1, b1, a1 = surface.get_shifts() - surface.set_alpha(off) - r2, g2, b2, a2 = surface.get_shifts() - self.assertEqual((r1, g1, b1, a1), (r2, g2, b2, a2)) - - def test_get_size(self): - sizes = ((1, 1), (119, 10), (1000, 1000), (1, 5000), (1221, 1), (99, 999)) - for width, height in sizes: - surf = pygame.Surface((width, height)) - found_size = surf.get_size() - self.assertEqual((width, height), found_size) - - def test_lock(self): - - # __doc__ (as of 2008-08-02) for pygame.surface.Surface.lock: - - # Surface.lock(): return None - # lock the Surface memory for pixel access - # - # Lock the pixel data of a Surface for access. On accelerated - # Surfaces, the pixel data may be stored in volatile video memory or - # nonlinear compressed forms. When a Surface is locked the pixel - # memory becomes available to access by regular software. Code that - # reads or writes pixel values will need the Surface to be locked. - # - # Surfaces should not remain locked for more than necessary. A locked - # Surface can often not be displayed or managed by Pygame. - # - # Not all Surfaces require locking. The Surface.mustlock() method can - # determine if it is actually required. There is no performance - # penalty for locking and unlocking a Surface that does not need it. - # - # All pygame functions will automatically lock and unlock the Surface - # data as needed. If a section of code is going to make calls that - # will repeatedly lock and unlock the Surface many times, it can be - # helpful to wrap the block inside a lock and unlock pair. - # - # It is safe to nest locking and unlocking calls. The surface will - # only be unlocked after the final lock is released. 
- # - - # Basic - surf = pygame.Surface((100, 100)) - surf.lock() - self.assertTrue(surf.get_locked()) - - # Nested - surf = pygame.Surface((100, 100)) - surf.lock() - surf.lock() - surf.unlock() - self.assertTrue(surf.get_locked()) - surf.unlock() - surf.lock() - surf.lock() - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertFalse(surf.get_locked()) - - # Already Locked - surf = pygame.Surface((100, 100)) - surf.lock() - surf.lock() - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertFalse(surf.get_locked()) - - def test_map_rgb(self): - color = Color(0, 128, 255, 64) - surf = pygame.Surface((5, 5), SRCALPHA, 32) - c = surf.map_rgb(color) - self.assertEqual(surf.unmap_rgb(c), color) - - self.assertEqual(surf.get_at((0, 0)), (0, 0, 0, 0)) - surf.fill(c) - self.assertEqual(surf.get_at((0, 0)), color) - - surf.fill((0, 0, 0, 0)) - self.assertEqual(surf.get_at((0, 0)), (0, 0, 0, 0)) - surf.set_at((0, 0), c) - self.assertEqual(surf.get_at((0, 0)), color) - - def test_mustlock(self): - # Test that subsurfaces mustlock - surf = pygame.Surface((1024, 1024)) - subsurf = surf.subsurface((0, 0, 1024, 1024)) - self.assertTrue(subsurf.mustlock()) - self.assertFalse(surf.mustlock()) - # Tests nested subsurfaces - rects = ((0, 0, 512, 512), (0, 0, 256, 256), (0, 0, 128, 128)) - surf_stack = [] - surf_stack.append(surf) - surf_stack.append(subsurf) - for rect in rects: - surf_stack.append(surf_stack[-1].subsurface(rect)) - self.assertTrue(surf_stack[-1].mustlock()) - self.assertTrue(surf_stack[-2].mustlock()) - - def test_set_alpha_none(self): - """surf.set_alpha(None) disables blending""" - s = pygame.Surface((1, 1), SRCALPHA, 32) - s.fill((0, 255, 0, 128)) - s.set_alpha(None) - self.assertEqual(None, s.get_alpha()) - - s2 = pygame.Surface((1, 1), SRCALPHA, 32) - s2.fill((255, 0, 0, 255)) - s2.blit(s, (0, 0)) - self.assertEqual(s2.get_at((0, 0))[0], 0, "the red component should be 0") - - def test_set_alpha_value(self): - """surf.set_alpha(x), where x != None, enables blending""" - s = pygame.Surface((1, 1), SRCALPHA, 32) - s.fill((0, 255, 0, 128)) - s.set_alpha(255) - - s2 = pygame.Surface((1, 1), SRCALPHA, 32) - s2.fill((255, 0, 0, 255)) - s2.blit(s, (0, 0)) - self.assertGreater( - s2.get_at((0, 0))[0], 0, "the red component should be above 0" - ) - - def test_palette_colorkey(self): - """test bug discovered by robertpfeiffer - https://github.com/pygame/pygame/issues/721 - """ - surf = pygame.image.load(example_path(os.path.join("data", "alien2.png"))) - key = surf.get_colorkey() - self.assertEqual(surf.get_palette()[surf.map_rgb(key)], key) - - def test_palette_colorkey_set_px(self): - surf = pygame.image.load(example_path(os.path.join("data", "alien2.png"))) - key = surf.get_colorkey() - surf.set_at((0, 0), key) - self.assertEqual(surf.get_at((0, 0)), key) - - def test_palette_colorkey_fill(self): - surf = pygame.image.load(example_path(os.path.join("data", "alien2.png"))) - key = surf.get_colorkey() - surf.fill(key) - self.assertEqual(surf.get_at((0, 0)), key) - - def test_set_palette(self): - palette = [pygame.Color(i, i, i) for i in range(256)] - palette[10] = tuple(palette[10]) # 4 element tuple - palette[11] = tuple(palette[11])[0:3] # 3 element tuple - - surf = pygame.Surface((2, 2), 0, 8) - pygame.display.init() - try: - pygame.display.set_mode((100, 50)) - surf.set_palette(palette) - for i in range(256): - self.assertEqual(surf.map_rgb(palette[i]), i, 
"palette color %i" % (i,)) - c = palette[i] - surf.fill(c) - self.assertEqual(surf.get_at((0, 0)), c, "palette color %i" % (i,)) - for i in range(10): - palette[i] = pygame.Color(255 - i, 0, 0) - surf.set_palette(palette[0:10]) - for i in range(256): - self.assertEqual(surf.map_rgb(palette[i]), i, "palette color %i" % (i,)) - c = palette[i] - surf.fill(c) - self.assertEqual(surf.get_at((0, 0)), c, "palette color %i" % (i,)) - self.assertRaises(ValueError, surf.set_palette, [Color(1, 2, 3, 254)]) - self.assertRaises(ValueError, surf.set_palette, (1, 2, 3, 254)) - finally: - pygame.display.quit() - - def test_set_palette__fail(self): - pygame.init() - palette = 256 * [(10, 20, 30)] - surf = pygame.Surface((2, 2), 0, 32) - self.assertRaises(pygame.error, surf.set_palette, palette) - pygame.quit() - - def test_set_palette_at(self): - pygame.display.init() - try: - pygame.display.set_mode((100, 50)) - surf = pygame.Surface((2, 2), 0, 8) - original = surf.get_palette_at(10) - replacement = Color(1, 1, 1, 255) - if replacement == original: - replacement = Color(2, 2, 2, 255) - surf.set_palette_at(10, replacement) - self.assertEqual(surf.get_palette_at(10), replacement) - next = tuple(original) - surf.set_palette_at(10, next) - self.assertEqual(surf.get_palette_at(10), next) - next = tuple(original)[0:3] - surf.set_palette_at(10, next) - self.assertEqual(surf.get_palette_at(10), next) - self.assertRaises(IndexError, surf.set_palette_at, 256, replacement) - self.assertRaises(IndexError, surf.set_palette_at, -1, replacement) - finally: - pygame.display.quit() - - def test_subsurface(self): - - # __doc__ (as of 2008-08-02) for pygame.surface.Surface.subsurface: - - # Surface.subsurface(Rect): return Surface - # create a new surface that references its parent - # - # Returns a new Surface that shares its pixels with its new parent. - # The new Surface is considered a child of the original. Modifications - # to either Surface pixels will effect each other. Surface information - # like clipping area and color keys are unique to each Surface. - # - # The new Surface will inherit the palette, color key, and alpha - # settings from its parent. - # - # It is possible to have any number of subsurfaces and subsubsurfaces - # on the parent. It is also possible to subsurface the display Surface - # if the display mode is not hardware accelerated. - # - # See the Surface.get_offset(), Surface.get_parent() to learn more - # about the state of a subsurface. - # - - surf = pygame.Surface((16, 16)) - s = surf.subsurface(0, 0, 1, 1) - s = surf.subsurface((0, 0, 1, 1)) - - # s = surf.subsurface((0,0,1,1), 1) - # This form is not acceptable. 
- # s = surf.subsurface(0,0,10,10, 1) - - self.assertRaises(ValueError, surf.subsurface, (0, 0, 1, 1, 666)) - - self.assertEqual(s.get_shifts(), surf.get_shifts()) - self.assertEqual(s.get_masks(), surf.get_masks()) - self.assertEqual(s.get_losses(), surf.get_losses()) - - # Issue 2 at Bitbucket.org/pygame/pygame - surf = pygame.Surface.__new__(pygame.Surface) - self.assertRaises(pygame.error, surf.subsurface, (0, 0, 0, 0)) - - def test_unlock(self): - # Basic - surf = pygame.Surface((100, 100)) - surf.lock() - surf.unlock() - self.assertFalse(surf.get_locked()) - - # Nested - surf = pygame.Surface((100, 100)) - surf.lock() - surf.lock() - surf.unlock() - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertFalse(surf.get_locked()) - - # Already Unlocked - surf = pygame.Surface((100, 100)) - surf.unlock() - self.assertFalse(surf.get_locked()) - surf.unlock() - self.assertFalse(surf.get_locked()) - - # Surface can be relocked - surf = pygame.Surface((100, 100)) - surf.lock() - surf.unlock() - self.assertFalse(surf.get_locked()) - surf.lock() - surf.unlock() - self.assertFalse(surf.get_locked()) - - def test_unmap_rgb(self): - # Special case, 8 bit-per-pixel surface (has a palette). - surf = pygame.Surface((2, 2), 0, 8) - c = (1, 1, 1) # Unlikely to be in a default palette. - i = 67 - pygame.display.init() - try: - pygame.display.set_mode((100, 50)) - surf.set_palette_at(i, c) - unmapped_c = surf.unmap_rgb(i) - self.assertEqual(unmapped_c, c) - # Confirm it is a Color instance - self.assertIsInstance(unmapped_c, pygame.Color) - finally: - pygame.display.quit() - - # Remaining, non-pallete, cases. - c = (128, 64, 12, 255) - formats = [(0, 16), (0, 24), (0, 32), (SRCALPHA, 16), (SRCALPHA, 32)] - for flags, bitsize in formats: - surf = pygame.Surface((2, 2), flags, bitsize) - unmapped_c = surf.unmap_rgb(surf.map_rgb(c)) - surf.fill(c) - comparison_c = surf.get_at((0, 0)) - self.assertEqual( - unmapped_c, - comparison_c, - "%s != %s, flags: %i, bitsize: %i" - % (unmapped_c, comparison_c, flags, bitsize), - ) - # Confirm it is a Color instance - self.assertIsInstance(unmapped_c, pygame.Color) - - def test_scroll(self): - scrolls = [ - (8, 2, 3), - (16, 2, 3), - (24, 2, 3), - (32, 2, 3), - (32, -1, -3), - (32, 0, 0), - (32, 11, 0), - (32, 0, 11), - (32, -11, 0), - (32, 0, -11), - (32, -11, 2), - (32, 2, -11), - ] - for bitsize, dx, dy in scrolls: - surf = pygame.Surface((10, 10), 0, bitsize) - surf.fill((255, 0, 0)) - surf.fill((0, 255, 0), (2, 2, 2, 2)) - comp = surf.copy() - comp.blit(surf, (dx, dy)) - surf.scroll(dx, dy) - w, h = surf.get_size() - for x in range(w): - for y in range(h): - with self.subTest(x=x, y=y): - self.assertEqual( - surf.get_at((x, y)), - comp.get_at((x, y)), - "%s != %s, bpp:, %i, x: %i, y: %i" - % ( - surf.get_at((x, y)), - comp.get_at((x, y)), - bitsize, - dx, - dy, - ), - ) - # Confirm clip rect containment - surf = pygame.Surface((20, 13), 0, 32) - surf.fill((255, 0, 0)) - surf.fill((0, 255, 0), (7, 1, 6, 6)) - comp = surf.copy() - clip = Rect(3, 1, 8, 14) - surf.set_clip(clip) - comp.set_clip(clip) - comp.blit(surf, (clip.x + 2, clip.y + 3), surf.get_clip()) - surf.scroll(2, 3) - w, h = surf.get_size() - for x in range(w): - for y in range(h): - self.assertEqual(surf.get_at((x, y)), comp.get_at((x, y))) - # Confirm keyword arguments and per-pixel alpha - spot_color = (0, 255, 0, 128) - surf = pygame.Surface((4, 4), pygame.SRCALPHA, 32) - surf.fill((255, 0, 0, 255)) - surf.set_at((1, 1), spot_color) - surf.scroll(dx=1) - self.assertEqual(surf.get_at((2, 
1)), spot_color) - surf.scroll(dy=1) - self.assertEqual(surf.get_at((2, 2)), spot_color) - surf.scroll(dy=1, dx=1) - self.assertEqual(surf.get_at((3, 3)), spot_color) - surf.scroll(dx=-3, dy=-3) - self.assertEqual(surf.get_at((0, 0)), spot_color) - - -class SurfaceSubtypeTest(unittest.TestCase): - """Issue #280: Methods that return a new Surface preserve subclasses""" - - def setUp(self): - pygame.display.init() - - def tearDown(self): - pygame.display.quit() - - def test_copy(self): - """Ensure method copy() preserves the surface's class - - When Surface is subclassed, the inherited copy() method will return - instances of the subclass. Non Surface fields are uncopied, however. - This includes instance attributes. - """ - expected_size = (32, 32) - ms1 = SurfaceSubclass(expected_size, SRCALPHA, 32) - ms2 = ms1.copy() - - self.assertIsNot(ms1, ms2) - self.assertIsInstance(ms1, pygame.Surface) - self.assertIsInstance(ms2, pygame.Surface) - self.assertIsInstance(ms1, SurfaceSubclass) - self.assertIsInstance(ms2, SurfaceSubclass) - self.assertTrue(ms1.test_attribute) - self.assertRaises(AttributeError, getattr, ms2, "test_attribute") - self.assertEqual(ms2.get_size(), expected_size) - - def test_convert(self): - """Ensure method convert() preserves the surface's class - - When Surface is subclassed, the inherited convert() method will return - instances of the subclass. Non Surface fields are omitted, however. - This includes instance attributes. - """ - expected_size = (32, 32) - ms1 = SurfaceSubclass(expected_size, 0, 24) - ms2 = ms1.convert(24) - - self.assertIsNot(ms1, ms2) - self.assertIsInstance(ms1, pygame.Surface) - self.assertIsInstance(ms2, pygame.Surface) - self.assertIsInstance(ms1, SurfaceSubclass) - self.assertIsInstance(ms2, SurfaceSubclass) - self.assertTrue(ms1.test_attribute) - self.assertRaises(AttributeError, getattr, ms2, "test_attribute") - self.assertEqual(ms2.get_size(), expected_size) - - def test_convert_alpha(self): - """Ensure method convert_alpha() preserves the surface's class - - When Surface is subclassed, the inherited convert_alpha() method will - return instances of the subclass. Non Surface fields are omitted, - however. This includes instance attributes. - """ - pygame.display.set_mode((40, 40)) - expected_size = (32, 32) - s = pygame.Surface(expected_size, SRCALPHA, 16) - ms1 = SurfaceSubclass(expected_size, SRCALPHA, 32) - ms2 = ms1.convert_alpha(s) - - self.assertIsNot(ms1, ms2) - self.assertIsInstance(ms1, pygame.Surface) - self.assertIsInstance(ms2, pygame.Surface) - self.assertIsInstance(ms1, SurfaceSubclass) - self.assertIsInstance(ms2, SurfaceSubclass) - self.assertTrue(ms1.test_attribute) - self.assertRaises(AttributeError, getattr, ms2, "test_attribute") - self.assertEqual(ms2.get_size(), expected_size) - - def test_subsurface(self): - """Ensure method subsurface() preserves the surface's class - - When Surface is subclassed, the inherited subsurface() method will - return instances of the subclass. Non Surface fields are uncopied, - however. This includes instance attributes. 
- """ - expected_size = (10, 12) - ms1 = SurfaceSubclass((32, 32), SRCALPHA, 32) - ms2 = ms1.subsurface((4, 5), expected_size) - - self.assertIsNot(ms1, ms2) - self.assertIsInstance(ms1, pygame.Surface) - self.assertIsInstance(ms2, pygame.Surface) - self.assertIsInstance(ms1, SurfaceSubclass) - self.assertIsInstance(ms2, SurfaceSubclass) - self.assertTrue(ms1.test_attribute) - self.assertRaises(AttributeError, getattr, ms2, "test_attribute") - self.assertEqual(ms2.get_size(), expected_size) - - -class SurfaceGetBufferTest(unittest.TestCase): - # These tests requires ctypes. They are disabled if ctypes - # is not installed. - try: - ArrayInterface - except NameError: - __tags__ = ("ignore", "subprocess_ignore") - - lilendian = pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN - - def _check_interface_2D(self, s): - s_w, s_h = s.get_size() - s_bytesize = s.get_bytesize() - s_pitch = s.get_pitch() - s_pixels = s._pixels_address - - # check the array interface structure fields. - v = s.get_view("2") - if not IS_PYPY: - flags = PAI_ALIGNED | PAI_NOTSWAPPED | PAI_WRITEABLE - if s.get_pitch() == s_w * s_bytesize: - flags |= PAI_FORTRAN - - inter = ArrayInterface(v) - - self.assertEqual(inter.two, 2) - self.assertEqual(inter.nd, 2) - self.assertEqual(inter.typekind, "u") - self.assertEqual(inter.itemsize, s_bytesize) - self.assertEqual(inter.shape[0], s_w) - self.assertEqual(inter.shape[1], s_h) - self.assertEqual(inter.strides[0], s_bytesize) - self.assertEqual(inter.strides[1], s_pitch) - self.assertEqual(inter.flags, flags) - self.assertEqual(inter.data, s_pixels) - - def _check_interface_3D(self, s): - s_w, s_h = s.get_size() - s_bytesize = s.get_bytesize() - s_pitch = s.get_pitch() - s_pixels = s._pixels_address - s_shifts = list(s.get_shifts()) - - # Check for RGB or BGR surface. - if s_shifts[0:3] == [0, 8, 16]: - if self.lilendian: - # RGB - offset = 0 - step = 1 - else: - # BGR - offset = s_bytesize - 1 - step = -1 - elif s_shifts[0:3] == [8, 16, 24]: - if self.lilendian: - # xRGB - offset = 1 - step = 1 - else: - # BGRx - offset = s_bytesize - 2 - step = -1 - elif s_shifts[0:3] == [16, 8, 0]: - if self.lilendian: - # BGR - offset = 2 - step = -1 - else: - # RGB - offset = s_bytesize - 3 - step = 1 - elif s_shifts[0:3] == [24, 16, 8]: - if self.lilendian: - # BGRx - offset = 2 - step = -1 - else: - # RGBx - offset = s_bytesize - 4 - step = -1 - else: - return - - # check the array interface structure fields. - v = s.get_view("3") - if not IS_PYPY: - inter = ArrayInterface(v) - flags = PAI_ALIGNED | PAI_NOTSWAPPED | PAI_WRITEABLE - self.assertEqual(inter.two, 2) - self.assertEqual(inter.nd, 3) - self.assertEqual(inter.typekind, "u") - self.assertEqual(inter.itemsize, 1) - self.assertEqual(inter.shape[0], s_w) - self.assertEqual(inter.shape[1], s_h) - self.assertEqual(inter.shape[2], 3) - self.assertEqual(inter.strides[0], s_bytesize) - self.assertEqual(inter.strides[1], s_pitch) - self.assertEqual(inter.strides[2], step) - self.assertEqual(inter.flags, flags) - self.assertEqual(inter.data, s_pixels + offset) - - def _check_interface_rgba(self, s, plane): - s_w, s_h = s.get_size() - s_bytesize = s.get_bytesize() - s_pitch = s.get_pitch() - s_pixels = s._pixels_address - s_shifts = s.get_shifts() - s_masks = s.get_masks() - - # Find the color plane position within the pixel. - if not s_masks[plane]: - return - alpha_shift = s_shifts[plane] - offset = alpha_shift // 8 - if not self.lilendian: - offset = s_bytesize - offset - 1 - - # check the array interface structure fields. 
- v = s.get_view("rgba"[plane]) - if not IS_PYPY: - inter = ArrayInterface(v) - flags = PAI_ALIGNED | PAI_NOTSWAPPED | PAI_WRITEABLE - self.assertEqual(inter.two, 2) - self.assertEqual(inter.nd, 2) - self.assertEqual(inter.typekind, "u") - self.assertEqual(inter.itemsize, 1) - self.assertEqual(inter.shape[0], s_w) - self.assertEqual(inter.shape[1], s_h) - self.assertEqual(inter.strides[0], s_bytesize) - self.assertEqual(inter.strides[1], s_pitch) - self.assertEqual(inter.flags, flags) - self.assertEqual(inter.data, s_pixels + offset) - - def test_array_interface(self): - self._check_interface_2D(pygame.Surface((5, 7), 0, 8)) - self._check_interface_2D(pygame.Surface((5, 7), 0, 16)) - self._check_interface_2D(pygame.Surface((5, 7), pygame.SRCALPHA, 16)) - self._check_interface_3D(pygame.Surface((5, 7), 0, 24)) - self._check_interface_3D(pygame.Surface((8, 4), 0, 24)) # No gaps - self._check_interface_2D(pygame.Surface((5, 7), 0, 32)) - self._check_interface_3D(pygame.Surface((5, 7), 0, 32)) - self._check_interface_2D(pygame.Surface((5, 7), pygame.SRCALPHA, 32)) - self._check_interface_3D(pygame.Surface((5, 7), pygame.SRCALPHA, 32)) - - def test_array_interface_masks(self): - """Test non-default color byte orders on 3D views""" - - sz = (5, 7) - # Reversed RGB byte order - s = pygame.Surface(sz, 0, 32) - s_masks = list(s.get_masks()) - masks = [0xFF, 0xFF00, 0xFF0000] - if s_masks[0:3] == masks or s_masks[0:3] == masks[::-1]: - masks = s_masks[2::-1] + s_masks[3:4] - self._check_interface_3D(pygame.Surface(sz, 0, 32, masks)) - s = pygame.Surface(sz, 0, 24) - s_masks = list(s.get_masks()) - masks = [0xFF, 0xFF00, 0xFF0000] - if s_masks[0:3] == masks or s_masks[0:3] == masks[::-1]: - masks = s_masks[2::-1] + s_masks[3:4] - self._check_interface_3D(pygame.Surface(sz, 0, 24, masks)) - - masks = [0xFF00, 0xFF0000, 0xFF000000, 0] - self._check_interface_3D(pygame.Surface(sz, 0, 32, masks)) - - def test_array_interface_alpha(self): - for shifts in [[0, 8, 16, 24], [8, 16, 24, 0], [24, 16, 8, 0], [16, 8, 0, 24]]: - masks = [0xFF << s for s in shifts] - s = pygame.Surface((4, 2), pygame.SRCALPHA, 32, masks) - self._check_interface_rgba(s, 3) - - def test_array_interface_rgb(self): - for shifts in [[0, 8, 16, 24], [8, 16, 24, 0], [24, 16, 8, 0], [16, 8, 0, 24]]: - masks = [0xFF << s for s in shifts] - masks[3] = 0 - for plane in range(3): - s = pygame.Surface((4, 2), 0, 24) - self._check_interface_rgba(s, plane) - s = pygame.Surface((4, 2), 0, 32) - self._check_interface_rgba(s, plane) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf_PyBUF_flags_bytes(self): - from pygame.tests.test_utils import buftools - - Importer = buftools.Importer - s = pygame.Surface((10, 6), 0, 32) - a = s.get_buffer() - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 1) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - b = Importer(a, buftools.PyBUF_WRITABLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertFalse(b.readonly) - b = Importer(a, buftools.PyBUF_FORMAT) - self.assertEqual(b.ndim, 0) - self.assertEqual(b.format, "B") - b = Importer(a, buftools.PyBUF_ND) - self.assertEqual(b.ndim, 1) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 1) 
- self.assertEqual(b.shape, (a.length,)) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - b = Importer(a, buftools.PyBUF_STRIDES) - self.assertEqual(b.ndim, 1) - self.assertTrue(b.format is None) - self.assertEqual(b.strides, (1,)) - s2 = s.subsurface((1, 1, 7, 4)) # Not contiguous - a = s2.get_buffer() - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 1) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s2._pixels_address) - b = Importer(a, buftools.PyBUF_C_CONTIGUOUS) - self.assertEqual(b.ndim, 1) - self.assertEqual(b.strides, (1,)) - b = Importer(a, buftools.PyBUF_F_CONTIGUOUS) - self.assertEqual(b.ndim, 1) - self.assertEqual(b.strides, (1,)) - b = Importer(a, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertEqual(b.ndim, 1) - self.assertEqual(b.strides, (1,)) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf_PyBUF_flags_0D(self): - # This is the same handler as used by get_buffer(), so just - # confirm that it succeeds for one case. - from pygame.tests.test_utils import buftools - - Importer = buftools.Importer - s = pygame.Surface((10, 6), 0, 32) - a = s.get_view("0") - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 1) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf_PyBUF_flags_1D(self): - from pygame.tests.test_utils import buftools - - Importer = buftools.Importer - s = pygame.Surface((10, 6), 0, 32) - a = s.get_view("1") - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, s.get_bytesize()) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - b = Importer(a, buftools.PyBUF_WRITABLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertFalse(b.readonly) - b = Importer(a, buftools.PyBUF_FORMAT) - self.assertEqual(b.ndim, 0) - self.assertEqual(b.format, "=I") - b = Importer(a, buftools.PyBUF_ND) - self.assertEqual(b.ndim, 1) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, s.get_bytesize()) - self.assertEqual(b.shape, (s.get_width() * s.get_height(),)) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - b = Importer(a, buftools.PyBUF_STRIDES) - self.assertEqual(b.ndim, 1) - self.assertTrue(b.format is None) - self.assertEqual(b.strides, (s.get_bytesize(),)) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf_PyBUF_flags_2D(self): - from pygame.tests.test_utils import buftools - - Importer = buftools.Importer - s = pygame.Surface((10, 6), 0, 32) - a = s.get_view("2") - # Non dimensional 
requests, no PyDEF_ND, are handled by the - # 1D surface buffer code, so only need to confirm a success. - b = Importer(a, buftools.PyBUF_SIMPLE) - self.assertEqual(b.ndim, 0) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, s.get_bytesize()) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - # Uniquely 2D - b = Importer(a, buftools.PyBUF_STRIDES) - self.assertEqual(b.ndim, 2) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, s.get_bytesize()) - self.assertEqual(b.shape, s.get_size()) - self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch())) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address) - b = Importer(a, buftools.PyBUF_RECORDS_RO) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, "=I") - self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch())) - b = Importer(a, buftools.PyBUF_RECORDS) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, "=I") - self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch())) - b = Importer(a, buftools.PyBUF_F_CONTIGUOUS) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, None) - self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch())) - b = Importer(a, buftools.PyBUF_ANY_CONTIGUOUS) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, None) - self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch())) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - s2 = s.subsurface((1, 1, 7, 4)) # Not contiguous - a = s2.get_view("2") - b = Importer(a, buftools.PyBUF_STRIDES) - self.assertEqual(b.ndim, 2) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, s2.get_bytesize()) - self.assertEqual(b.shape, s2.get_size()) - self.assertEqual(b.strides, (s2.get_bytesize(), s.get_pitch())) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s2._pixels_address) - b = Importer(a, buftools.PyBUF_RECORDS) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, "=I") - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FORMAT) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf_PyBUF_flags_3D(self): - from pygame.tests.test_utils import buftools - - Importer = buftools.Importer - s = pygame.Surface((12, 6), 0, 24) - rmask, gmask, bmask, amask = s.get_masks() - if self.lilendian: - if rmask == 0x0000FF: - color_step = 1 - addr_offset = 0 - else: - color_step = -1 - addr_offset = 2 - else: - if rmask == 0xFF0000: - color_step = 1 - addr_offset = 0 - else: - color_step = -1 - addr_offset = 2 - a = s.get_view("3") - b = Importer(a, buftools.PyBUF_STRIDES) - w, h = s.get_size() - shape = w, h, 3 - strides = 3, s.get_pitch(), color_step - self.assertEqual(b.ndim, 3) - 
self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 1) - self.assertEqual(b.shape, shape) - self.assertEqual(b.strides, strides) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address + addr_offset) - b = Importer(a, buftools.PyBUF_RECORDS_RO) - self.assertEqual(b.ndim, 3) - self.assertEqual(b.format, "B") - self.assertEqual(b.strides, strides) - b = Importer(a, buftools.PyBUF_RECORDS) - self.assertEqual(b.ndim, 3) - self.assertEqual(b.format, "B") - self.assertEqual(b.strides, strides) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FORMAT) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - - @unittest.skipIf(not pygame.HAVE_NEWBUF, "newbuf not implemented") - def test_newbuf_PyBUF_flags_rgba(self): - # All color plane views are handled by the same routine, - # so only one plane need be checked. - from pygame.tests.test_utils import buftools - - Importer = buftools.Importer - s = pygame.Surface((12, 6), 0, 24) - rmask, gmask, bmask, amask = s.get_masks() - if self.lilendian: - if rmask == 0x0000FF: - addr_offset = 0 - else: - addr_offset = 2 - else: - if rmask == 0xFF0000: - addr_offset = 0 - else: - addr_offset = 2 - a = s.get_view("R") - b = Importer(a, buftools.PyBUF_STRIDES) - w, h = s.get_size() - shape = w, h - strides = s.get_bytesize(), s.get_pitch() - self.assertEqual(b.ndim, 2) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.length) - self.assertEqual(b.itemsize, 1) - self.assertEqual(b.shape, shape) - self.assertEqual(b.strides, strides) - self.assertTrue(b.suboffsets is None) - self.assertFalse(b.readonly) - self.assertEqual(b.buf, s._pixels_address + addr_offset) - b = Importer(a, buftools.PyBUF_RECORDS_RO) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, "B") - self.assertEqual(b.strides, strides) - b = Importer(a, buftools.PyBUF_RECORDS) - self.assertEqual(b.ndim, 2) - self.assertEqual(b.format, "B") - self.assertEqual(b.strides, strides) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FORMAT) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ANY_CONTIGUOUS) - - -class SurfaceBlendTest(unittest.TestCase): - def setUp(self): - # Needed for 8 bits-per-pixel color palette surface tests. 
- pygame.display.init() - - def tearDown(self): - pygame.display.quit() - - _test_palette = [ - (0, 0, 0, 255), - (10, 30, 60, 0), - (25, 75, 100, 128), - (200, 150, 100, 200), - (0, 100, 200, 255), - ] - surf_size = (10, 12) - _test_points = [ - ((0, 0), 1), - ((4, 5), 1), - ((9, 0), 2), - ((5, 5), 2), - ((0, 11), 3), - ((4, 6), 3), - ((9, 11), 4), - ((5, 6), 4), - ] - - def _make_surface(self, bitsize, srcalpha=False, palette=None): - if palette is None: - palette = self._test_palette - flags = 0 - if srcalpha: - flags |= SRCALPHA - surf = pygame.Surface(self.surf_size, flags, bitsize) - if bitsize == 8: - surf.set_palette([c[:3] for c in palette]) - return surf - - def _fill_surface(self, surf, palette=None): - if palette is None: - palette = self._test_palette - surf.fill(palette[1], (0, 0, 5, 6)) - surf.fill(palette[2], (5, 0, 5, 6)) - surf.fill(palette[3], (0, 6, 5, 6)) - surf.fill(palette[4], (5, 6, 5, 6)) - - def _make_src_surface(self, bitsize, srcalpha=False, palette=None): - surf = self._make_surface(bitsize, srcalpha, palette) - self._fill_surface(surf, palette) - return surf - - def _assert_surface(self, surf, palette=None, msg=""): - if palette is None: - palette = self._test_palette - if surf.get_bitsize() == 16: - palette = [surf.unmap_rgb(surf.map_rgb(c)) for c in palette] - for posn, i in self._test_points: - self.assertEqual( - surf.get_at(posn), - palette[i], - "%s != %s: flags: %i, bpp: %i, posn: %s%s" - % ( - surf.get_at(posn), - palette[i], - surf.get_flags(), - surf.get_bitsize(), - posn, - msg, - ), - ) - - def test_blit_blend(self): - sources = [ - self._make_src_surface(8), - self._make_src_surface(16), - self._make_src_surface(16, srcalpha=True), - self._make_src_surface(24), - self._make_src_surface(32), - self._make_src_surface(32, srcalpha=True), - ] - destinations = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - blend = [ - ("BLEND_ADD", (0, 25, 100, 255), lambda a, b: min(a + b, 255)), - ("BLEND_SUB", (100, 25, 0, 100), lambda a, b: max(a - b, 0)), - ("BLEND_MULT", (100, 200, 0, 0), lambda a, b: (a * b) // 256), - ("BLEND_MIN", (255, 0, 0, 255), min), - ("BLEND_MAX", (0, 255, 0, 255), max), - ] - - for src in sources: - src_palette = [src.unmap_rgb(src.map_rgb(c)) for c in self._test_palette] - for dst in destinations: - for blend_name, dst_color, op in blend: - dc = dst.unmap_rgb(dst.map_rgb(dst_color)) - p = [] - for sc in src_palette: - c = [op(dc[i], sc[i]) for i in range(3)] - if dst.get_masks()[3]: - c.append(dc[3]) - else: - c.append(255) - c = dst.unmap_rgb(dst.map_rgb(c)) - p.append(c) - dst.fill(dst_color) - dst.blit(src, (0, 0), special_flags=getattr(pygame, blend_name)) - self._assert_surface( - dst, - p, - ( - ", op: %s, src bpp: %i" - ", src flags: %i" - % (blend_name, src.get_bitsize(), src.get_flags()) - ), - ) - - src = self._make_src_surface(32) - masks = src.get_masks() - dst = pygame.Surface( - src.get_size(), 0, 32, [masks[2], masks[1], masks[0], masks[3]] - ) - for blend_name, dst_color, op in blend: - p = [] - for src_color in self._test_palette: - c = [op(dst_color[i], src_color[i]) for i in range(3)] - c.append(255) - p.append(tuple(c)) - dst.fill(dst_color) - dst.blit(src, (0, 0), special_flags=getattr(pygame, blend_name)) - self._assert_surface(dst, p, ", %s" % blend_name) - - # Blend blits are special cased for 32 to 32 bit surfaces. 
- # - # Confirm that it works when the rgb bytes are not the - # least significant bytes. - pat = self._make_src_surface(32) - masks = pat.get_masks() - if min(masks) == 0xFF000000: - masks = [m >> 8 for m in masks] - else: - masks = [m << 8 for m in masks] - src = pygame.Surface(pat.get_size(), 0, 32, masks) - self._fill_surface(src) - dst = pygame.Surface(src.get_size(), 0, 32, masks) - for blend_name, dst_color, op in blend: - p = [] - for src_color in self._test_palette: - c = [op(dst_color[i], src_color[i]) for i in range(3)] - c.append(255) - p.append(tuple(c)) - dst.fill(dst_color) - dst.blit(src, (0, 0), special_flags=getattr(pygame, blend_name)) - self._assert_surface(dst, p, ", %s" % blend_name) - - def test_blit_blend_rgba(self): - sources = [ - self._make_src_surface(8), - self._make_src_surface(16), - self._make_src_surface(16, srcalpha=True), - self._make_src_surface(24), - self._make_src_surface(32), - self._make_src_surface(32, srcalpha=True), - ] - destinations = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - blend = [ - ("BLEND_RGBA_ADD", (0, 25, 100, 255), lambda a, b: min(a + b, 255)), - ("BLEND_RGBA_SUB", (0, 25, 100, 255), lambda a, b: max(a - b, 0)), - ("BLEND_RGBA_MULT", (0, 7, 100, 255), lambda a, b: (a * b) // 256), - ("BLEND_RGBA_MIN", (0, 255, 0, 255), min), - ("BLEND_RGBA_MAX", (0, 255, 0, 255), max), - ] - - for src in sources: - src_palette = [src.unmap_rgb(src.map_rgb(c)) for c in self._test_palette] - for dst in destinations: - for blend_name, dst_color, op in blend: - dc = dst.unmap_rgb(dst.map_rgb(dst_color)) - p = [] - for sc in src_palette: - c = [op(dc[i], sc[i]) for i in range(4)] - if not dst.get_masks()[3]: - c[3] = 255 - c = dst.unmap_rgb(dst.map_rgb(c)) - p.append(c) - dst.fill(dst_color) - dst.blit(src, (0, 0), special_flags=getattr(pygame, blend_name)) - self._assert_surface( - dst, - p, - ( - ", op: %s, src bpp: %i" - ", src flags: %i" - % (blend_name, src.get_bitsize(), src.get_flags()) - ), - ) - - # Blend blits are special cased for 32 to 32 bit surfaces - # with per-pixel alpha. - # - # Confirm the general case is used instead when the formats differ. - src = self._make_src_surface(32, srcalpha=True) - masks = src.get_masks() - dst = pygame.Surface( - src.get_size(), SRCALPHA, 32, (masks[2], masks[1], masks[0], masks[3]) - ) - for blend_name, dst_color, op in blend: - p = [ - tuple([op(dst_color[i], src_color[i]) for i in range(4)]) - for src_color in self._test_palette - ] - dst.fill(dst_color) - dst.blit(src, (0, 0), special_flags=getattr(pygame, blend_name)) - self._assert_surface(dst, p, ", %s" % blend_name) - - # Confirm this special case handles subsurfaces. 
- src = pygame.Surface((8, 10), SRCALPHA, 32) - dst = pygame.Surface((8, 10), SRCALPHA, 32) - tst = pygame.Surface((8, 10), SRCALPHA, 32) - src.fill((1, 2, 3, 4)) - dst.fill((40, 30, 20, 10)) - subsrc = src.subsurface((2, 3, 4, 4)) - subdst = dst.subsurface((2, 3, 4, 4)) - subdst.blit(subsrc, (0, 0), special_flags=BLEND_RGBA_ADD) - tst.fill((40, 30, 20, 10)) - tst.fill((41, 32, 23, 14), (2, 3, 4, 4)) - for x in range(8): - for y in range(10): - self.assertEqual( - dst.get_at((x, y)), - tst.get_at((x, y)), - "%s != %s at (%i, %i)" - % (dst.get_at((x, y)), tst.get_at((x, y)), x, y), - ) - - def test_blit_blend_premultiplied(self): - def test_premul_surf( - src_col, - dst_col, - src_size=(16, 16), - dst_size=(16, 16), - src_bit_depth=32, - dst_bit_depth=32, - src_has_alpha=True, - dst_has_alpha=True, - ): - if src_bit_depth == 8: - src = pygame.Surface(src_size, 0, src_bit_depth) - palette = [src_col, dst_col] - src.set_palette(palette) - src.fill(palette[0]) - elif src_has_alpha: - src = pygame.Surface(src_size, SRCALPHA, src_bit_depth) - src.fill(src_col) - else: - src = pygame.Surface(src_size, 0, src_bit_depth) - src.fill(src_col) - - if dst_bit_depth == 8: - dst = pygame.Surface(dst_size, 0, dst_bit_depth) - palette = [src_col, dst_col] - dst.set_palette(palette) - dst.fill(palette[1]) - elif dst_has_alpha: - dst = pygame.Surface(dst_size, SRCALPHA, dst_bit_depth) - dst.fill(dst_col) - else: - dst = pygame.Surface(dst_size, 0, dst_bit_depth) - dst.fill(dst_col) - - dst.blit(src, (0, 0), special_flags=BLEND_PREMULTIPLIED) - - actual_col = dst.get_at( - (int(float(src_size[0] / 2.0)), int(float(src_size[0] / 2.0))) - ) - - # This is the blend pre-multiplied formula - if src_col.a == 0: - expected_col = dst_col - elif src_col.a == 255: - expected_col = src_col - else: - # sC + dC - (((dC + 1) * sA >> 8) - expected_col = pygame.Color( - (src_col.r + dst_col.r - ((dst_col.r + 1) * src_col.a >> 8)), - (src_col.g + dst_col.g - ((dst_col.g + 1) * src_col.a >> 8)), - (src_col.b + dst_col.b - ((dst_col.b + 1) * src_col.a >> 8)), - (src_col.a + dst_col.a - ((dst_col.a + 1) * src_col.a >> 8)), - ) - if not dst_has_alpha: - expected_col.a = 255 - - return (expected_col, actual_col) - - # # Colour Tests - self.assertEqual( - *test_premul_surf(pygame.Color(40, 20, 0, 51), pygame.Color(40, 20, 0, 51)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(0, 0, 0, 0), pygame.Color(40, 20, 0, 51)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(40, 20, 0, 51), pygame.Color(0, 0, 0, 0)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(0, 0, 0, 0), pygame.Color(0, 0, 0, 0)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(2, 2, 2, 2), pygame.Color(40, 20, 0, 51)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(40, 20, 0, 51), pygame.Color(2, 2, 2, 2)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(2, 2, 2, 2), pygame.Color(2, 2, 2, 2)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(9, 9, 9, 9), pygame.Color(40, 20, 0, 51)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(40, 20, 0, 51), pygame.Color(9, 9, 9, 9)) - ) - - self.assertEqual( - *test_premul_surf(pygame.Color(9, 9, 9, 9), pygame.Color(9, 9, 9, 9)) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(127, 127, 127, 127), pygame.Color(40, 20, 0, 51) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(40, 20, 0, 51), pygame.Color(127, 127, 127, 127) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(127, 127, 127, 127), 
pygame.Color(127, 127, 127, 127) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(200, 200, 200, 200), pygame.Color(40, 20, 0, 51) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(40, 20, 0, 51), pygame.Color(200, 200, 200, 200) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(200, 200, 200, 200), pygame.Color(200, 200, 200, 200) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(255, 255, 255, 255), pygame.Color(40, 20, 0, 51) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(40, 20, 0, 51), pygame.Color(255, 255, 255, 255) - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(255, 255, 255, 255), pygame.Color(255, 255, 255, 255) - ) - ) - - # Surface format tests - self.assertRaises( - IndexError, - test_premul_surf, - pygame.Color(255, 255, 255, 255), - pygame.Color(255, 255, 255, 255), - src_size=(0, 0), - dst_size=(0, 0), - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(40, 20, 0, 51), - pygame.Color(30, 20, 0, 51), - src_size=(4, 4), - dst_size=(9, 9), - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 51), - pygame.Color(40, 20, 0, 51), - src_size=(17, 67), - dst_size=(69, 69), - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 51), - src_size=(17, 67), - dst_size=(69, 69), - src_has_alpha=True, - ) - ) - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 51), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - dst_has_alpha=False, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - src_has_alpha=False, - dst_has_alpha=False, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - dst_bit_depth=24, - src_has_alpha=True, - dst_has_alpha=False, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - src_bit_depth=24, - src_has_alpha=False, - dst_has_alpha=True, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - src_bit_depth=24, - dst_bit_depth=24, - src_has_alpha=False, - dst_has_alpha=False, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - src_bit_depth=8, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - dst_bit_depth=8, - ) - ) - - self.assertEqual( - *test_premul_surf( - pygame.Color(30, 20, 0, 255), - pygame.Color(40, 20, 0, 255), - src_size=(17, 67), - dst_size=(69, 69), - src_bit_depth=8, - dst_bit_depth=8, - ) - ) - - def test_blit_blend_big_rect(self): - """test that an oversized rect works ok.""" - color = (1, 2, 3, 255) - area = (1, 1, 30, 30) - s1 = pygame.Surface((4, 4), 0, 32) - r = s1.fill(special_flags=pygame.BLEND_ADD, color=color, rect=area) - - self.assertEqual(pygame.Rect((1, 1, 3, 3)), r) - self.assertEqual(s1.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(s1.get_at((1, 1)), color) - - black = pygame.Color("black") - red = pygame.Color("red") - self.assertNotEqual(black, red) - - surf = pygame.Surface((10, 
10), 0, 32) - surf.fill(black) - subsurf = surf.subsurface(pygame.Rect(0, 1, 10, 8)) - self.assertEqual(surf.get_at((0, 0)), black) - self.assertEqual(surf.get_at((0, 9)), black) - - subsurf.fill(red, (0, -1, 10, 1), pygame.BLEND_RGB_ADD) - self.assertEqual(surf.get_at((0, 0)), black) - self.assertEqual(surf.get_at((0, 9)), black) - - subsurf.fill(red, (0, 8, 10, 1), pygame.BLEND_RGB_ADD) - self.assertEqual(surf.get_at((0, 0)), black) - self.assertEqual(surf.get_at((0, 9)), black) - - def test_GET_PIXELVALS(self): - # surface.h GET_PIXELVALS bug regarding whether of not - # a surface has per-pixel alpha. Looking at the Amask - # is not enough. The surface's SRCALPHA flag must also - # be considered. Fix rev. 1923. - src = self._make_surface(32, srcalpha=True) - src.fill((0, 0, 0, 128)) - src.set_alpha(None) # Clear SRCALPHA flag. - dst = self._make_surface(32, srcalpha=True) - dst.blit(src, (0, 0), special_flags=BLEND_RGBA_ADD) - self.assertEqual(dst.get_at((0, 0)), (0, 0, 0, 255)) - - def test_fill_blend(self): - destinations = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - blend = [ - ("BLEND_ADD", (0, 25, 100, 255), lambda a, b: min(a + b, 255)), - ("BLEND_SUB", (0, 25, 100, 255), lambda a, b: max(a - b, 0)), - ("BLEND_MULT", (0, 7, 100, 255), lambda a, b: (a * b) // 256), - ("BLEND_MIN", (0, 255, 0, 255), min), - ("BLEND_MAX", (0, 255, 0, 255), max), - ] - - for dst in destinations: - dst_palette = [dst.unmap_rgb(dst.map_rgb(c)) for c in self._test_palette] - for blend_name, fill_color, op in blend: - fc = dst.unmap_rgb(dst.map_rgb(fill_color)) - self._fill_surface(dst) - p = [] - for dc in dst_palette: - c = [op(dc[i], fc[i]) for i in range(3)] - if dst.get_masks()[3]: - c.append(dc[3]) - else: - c.append(255) - c = dst.unmap_rgb(dst.map_rgb(c)) - p.append(c) - dst.fill(fill_color, special_flags=getattr(pygame, blend_name)) - self._assert_surface(dst, p, ", %s" % blend_name) - - def test_fill_blend_rgba(self): - destinations = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - blend = [ - ("BLEND_RGBA_ADD", (0, 25, 100, 255), lambda a, b: min(a + b, 255)), - ("BLEND_RGBA_SUB", (0, 25, 100, 255), lambda a, b: max(a - b, 0)), - ("BLEND_RGBA_MULT", (0, 7, 100, 255), lambda a, b: (a * b) // 256), - ("BLEND_RGBA_MIN", (0, 255, 0, 255), min), - ("BLEND_RGBA_MAX", (0, 255, 0, 255), max), - ] - - for dst in destinations: - dst_palette = [dst.unmap_rgb(dst.map_rgb(c)) for c in self._test_palette] - for blend_name, fill_color, op in blend: - fc = dst.unmap_rgb(dst.map_rgb(fill_color)) - self._fill_surface(dst) - p = [] - for dc in dst_palette: - c = [op(dc[i], fc[i]) for i in range(4)] - if not dst.get_masks()[3]: - c[3] = 255 - c = dst.unmap_rgb(dst.map_rgb(c)) - p.append(c) - dst.fill(fill_color, special_flags=getattr(pygame, blend_name)) - self._assert_surface(dst, p, ", %s" % blend_name) - - -class SurfaceSelfBlitTest(unittest.TestCase): - """Blit to self tests. - - This test case is in response to MotherHamster Bugzilla Bug 19. - """ - - def setUp(self): - # Needed for 8 bits-per-pixel color palette surface tests. 
- pygame.display.init() - - def tearDown(self): - pygame.display.quit() - - _test_palette = [(0, 0, 0, 255), (255, 0, 0, 0), (0, 255, 0, 255)] - surf_size = (9, 6) - - def _fill_surface(self, surf, palette=None): - if palette is None: - palette = self._test_palette - surf.fill(palette[1]) - surf.fill(palette[2], (1, 2, 1, 2)) - - def _make_surface(self, bitsize, srcalpha=False, palette=None): - if palette is None: - palette = self._test_palette - flags = 0 - if srcalpha: - flags |= SRCALPHA - surf = pygame.Surface(self.surf_size, flags, bitsize) - if bitsize == 8: - surf.set_palette([c[:3] for c in palette]) - self._fill_surface(surf, palette) - return surf - - def _assert_same(self, a, b): - w, h = a.get_size() - for x in range(w): - for y in range(h): - self.assertEqual( - a.get_at((x, y)), - b.get_at((x, y)), - ( - "%s != %s, bpp: %i" - % (a.get_at((x, y)), b.get_at((x, y)), a.get_bitsize()) - ), - ) - - def test_overlap_check(self): - # Ensure overlapping blits are properly detected. There are two - # places where this is done, within SoftBlitPyGame() in alphablit.c - # and PySurface_Blit() in surface.c. SoftBlitPyGame should catch the - # per-pixel alpha surface, PySurface_Blit the colorkey and blanket - # alpha surface. per-pixel alpha and blanket alpha self blits are - # not properly handled by SDL 1.2.13, so Pygame does them. - bgc = (0, 0, 0, 255) - rectc_left = (128, 64, 32, 255) - rectc_right = (255, 255, 255, 255) - colors = [(255, 255, 255, 255), (128, 64, 32, 255)] - overlaps = [ - (0, 0, 1, 0, (50, 0)), - (0, 0, 49, 1, (98, 2)), - (0, 0, 49, 49, (98, 98)), - (49, 0, 0, 1, (0, 2)), - (49, 0, 0, 49, (0, 98)), - ] - surfs = [pygame.Surface((100, 100), SRCALPHA, 32)] - surf = pygame.Surface((100, 100), 0, 32) - surf.set_alpha(255) - surfs.append(surf) - surf = pygame.Surface((100, 100), 0, 32) - surf.set_colorkey((0, 1, 0)) - surfs.append(surf) - for surf in surfs: - for s_x, s_y, d_x, d_y, test_posn in overlaps: - surf.fill(bgc) - surf.fill(rectc_right, (25, 0, 25, 50)) - surf.fill(rectc_left, (0, 0, 25, 50)) - surf.blit(surf, (d_x, d_y), (s_x, s_y, 50, 50)) - self.assertEqual(surf.get_at(test_posn), rectc_right) - - # https://github.com/pygame/pygame/issues/370#issuecomment-364625291 - @unittest.skipIf("ppc64le" in platform.uname(), "known ppc64le issue") - def test_colorkey(self): - # Check a workaround for an SDL 1.2.13 surface self-blit problem - # (MotherHamster Bugzilla bug 19). - pygame.display.set_mode((100, 50)) # Needed for 8bit surface - bitsizes = [8, 16, 24, 32] - for bitsize in bitsizes: - surf = self._make_surface(bitsize) - surf.set_colorkey(self._test_palette[1]) - surf.blit(surf, (3, 0)) - p = [] - for c in self._test_palette: - c = surf.unmap_rgb(surf.map_rgb(c)) - p.append(c) - p[1] = (p[1][0], p[1][1], p[1][2], 0) - tmp = self._make_surface(32, srcalpha=True, palette=p) - tmp.blit(tmp, (3, 0)) - tmp.set_alpha(None) - comp = self._make_surface(bitsize) - comp.blit(tmp, (0, 0)) - self._assert_same(surf, comp) - - # https://github.com/pygame/pygame/issues/370#issuecomment-364625291 - @unittest.skipIf("ppc64le" in platform.uname(), "known ppc64le issue") - def test_blanket_alpha(self): - # Check a workaround for an SDL 1.2.13 surface self-blit problem - # (MotherHamster Bugzilla bug 19). 
-        pygame.display.set_mode((100, 50))  # Needed for 8bit surface
-        bitsizes = [8, 16, 24, 32]
-        for bitsize in bitsizes:
-            surf = self._make_surface(bitsize)
-            surf.set_alpha(128)
-            surf.blit(surf, (3, 0))
-            p = []
-            for c in self._test_palette:
-                c = surf.unmap_rgb(surf.map_rgb(c))
-                p.append((c[0], c[1], c[2], 128))
-            tmp = self._make_surface(32, srcalpha=True, palette=p)
-            tmp.blit(tmp, (3, 0))
-            tmp.set_alpha(None)
-            comp = self._make_surface(bitsize)
-            comp.blit(tmp, (0, 0))
-            self._assert_same(surf, comp)
-
-    def test_pixel_alpha(self):
-        bitsizes = [16, 32]
-        for bitsize in bitsizes:
-            surf = self._make_surface(bitsize, srcalpha=True)
-            comp = self._make_surface(bitsize, srcalpha=True)
-            comp.blit(surf, (3, 0))
-            surf.blit(surf, (3, 0))
-            self._assert_same(surf, comp)
-
-    def test_blend(self):
-        bitsizes = [8, 16, 24, 32]
-        blends = ["BLEND_ADD", "BLEND_SUB", "BLEND_MULT", "BLEND_MIN", "BLEND_MAX"]
-        for bitsize in bitsizes:
-            surf = self._make_surface(bitsize)
-            comp = self._make_surface(bitsize)
-            for blend in blends:
-                self._fill_surface(surf)
-                self._fill_surface(comp)
-                comp.blit(surf, (3, 0), special_flags=getattr(pygame, blend))
-                surf.blit(surf, (3, 0), special_flags=getattr(pygame, blend))
-                self._assert_same(surf, comp)
-
-    def test_blend_rgba(self):
-        bitsizes = [16, 32]
-        blends = [
-            "BLEND_RGBA_ADD",
-            "BLEND_RGBA_SUB",
-            "BLEND_RGBA_MULT",
-            "BLEND_RGBA_MIN",
-            "BLEND_RGBA_MAX",
-        ]
-        for bitsize in bitsizes:
-            surf = self._make_surface(bitsize, srcalpha=True)
-            comp = self._make_surface(bitsize, srcalpha=True)
-            for blend in blends:
-                self._fill_surface(surf)
-                self._fill_surface(comp)
-                comp.blit(surf, (3, 0), special_flags=getattr(pygame, blend))
-                surf.blit(surf, (3, 0), special_flags=getattr(pygame, blend))
-                self._assert_same(surf, comp)
-
-    def test_subsurface(self):
-        # Blitting a surface to its subsurface is allowed.
-        surf = self._make_surface(32, srcalpha=True)
-        comp = surf.copy()
-        comp.blit(surf, (3, 0))
-        sub = surf.subsurface((3, 0, 6, 6))
-        sub.blit(surf, (0, 0))
-        del sub
-        self._assert_same(surf, comp)
-
-        # Blitting a subsurface to its owner is forbidden because of
-        # lock conflicts. This limitation allows the overlap check
-        # in PySurface_Blit of alphablit.c to be simplified.
-        def do_blit(d, s):
-            d.blit(s, (0, 0))
-
-        sub = surf.subsurface((1, 1, 2, 2))
-        self.assertRaises(pygame.error, do_blit, surf, sub)
-
-    def test_copy_alpha(self):
-        """issue 581: alpha of surface copy with SRCALPHA is set to 0."""
-        surf = pygame.Surface((16, 16), pygame.SRCALPHA, 32)
-        self.assertEqual(surf.get_alpha(), 255)
-        surf2 = surf.copy()
-        self.assertEqual(surf2.get_alpha(), 255)
-
-
-class SurfaceFillTest(unittest.TestCase):
-    def setUp(self):
-        pygame.display.init()
-
-    def tearDown(self):
-        pygame.display.quit()
-
-    def test_fill(self):
-        screen = pygame.display.set_mode((640, 480))
-
-        # Green and blue test pattern
-        screen.fill((0, 255, 0), (0, 0, 320, 240))
-        screen.fill((0, 255, 0), (320, 240, 320, 240))
-        screen.fill((0, 0, 255), (320, 0, 320, 240))
-        screen.fill((0, 0, 255), (0, 240, 320, 240))
-
-        # Now apply a clip rect, such that only the left side of the
-        # screen should be affected by blit operations.
-        screen.set_clip((0, 0, 320, 480))
-
-        # Test fills with each special flag, and additionally without any.
-        screen.fill((255, 0, 0, 127), (160, 0, 320, 30), 0)
-        screen.fill((255, 0, 0, 127), (160, 30, 320, 30), pygame.BLEND_ADD)
-        screen.fill((0, 127, 127, 127), (160, 60, 320, 30), pygame.BLEND_SUB)
-        screen.fill((0, 63, 63, 127), (160, 90, 320, 30), pygame.BLEND_MULT)
-        screen.fill((0, 127, 127, 127), (160, 120, 320, 30), pygame.BLEND_MIN)
-        screen.fill((127, 0, 0, 127), (160, 150, 320, 30), pygame.BLEND_MAX)
-        screen.fill((255, 0, 0, 127), (160, 180, 320, 30), pygame.BLEND_RGBA_ADD)
-        screen.fill((0, 127, 127, 127), (160, 210, 320, 30), pygame.BLEND_RGBA_SUB)
-        screen.fill((0, 63, 63, 127), (160, 240, 320, 30), pygame.BLEND_RGBA_MULT)
-        screen.fill((0, 127, 127, 127), (160, 270, 320, 30), pygame.BLEND_RGBA_MIN)
-        screen.fill((127, 0, 0, 127), (160, 300, 320, 30), pygame.BLEND_RGBA_MAX)
-        screen.fill((255, 0, 0, 127), (160, 330, 320, 30), pygame.BLEND_RGB_ADD)
-        screen.fill((0, 127, 127, 127), (160, 360, 320, 30), pygame.BLEND_RGB_SUB)
-        screen.fill((0, 63, 63, 127), (160, 390, 320, 30), pygame.BLEND_RGB_MULT)
-        screen.fill((0, 127, 127, 127), (160, 420, 320, 30), pygame.BLEND_RGB_MIN)
-        screen.fill((255, 0, 0, 127), (160, 450, 320, 30), pygame.BLEND_RGB_MAX)
-
-        # Update the display so we can see the results
-        pygame.display.flip()
-
-        # Compare colors on both sides of window
-        for y in range(5, 480, 10):
-            self.assertEqual(screen.get_at((10, y)), screen.get_at((330, 480 - y)))
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/venv/Lib/site-packages/pygame/tests/surfarray_tags.py b/venv/Lib/site-packages/pygame/tests/surfarray_tags.py
deleted file mode 100644
index baa535c..0000000
--- a/venv/Lib/site-packages/pygame/tests/surfarray_tags.py
+++ /dev/null
@@ -1,16 +0,0 @@
-__tags__ = ["array"]
-
-exclude = False
-
-try:
-    import numpy
-except ImportError:
-    exclude = True
-else:
-    try:
-        import pygame.pixelcopy
-    except ImportError:
-        exclude = True
-
-if exclude:
-    __tags__.extend(("ignore", "subprocess_ignore"))
diff --git a/venv/Lib/site-packages/pygame/tests/surfarray_test.py b/venv/Lib/site-packages/pygame/tests/surfarray_test.py
deleted file mode 100644
index 246da82..0000000
--- a/venv/Lib/site-packages/pygame/tests/surfarray_test.py
+++ /dev/null
@@ -1,754 +0,0 @@
-import unittest
-import platform
-
-from numpy import (
-    uint8,
-    uint16,
-    uint32,
-    uint64,
-    zeros,
-    float32,
-    float64,
-    alltrue,
-    rint,
-    arange,
-)
-
-import pygame
-from pygame.locals import *
-
-import pygame.surfarray
-
-
-IS_PYPY = "PyPy" == platform.python_implementation()
-
-
-@unittest.skipIf(IS_PYPY, "pypy skip known failure")  # TODO
-class SurfarrayModuleTest(unittest.TestCase):
-    pixels2d = {8: True, 16: True, 24: False, 32: True}
-    pixels3d = {8: False, 16: False, 24: True, 32: True}
-    array2d = {8: True, 16: True, 24: True, 32: True}
-    array3d = {8: False, 16: False, 24: True, 32: True}
-
-    test_palette = [
-        (0, 0, 0, 255),
-        (10, 30, 60, 255),
-        (25, 75, 100, 255),
-        (100, 150, 200, 255),
-        (0, 100, 200, 255),
-    ]
-    surf_size = (10, 12)
-    test_points = [
-        ((0, 0), 1),
-        ((4, 5), 1),
-        ((9, 0), 2),
-        ((5, 5), 2),
-        ((0, 11), 3),
-        ((4, 6), 3),
-        ((9, 11), 4),
-        ((5, 6), 4),
-    ]
-
-    @classmethod
-    def setUpClass(cls):
-        # Needed for 8 bits-per-pixel color palette surface tests.
-        pygame.init()
-
-    @classmethod
-    def tearDownClass(cls):
-        pygame.quit()
-
-    def setUp(cls):
-        # This makes sure pygame is always initialized before each test (in
-        # case a test calls pygame.quit()).
- if not pygame.get_init(): - pygame.init() - - def _make_surface(self, bitsize, srcalpha=False, palette=None): - if palette is None: - palette = self.test_palette - flags = 0 - if srcalpha: - flags |= SRCALPHA - surf = pygame.Surface(self.surf_size, flags, bitsize) - if bitsize == 8: - surf.set_palette([c[:3] for c in palette]) - return surf - - def _fill_surface(self, surf, palette=None): - if palette is None: - palette = self.test_palette - surf.fill(palette[1], (0, 0, 5, 6)) - surf.fill(palette[2], (5, 0, 5, 6)) - surf.fill(palette[3], (0, 6, 5, 6)) - surf.fill(palette[4], (5, 6, 5, 6)) - - def _make_src_surface(self, bitsize, srcalpha=False, palette=None): - surf = self._make_surface(bitsize, srcalpha, palette) - self._fill_surface(surf, palette) - return surf - - def _assert_surface(self, surf, palette=None, msg=""): - if palette is None: - palette = self.test_palette - if surf.get_bitsize() == 16: - palette = [surf.unmap_rgb(surf.map_rgb(c)) for c in palette] - for posn, i in self.test_points: - self.assertEqual( - surf.get_at(posn), - palette[i], - "%s != %s: flags: %i, bpp: %i, posn: %s%s" - % ( - surf.get_at(posn), - palette[i], - surf.get_flags(), - surf.get_bitsize(), - posn, - msg, - ), - ) - - def _make_array3d(self, dtype): - return zeros((self.surf_size[0], self.surf_size[1], 3), dtype) - - def _fill_array2d(self, arr, surf): - palette = self.test_palette - arr[:5, :6] = surf.map_rgb(palette[1]) - arr[5:, :6] = surf.map_rgb(palette[2]) - arr[:5, 6:] = surf.map_rgb(palette[3]) - arr[5:, 6:] = surf.map_rgb(palette[4]) - - def _fill_array3d(self, arr): - palette = self.test_palette - arr[:5, :6] = palette[1][:3] - arr[5:, :6] = palette[2][:3] - arr[:5, 6:] = palette[3][:3] - arr[5:, 6:] = palette[4][:3] - - def _make_src_array3d(self, dtype): - arr = self._make_array3d(dtype) - self._fill_array3d(arr) - return arr - - def _make_array2d(self, dtype): - return zeros(self.surf_size, dtype) - - def test_array2d(self): - - sources = [ - self._make_src_surface(8), - self._make_src_surface(16), - self._make_src_surface(16, srcalpha=True), - self._make_src_surface(24), - self._make_src_surface(32), - self._make_src_surface(32, srcalpha=True), - ] - palette = self.test_palette - alpha_color = (0, 0, 0, 128) - - for surf in sources: - arr = pygame.surfarray.array2d(surf) - for posn, i in self.test_points: - self.assertEqual( - arr[posn], - surf.get_at_mapped(posn), - "%s != %s: flags: %i, bpp: %i, posn: %s" - % ( - arr[posn], - surf.get_at_mapped(posn), - surf.get_flags(), - surf.get_bitsize(), - posn, - ), - ) - - if surf.get_masks()[3]: - surf.fill(alpha_color) - arr = pygame.surfarray.array2d(surf) - posn = (0, 0) - self.assertEqual( - arr[posn], - surf.get_at_mapped(posn), - "%s != %s: bpp: %i" - % (arr[posn], surf.get_at_mapped(posn), surf.get_bitsize()), - ) - - def test_array3d(self): - - sources = [ - self._make_src_surface(16), - self._make_src_surface(16, srcalpha=True), - self._make_src_surface(24), - self._make_src_surface(32), - self._make_src_surface(32, srcalpha=True), - ] - palette = self.test_palette - - for surf in sources: - arr = pygame.surfarray.array3d(surf) - - def same_color(ac, sc): - return ac[0] == sc[0] and ac[1] == sc[1] and ac[2] == sc[2] - - for posn, i in self.test_points: - self.assertTrue( - same_color(arr[posn], surf.get_at(posn)), - "%s != %s: flags: %i, bpp: %i, posn: %s" - % ( - tuple(arr[posn]), - surf.get_at(posn), - surf.get_flags(), - surf.get_bitsize(), - posn, - ), - ) - - def test_array_alpha(self): - - palette = [ - (0, 0, 0, 0), - (10, 50, 
100, 255), - (60, 120, 240, 130), - (64, 128, 255, 0), - (255, 128, 0, 65), - ] - targets = [ - self._make_src_surface(8, palette=palette), - self._make_src_surface(16, palette=palette), - self._make_src_surface(16, palette=palette, srcalpha=True), - self._make_src_surface(24, palette=palette), - self._make_src_surface(32, palette=palette), - self._make_src_surface(32, palette=palette, srcalpha=True), - ] - - for surf in targets: - p = palette - if surf.get_bitsize() == 16: - p = [surf.unmap_rgb(surf.map_rgb(c)) for c in p] - arr = pygame.surfarray.array_alpha(surf) - if surf.get_masks()[3]: - for (x, y), i in self.test_points: - self.assertEqual( - arr[x, y], - p[i][3], - ( - "%i != %i, posn: (%i, %i), " - "bitsize: %i" - % (arr[x, y], p[i][3], x, y, surf.get_bitsize()) - ), - ) - else: - self.assertTrue(alltrue(arr == 255)) - - # No per-pixel alpha when blanket alpha is None. - for surf in targets: - blanket_alpha = surf.get_alpha() - surf.set_alpha(None) - arr = pygame.surfarray.array_alpha(surf) - self.assertTrue( - alltrue(arr == 255), - "All alpha values should be 255 when" - " surf.set_alpha(None) has been set." - " bitsize: %i, flags: %i" % (surf.get_bitsize(), surf.get_flags()), - ) - surf.set_alpha(blanket_alpha) - - # Bug for per-pixel alpha surface when blanket alpha 0. - for surf in targets: - blanket_alpha = surf.get_alpha() - surf.set_alpha(0) - arr = pygame.surfarray.array_alpha(surf) - if surf.get_masks()[3]: - self.assertFalse( - alltrue(arr == 255), - "bitsize: %i, flags: %i" % (surf.get_bitsize(), surf.get_flags()), - ) - else: - self.assertTrue( - alltrue(arr == 255), - "bitsize: %i, flags: %i" % (surf.get_bitsize(), surf.get_flags()), - ) - surf.set_alpha(blanket_alpha) - - def test_array_colorkey(self): - - palette = [ - (0, 0, 0, 0), - (10, 50, 100, 255), - (60, 120, 240, 130), - (64, 128, 255, 0), - (255, 128, 0, 65), - ] - targets = [ - self._make_src_surface(8, palette=palette), - self._make_src_surface(16, palette=palette), - self._make_src_surface(16, palette=palette, srcalpha=True), - self._make_src_surface(24, palette=palette), - self._make_src_surface(32, palette=palette), - self._make_src_surface(32, palette=palette, srcalpha=True), - ] - - for surf in targets: - p = palette - if surf.get_bitsize() == 16: - p = [surf.unmap_rgb(surf.map_rgb(c)) for c in p] - surf.set_colorkey(None) - arr = pygame.surfarray.array_colorkey(surf) - self.assertTrue(alltrue(arr == 255)) - - for i in range(1, len(palette)): - surf.set_colorkey(p[i]) - alphas = [255] * len(p) - alphas[i] = 0 - arr = pygame.surfarray.array_colorkey(surf) - for (x, y), j in self.test_points: - self.assertEqual( - arr[x, y], - alphas[j], - ( - "%i != %i, posn: (%i, %i), " - "bitsize: %i" - % (arr[x, y], alphas[j], x, y, surf.get_bitsize()) - ), - ) - - def test_array_red(self): - self._test_array_rgb("red", 0) - - def test_array_green(self): - self._test_array_rgb("green", 1) - - def test_array_blue(self): - self._test_array_rgb("blue", 2) - - def _test_array_rgb(self, operation, mask_posn): - method_name = "array_" + operation - - array_rgb = getattr(pygame.surfarray, method_name) - palette = [ - (0, 0, 0, 255), - (5, 13, 23, 255), - (29, 31, 37, 255), - (131, 157, 167, 255), - (179, 191, 251, 255), - ] - plane = [c[mask_posn] for c in palette] - - targets = [ - self._make_src_surface(24, palette=palette), - self._make_src_surface(32, palette=palette), - self._make_src_surface(32, palette=palette, srcalpha=True), - ] - - for surf in targets: - self.assertFalse(surf.get_locked()) - for (x, y), i in 
self.test_points: - surf.fill(palette[i]) - arr = array_rgb(surf) - self.assertEqual(arr[x, y], plane[i]) - surf.fill((100, 100, 100, 250)) - self.assertEqual(arr[x, y], plane[i]) - self.assertFalse(surf.get_locked()) - del arr - - def test_blit_array(self): - - s = pygame.Surface((10, 10), 0, 24) - a = pygame.surfarray.array3d(s) - pygame.surfarray.blit_array(s, a) - - # target surfaces - targets = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - - # source arrays - arrays3d = [] - dtypes = [(8, uint8), (16, uint16), (32, uint32)] - try: - dtypes.append((64, uint64)) - except NameError: - pass - arrays3d = [(self._make_src_array3d(dtype), None) for __, dtype in dtypes] - for bitsize in [8, 16, 24, 32]: - palette = None - if bitsize == 16: - s = pygame.Surface((1, 1), 0, 16) - palette = [s.unmap_rgb(s.map_rgb(c)) for c in self.test_palette] - if self.pixels3d[bitsize]: - surf = self._make_src_surface(bitsize) - arr = pygame.surfarray.pixels3d(surf) - arrays3d.append((arr, palette)) - if self.array3d[bitsize]: - surf = self._make_src_surface(bitsize) - arr = pygame.surfarray.array3d(surf) - arrays3d.append((arr, palette)) - for sz, dtype in dtypes: - arrays3d.append((arr.astype(dtype), palette)) - - # tests on arrays - def do_blit(surf, arr): - pygame.surfarray.blit_array(surf, arr) - - for surf in targets: - bitsize = surf.get_bitsize() - for arr, palette in arrays3d: - surf.fill((0, 0, 0, 0)) - if bitsize == 8: - self.assertRaises(ValueError, do_blit, surf, arr) - else: - pygame.surfarray.blit_array(surf, arr) - self._assert_surface(surf, palette) - - if self.pixels2d[bitsize]: - surf.fill((0, 0, 0, 0)) - s = self._make_src_surface(bitsize, surf.get_flags() & SRCALPHA) - arr = pygame.surfarray.pixels2d(s) - pygame.surfarray.blit_array(surf, arr) - self._assert_surface(surf) - - if self.array2d[bitsize]: - s = self._make_src_surface(bitsize, surf.get_flags() & SRCALPHA) - arr = pygame.surfarray.array2d(s) - for sz, dtype in dtypes: - surf.fill((0, 0, 0, 0)) - if sz >= bitsize: - pygame.surfarray.blit_array(surf, arr.astype(dtype)) - self._assert_surface(surf) - else: - self.assertRaises( - ValueError, do_blit, surf, self._make_array2d(dtype) - ) - - # Check alpha for 2D arrays - surf = self._make_surface(16, srcalpha=True) - arr = zeros(surf.get_size(), uint16) - arr[...] = surf.map_rgb((0, 128, 255, 64)) - color = surf.unmap_rgb(arr[0, 0]) - pygame.surfarray.blit_array(surf, arr) - self.assertEqual(surf.get_at((5, 5)), color) - - surf = self._make_surface(32, srcalpha=True) - arr = zeros(surf.get_size(), uint32) - color = (0, 111, 255, 63) - arr[...] 
= surf.map_rgb(color) - pygame.surfarray.blit_array(surf, arr) - self.assertEqual(surf.get_at((5, 5)), color) - - # Check shifts - arr3d = self._make_src_array3d(uint8) - - shift_tests = [ - (16, [12, 0, 8, 4], [0xF000, 0xF, 0xF00, 0xF0]), - (24, [16, 0, 8, 0], [0xFF0000, 0xFF, 0xFF00, 0]), - (32, [0, 16, 24, 8], [0xFF, 0xFF0000, 0xFF000000, 0xFF00]), - ] - - for bitsize, shifts, masks in shift_tests: - surf = self._make_surface(bitsize, srcalpha=(shifts[3] != 0)) - palette = None - if bitsize == 16: - palette = [surf.unmap_rgb(surf.map_rgb(c)) for c in self.test_palette] - - self.assertRaises(TypeError, surf.set_shifts, shifts) - self.assertRaises(TypeError, surf.set_masks, masks) - - # Invalid arrays - surf = pygame.Surface((1, 1), 0, 32) - t = "abcd" - self.assertRaises(ValueError, do_blit, surf, t) - - surf_size = self.surf_size - surf = pygame.Surface(surf_size, 0, 32) - arr = zeros([surf_size[0], surf_size[1] + 1, 3], uint32) - self.assertRaises(ValueError, do_blit, surf, arr) - arr = zeros([surf_size[0] + 1, surf_size[1], 3], uint32) - self.assertRaises(ValueError, do_blit, surf, arr) - - surf = pygame.Surface((1, 4), 0, 32) - arr = zeros((4,), uint32) - self.assertRaises(ValueError, do_blit, surf, arr) - arr.shape = (1, 1, 1, 4) - self.assertRaises(ValueError, do_blit, surf, arr) - - # Issue #81: round from float to int - try: - rint - except NameError: - pass - else: - surf = pygame.Surface((10, 10), pygame.SRCALPHA, 32) - w, h = surf.get_size() - length = w * h - for dtype in [float32, float64]: - surf.fill((255, 255, 255, 0)) - farr = arange(0, length, dtype=dtype) - farr.shape = w, h - pygame.surfarray.blit_array(surf, farr) - for x in range(w): - for y in range(h): - self.assertEqual( - surf.get_at_mapped((x, y)), int(rint(farr[x, y])) - ) - - # this test should be removed soon, when the function is deleted - def test_get_arraytype(self): - array_type = pygame.surfarray.get_arraytype() - - self.assertEqual(array_type, "numpy", "unknown array type %s" % array_type) - - # this test should be removed soon, when the function is deleted - def test_get_arraytypes(self): - - arraytypes = pygame.surfarray.get_arraytypes() - self.assertIn("numpy", arraytypes) - - for atype in arraytypes: - self.assertEqual(atype, "numpy", "unknown array type %s" % atype) - - def test_make_surface(self): - - # How does one properly test this with 2d arrays. It makes no sense - # since the pixel format is not entirely dependent on element size. - # Just make sure the surface pixel size is at least as large as the - # array element size I guess. - # - for bitsize, dtype in [(8, uint8), (16, uint16), (24, uint32)]: - ## Even this simple assertion fails for 2d arrays. Where's the problem? 
- ## surf = pygame.surfarray.make_surface(self._make_array2d(dtype)) - ## self.assertGreaterEqual(surf.get_bitsize(), bitsize, - ## "not %i >= %i)" % (surf.get_bitsize(), bitsize)) - ## - surf = pygame.surfarray.make_surface(self._make_src_array3d(dtype)) - self._assert_surface(surf) - - # Issue #81: round from float to int - try: - rint - except NameError: - pass - else: - w = 9 - h = 11 - length = w * h - for dtype in [float32, float64]: - farr = arange(0, length, dtype=dtype) - farr.shape = w, h - surf = pygame.surfarray.make_surface(farr) - for x in range(w): - for y in range(h): - self.assertEqual( - surf.get_at_mapped((x, y)), int(rint(farr[x, y])) - ) - - def test_map_array(self): - - arr3d = self._make_src_array3d(uint8) - targets = [ - self._make_surface(8), - self._make_surface(16), - self._make_surface(16, srcalpha=True), - self._make_surface(24), - self._make_surface(32), - self._make_surface(32, srcalpha=True), - ] - palette = self.test_palette - - for surf in targets: - arr2d = pygame.surfarray.map_array(surf, arr3d) - for posn, i in self.test_points: - self.assertEqual( - arr2d[posn], - surf.map_rgb(palette[i]), - "%i != %i, bitsize: %i, flags: %i" - % ( - arr2d[posn], - surf.map_rgb(palette[i]), - surf.get_bitsize(), - surf.get_flags(), - ), - ) - - # Exception checks - self.assertRaises( - ValueError, - pygame.surfarray.map_array, - self._make_surface(32), - self._make_array2d(uint8), - ) - - def test_pixels2d(self): - - sources = [ - self._make_surface(8), - self._make_surface(16, srcalpha=True), - self._make_surface(32, srcalpha=True), - ] - - for surf in sources: - self.assertFalse(surf.get_locked()) - arr = pygame.surfarray.pixels2d(surf) - self.assertTrue(surf.get_locked()) - self._fill_array2d(arr, surf) - surf.unlock() - self.assertTrue(surf.get_locked()) - del arr - self.assertFalse(surf.get_locked()) - self.assertEqual(surf.get_locks(), ()) - self._assert_surface(surf) - - # Error checks - self.assertRaises(ValueError, pygame.surfarray.pixels2d, self._make_surface(24)) - - def test_pixels3d(self): - - sources = [self._make_surface(24), self._make_surface(32)] - - for surf in sources: - self.assertFalse(surf.get_locked()) - arr = pygame.surfarray.pixels3d(surf) - self.assertTrue(surf.get_locked()) - self._fill_array3d(arr) - surf.unlock() - self.assertTrue(surf.get_locked()) - del arr - self.assertFalse(surf.get_locked()) - self.assertEqual(surf.get_locks(), ()) - self._assert_surface(surf) - - # Alpha check - color = (1, 2, 3, 0) - surf = self._make_surface(32, srcalpha=True) - arr = pygame.surfarray.pixels3d(surf) - arr[0, 0] = color[:3] - self.assertEqual(surf.get_at((0, 0)), color) - - # Error checks - def do_pixels3d(surf): - pygame.surfarray.pixels3d(surf) - - self.assertRaises(ValueError, do_pixels3d, self._make_surface(8)) - self.assertRaises(ValueError, do_pixels3d, self._make_surface(16)) - - def test_pixels_alpha(self): - - palette = [ - (0, 0, 0, 0), - (127, 127, 127, 0), - (127, 127, 127, 85), - (127, 127, 127, 170), - (127, 127, 127, 255), - ] - alphas = [0, 45, 86, 99, 180] - - surf = self._make_src_surface(32, srcalpha=True, palette=palette) - - self.assertFalse(surf.get_locked()) - arr = pygame.surfarray.pixels_alpha(surf) - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertTrue(surf.get_locked()) - - for (x, y), i in self.test_points: - self.assertEqual(arr[x, y], palette[i][3]) - - for (x, y), i in self.test_points: - alpha = alphas[i] - arr[x, y] = alpha - color = (127, 127, 127, alpha) - self.assertEqual(surf.get_at((x, y)), 
color, "posn: (%i, %i)" % (x, y)) - - del arr - self.assertFalse(surf.get_locked()) - self.assertEqual(surf.get_locks(), ()) - - # Check exceptions. - def do_pixels_alpha(surf): - pygame.surfarray.pixels_alpha(surf) - - targets = [(8, False), (16, False), (16, True), (24, False), (32, False)] - - for bitsize, srcalpha in targets: - self.assertRaises( - ValueError, do_pixels_alpha, self._make_surface(bitsize, srcalpha) - ) - - def test_pixels_red(self): - self._test_pixels_rgb("red", 0) - - def test_pixels_green(self): - self._test_pixels_rgb("green", 1) - - def test_pixels_blue(self): - self._test_pixels_rgb("blue", 2) - - def _test_pixels_rgb(self, operation, mask_posn): - method_name = "pixels_" + operation - - pixels_rgb = getattr(pygame.surfarray, method_name) - palette = [ - (0, 0, 0, 255), - (5, 13, 23, 255), - (29, 31, 37, 255), - (131, 157, 167, 255), - (179, 191, 251, 255), - ] - plane = [c[mask_posn] for c in palette] - - surf24 = self._make_src_surface(24, srcalpha=False, palette=palette) - surf32 = self._make_src_surface(32, srcalpha=False, palette=palette) - surf32a = self._make_src_surface(32, srcalpha=True, palette=palette) - - for surf in [surf24, surf32, surf32a]: - self.assertFalse(surf.get_locked()) - arr = pixels_rgb(surf) - self.assertTrue(surf.get_locked()) - surf.unlock() - self.assertTrue(surf.get_locked()) - - for (x, y), i in self.test_points: - self.assertEqual(arr[x, y], plane[i]) - - del arr - self.assertFalse(surf.get_locked()) - self.assertEqual(surf.get_locks(), ()) - - # Check exceptions. - targets = [(8, False), (16, False), (16, True)] - - for bitsize, srcalpha in targets: - self.assertRaises( - ValueError, pixels_rgb, self._make_surface(bitsize, srcalpha) - ) - - def test_use_arraytype(self): - def do_use_arraytype(atype): - pygame.surfarray.use_arraytype(atype) - - pygame.surfarray.use_arraytype("numpy") - self.assertEqual(pygame.surfarray.get_arraytype(), "numpy") - self.assertRaises(ValueError, do_use_arraytype, "not an option") - - def test_surf_lock(self): - sf = pygame.Surface((5, 5), 0, 32) - for atype in pygame.surfarray.get_arraytypes(): - pygame.surfarray.use_arraytype(atype) - - ar = pygame.surfarray.pixels2d(sf) - self.assertTrue(sf.get_locked()) - - sf.unlock() - self.assertTrue(sf.get_locked()) - - del ar - self.assertFalse(sf.get_locked()) - self.assertEqual(sf.get_locks(), ()) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/surflock_test.py b/venv/Lib/site-packages/pygame/tests/surflock_test.py deleted file mode 100644 index 19f354b..0000000 --- a/venv/Lib/site-packages/pygame/tests/surflock_test.py +++ /dev/null @@ -1,144 +0,0 @@ -import unittest -import sys -import platform - -import pygame - -IS_PYPY = "PyPy" == platform.python_implementation() - - -@unittest.skipIf(IS_PYPY, "pypy skip known failure") # TODO -class SurfaceLockTest(unittest.TestCase): - def test_lock(self): - sf = pygame.Surface((5, 5)) - - sf.lock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (sf,)) - - sf.lock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (sf, sf)) - - sf.unlock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (sf,)) - - sf.unlock() - self.assertEqual(sf.get_locked(), False) - self.assertEqual(sf.get_locks(), ()) - - def test_subsurface_lock(self): - sf = pygame.Surface((5, 5)) - subsf = sf.subsurface((1, 1, 2, 2)) - sf2 = pygame.Surface((5, 5)) - - # Simple blits, nothing should happen here. 
- sf2.blit(subsf, (0, 0)) - sf2.blit(sf, (0, 0)) - - # Test blitting on self: - self.assertRaises(pygame.error, sf.blit, subsf, (0, 0)) - # self.assertRaises(pygame.error, subsf.blit, sf, (0, 0)) - # ^ Fails although it should not in my opinion. If I cannot - # blit the subsurface to the surface, it should not be allowed - # the other way around as well. - - # Test additional locks. - sf.lock() - sf2.blit(subsf, (0, 0)) - self.assertRaises(pygame.error, sf2.blit, sf, (0, 0)) - - subsf.lock() - self.assertRaises(pygame.error, sf2.blit, subsf, (0, 0)) - self.assertRaises(pygame.error, sf2.blit, sf, (0, 0)) - - # sf and subsf are now explicitly locked. Unlock sf, so we can - # (assume) to blit it. - # It will fail though as the subsurface still has a lock around, - # which is okay and correct behaviour. - sf.unlock() - self.assertRaises(pygame.error, sf2.blit, subsf, (0, 0)) - self.assertRaises(pygame.error, sf2.blit, sf, (0, 0)) - - # Run a second unlock on the surface. This should ideally have - # no effect as the subsurface is the locking reason! - sf.unlock() - self.assertRaises(pygame.error, sf2.blit, sf, (0, 0)) - self.assertRaises(pygame.error, sf2.blit, subsf, (0, 0)) - subsf.unlock() - - sf.lock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (sf,)) - self.assertEqual(subsf.get_locked(), False) - self.assertEqual(subsf.get_locks(), ()) - - subsf.lock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (sf, subsf)) - self.assertEqual(subsf.get_locked(), True) - self.assertEqual(subsf.get_locks(), (subsf,)) - - sf.unlock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (subsf,)) - self.assertEqual(subsf.get_locked(), True) - self.assertEqual(subsf.get_locks(), (subsf,)) - - subsf.unlock() - self.assertEqual(sf.get_locked(), False) - self.assertEqual(sf.get_locks(), ()) - self.assertEqual(subsf.get_locked(), False) - self.assertEqual(subsf.get_locks(), ()) - - subsf.lock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (subsf,)) - self.assertEqual(subsf.get_locked(), True) - self.assertEqual(subsf.get_locks(), (subsf,)) - - subsf.lock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (subsf, subsf)) - self.assertEqual(subsf.get_locked(), True) - self.assertEqual(subsf.get_locks(), (subsf, subsf)) - - def test_pxarray_ref(self): - sf = pygame.Surface((5, 5)) - ar = pygame.PixelArray(sf) - ar2 = pygame.PixelArray(sf) - - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (ar, ar2)) - - del ar - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (ar2,)) - - ar = ar2[:] - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (ar2,)) - - del ar - self.assertEqual(sf.get_locked(), True) - self.assertEqual(len(sf.get_locks()), 1) - - def test_buffer(self): - sf = pygame.Surface((5, 5)) - buf = sf.get_buffer() - - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (buf,)) - - sf.unlock() - self.assertEqual(sf.get_locked(), True) - self.assertEqual(sf.get_locks(), (buf,)) - - del buf - self.assertEqual(sf.get_locked(), False) - self.assertEqual(sf.get_locks(), ()) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/sysfont_test.py b/venv/Lib/site-packages/pygame/tests/sysfont_test.py deleted file mode 100644 index 0ae380a..0000000 --- a/venv/Lib/site-packages/pygame/tests/sysfont_test.py +++ /dev/null @@ -1,51 
+0,0 @@ -import unittest -import platform - - -class SysfontModuleTest(unittest.TestCase): - def test_create_aliases(self): - import pygame.sysfont - - pygame.sysfont.initsysfonts() - pygame.sysfont.create_aliases() - self.assertTrue(len(pygame.sysfont.Sysalias) > 0) - - def test_initsysfonts(self): - import pygame.sysfont - - pygame.sysfont.initsysfonts() - self.assertTrue(len(pygame.sysfont.get_fonts()) > 0) - - @unittest.skipIf("Darwin" not in platform.platform(), "Not mac we skip.") - def test_initsysfonts_darwin(self): - import pygame.sysfont - - self.assertTrue(len(pygame.sysfont.get_fonts()) > 10) - - def test_sysfont(self): - import pygame.font - - pygame.font.init() - arial = pygame.font.SysFont("Arial", 40) - self.assertTrue(isinstance(arial, pygame.font.Font)) - - @unittest.skipIf( - ("Darwin" in platform.platform() or "Windows" in platform.platform()), - "Not unix we skip.", - ) - def test_initsysfonts_unix(self): - import pygame.sysfont - - self.assertTrue(len(pygame.sysfont.get_fonts()) > 0) - - @unittest.skipIf("Windows" not in platform.platform(), "Not windows we skip.") - def test_initsysfonts_win32(self): - import pygame.sysfont - - self.assertTrue(len(pygame.sysfont.get_fonts()) > 10) - - -############################################################################### - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/test_test_.py b/venv/Lib/site-packages/pygame/tests/test_test_.py deleted file mode 100644 index 0880e7e..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_test_.py +++ /dev/null @@ -1,2 +0,0 @@ -while True: - pass diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__init__.py b/venv/Lib/site-packages/pygame/tests/test_utils/__init__.py deleted file mode 100644 index f8013ca..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/__init__.py +++ /dev/null @@ -1,227 +0,0 @@ -import os -import pygame -import sys -import tempfile -import time - -is_pygame_pkg = __name__.startswith("pygame.tests.") - -############################################################################### - - -def tostring(row): - """Convert row of bytes to string. Expects `row` to be an - ``array``. - """ - return row.tobytes() - - -def geterror(): - return sys.exc_info()[1] - - -class AssertRaisesRegexMixin(object): - """Provides a way to prevent DeprecationWarnings in python >= 3.2. - - For this mixin to override correctly it needs to be before the - unittest.TestCase in the multiple inheritance hierarchy. - e.g. class TestClass(AssertRaisesRegexMixin, unittest.TestCase) - - This class/mixin and its usage can be removed when pygame no longer - supports python < 3.2. 
- """ - - def assertRaisesRegex(self, *args, **kwargs): - try: - return super(AssertRaisesRegexMixin, self).assertRaisesRegex( - *args, **kwargs - ) - except AttributeError: - try: - return super(AssertRaisesRegexMixin, self).assertRaisesRegexp( - *args, **kwargs - ) - except AttributeError: - self.skipTest("No assertRaisesRegex/assertRaisesRegexp method") - - -############################################################################### - -this_dir = os.path.dirname(os.path.abspath(__file__)) -trunk_dir = os.path.split(os.path.split(this_dir)[0])[0] -if is_pygame_pkg: - test_module = "tests" -else: - test_module = "test" - - -def trunk_relative_path(relative): - return os.path.normpath(os.path.join(trunk_dir, relative)) - - -def fixture_path(path): - return trunk_relative_path(os.path.join(test_module, "fixtures", path)) - - -def example_path(path): - return trunk_relative_path(os.path.join("examples", path)) - - -sys.path.insert(0, trunk_relative_path(".")) - - -################################## TEMP FILES ################################# - - -def get_tmp_dir(): - return tempfile.mkdtemp() - - -############################################################################### - - -def question(q): - return input("\n%s (y/n): " % q.rstrip(" ")).lower().strip() == "y" - - -def prompt(p): - return input("\n%s (press enter to continue): " % p.rstrip(" ")) - - -#################################### HELPERS ################################## - - -def rgba_between(value, minimum=0, maximum=255): - if value < minimum: - return minimum - elif value > maximum: - return maximum - else: - return value - - -def combinations(seqs): - """ - - Recipe 496807 from ActiveState Python CookBook - - Non recursive technique for getting all possible combinations of a sequence - of sequences. - - """ - - r = [[]] - for x in seqs: - r = [i + [y] for y in x for i in r] - return r - - -def gradient(width, height): - """ - - Yields a pt and corresponding RGBA tuple, for every (width, height) combo. - Useful for generating gradients. - - Actual gradient may be changed, no tests rely on specific values. - - Used in transform.rotate lossless tests to generate a fixture. - - """ - - for l in range(width): - for t in range(height): - yield (l, t), tuple(map(rgba_between, (l, t, l, l + t))) - - -def rect_area_pts(rect): - for l in range(rect.left, rect.right): - for t in range(rect.top, rect.bottom): - yield l, t - - -def rect_perimeter_pts(rect): - """ - - Returns pts ((L, T) tuples) encompassing the perimeter of a rect. 
- - The order is clockwise: - - topleft to topright - topright to bottomright - bottomright to bottomleft - bottomleft to topleft - - Duplicate pts are not returned - - """ - clock_wise_from_top_left = ( - [(l, rect.top) for l in range(rect.left, rect.right)], - [(rect.right - 1, t) for t in range(rect.top + 1, rect.bottom)], - [(l, rect.bottom - 1) for l in range(rect.right - 2, rect.left - 1, -1)], - [(rect.left, t) for t in range(rect.bottom - 2, rect.top, -1)], - ) - - for line in clock_wise_from_top_left: - for pt in line: - yield pt - - -def rect_outer_bounds(rect): - """ - - Returns topleft outerbound if possible and then the other pts, that are - "exclusive" bounds of the rect - - ?------O - |RECT| ?|0)uterbound - |----| - O O - - """ - return ([(rect.left - 1, rect.top)] if rect.left else []) + [ - rect.topright, - rect.bottomleft, - rect.bottomright, - ] - - -def import_submodule(module): - m = __import__(module) - for n in module.split(".")[1:]: - m = getattr(m, n) - return m - - -class SurfaceSubclass(pygame.Surface): - """A subclassed Surface to test inheritance.""" - - def __init__(self, *args, **kwargs): - super(SurfaceSubclass, self).__init__(*args, **kwargs) - self.test_attribute = True - - -def test(): - """ - - Lightweight test for helpers - - """ - - r = pygame.Rect(0, 0, 10, 10) - assert rect_outer_bounds(r) == [(10, 0), (0, 10), (10, 10)] # tr # bl # br - - assert len(list(rect_area_pts(r))) == 100 - - r = pygame.Rect(0, 0, 3, 3) - assert list(rect_perimeter_pts(r)) == [ - (0, 0), - (1, 0), - (2, 0), # tl -> tr - (2, 1), - (2, 2), # tr -> br - (1, 2), - (0, 2), # br -> bl - (0, 1), # bl -> tl - ] - - print("Tests: OK") diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index c1ac907..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/arrinter.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/arrinter.cpython-39.pyc deleted file mode 100644 index 03ebcf0..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/arrinter.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/async_sub.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/async_sub.cpython-39.pyc deleted file mode 100644 index 21b0085..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/async_sub.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/buftools.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/buftools.cpython-39.pyc deleted file mode 100644 index f0e2dd5..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/buftools.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/endian.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/endian.cpython-39.pyc deleted file mode 100644 index fa0b318..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/endian.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/png.cpython-39.pyc 
b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/png.cpython-39.pyc deleted file mode 100644 index 73c26ef..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/png.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/run_tests.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/run_tests.cpython-39.pyc deleted file mode 100644 index 613a013..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/run_tests.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/test_machinery.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/test_machinery.cpython-39.pyc deleted file mode 100644 index 87d9c71..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/test_machinery.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/test_runner.cpython-39.pyc b/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/test_runner.cpython-39.pyc deleted file mode 100644 index c15d72f..0000000 Binary files a/venv/Lib/site-packages/pygame/tests/test_utils/__pycache__/test_runner.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/arrinter.py b/venv/Lib/site-packages/pygame/tests/test_utils/arrinter.py deleted file mode 100644 index 3883b45..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/arrinter.py +++ /dev/null @@ -1,441 +0,0 @@ -import sys -import ctypes -from ctypes import * -import unittest - -__all__ = [ - "PAI_CONTIGUOUS", - "PAI_FORTRAN", - "PAI_ALIGNED", - "PAI_NOTSWAPPED", - "PAI_WRITEABLE", - "PAI_ARR_HAS_DESCR", - "ArrayInterface", -] - -try: - c_ssize_t # Undefined in early Python versions -except NameError: - if sizeof(c_uint) == sizeof(c_void_p): - c_size_t = c_uint - c_ssize_t = c_int - elif sizeof(c_ulong) == sizeof(c_void_p): - c_size_t = c_ulong - c_ssize_t = c_long - elif sizeof(c_ulonglong) == sizeof(c_void_p): - c_size_t = c_ulonglong - c_ssize_t = c_longlong - - -SIZEOF_VOID_P = sizeof(c_void_p) -if SIZEOF_VOID_P <= sizeof(c_int): - Py_intptr_t = c_int -elif SIZEOF_VOID_P <= sizeof(c_long): - Py_intptr_t = c_long -elif "c_longlong" in globals() and SIZEOF_VOID_P <= sizeof(c_longlong): - Py_intptr_t = c_longlong -else: - raise RuntimeError("Unrecognized pointer size %i" % (pointer_size,)) - - -class PyArrayInterface(Structure): - _fields_ = [ - ("two", c_int), - ("nd", c_int), - ("typekind", c_char), - ("itemsize", c_int), - ("flags", c_int), - ("shape", POINTER(Py_intptr_t)), - ("strides", POINTER(Py_intptr_t)), - ("data", c_void_p), - ("descr", py_object), - ] - - -PAI_Ptr = POINTER(PyArrayInterface) - -try: - PyCObject_AsVoidPtr = pythonapi.PyCObject_AsVoidPtr -except AttributeError: - - def PyCObject_AsVoidPtr(o): - raise TypeError("Not available") - -else: - PyCObject_AsVoidPtr.restype = c_void_p - PyCObject_AsVoidPtr.argtypes = [py_object] - PyCObject_GetDesc = pythonapi.PyCObject_GetDesc - PyCObject_GetDesc.restype = c_void_p - PyCObject_GetDesc.argtypes = [py_object] - -try: - PyCapsule_IsValid = pythonapi.PyCapsule_IsValid -except AttributeError: - - def PyCapsule_IsValid(capsule, name): - return 0 - -else: - PyCapsule_IsValid.restype = c_int - PyCapsule_IsValid.argtypes = [py_object, c_char_p] - PyCapsule_GetPointer = pythonapi.PyCapsule_GetPointer - PyCapsule_GetPointer.restype = c_void_p - PyCapsule_GetPointer.argtypes 
= [py_object, c_char_p] - PyCapsule_GetContext = pythonapi.PyCapsule_GetContext - PyCapsule_GetContext.restype = c_void_p - PyCapsule_GetContext.argtypes = [py_object] - -PyCapsule_Destructor = CFUNCTYPE(None, py_object) -PyCapsule_New = pythonapi.PyCapsule_New -PyCapsule_New.restype = py_object -PyCapsule_New.argtypes = [c_void_p, c_char_p, POINTER(PyCapsule_Destructor)] - - -def capsule_new(p): - return PyCapsule_New(addressof(p), None, None) - - -PAI_CONTIGUOUS = 0x01 -PAI_FORTRAN = 0x02 -PAI_ALIGNED = 0x100 -PAI_NOTSWAPPED = 0x200 -PAI_WRITEABLE = 0x400 -PAI_ARR_HAS_DESCR = 0x800 - - -class ArrayInterface(object): - def __init__(self, arr): - try: - self._cobj = arr.__array_struct__ - except AttributeError: - raise TypeError("The array object lacks an array structure") - if not self._cobj: - raise TypeError("The array object has a NULL array structure value") - try: - vp = PyCObject_AsVoidPtr(self._cobj) - except TypeError: - if PyCapsule_IsValid(self._cobj, None): - vp = PyCapsule_GetPointer(self._cobj, None) - else: - raise TypeError("The array object has an invalid array structure") - self.desc = PyCapsule_GetContext(self._cobj) - else: - self.desc = PyCObject_GetDesc(self._cobj) - self._inter = cast(vp, PAI_Ptr)[0] - - def __getattr__(self, name): - if name == "typekind": - return self._inter.typekind.decode("latin-1") - return getattr(self._inter, name) - - def __str__(self): - if isinstance(self.desc, tuple): - ver = self.desc[0] - else: - ver = "N/A" - return ( - "nd: %i\n" - "typekind: %s\n" - "itemsize: %i\n" - "flags: %s\n" - "shape: %s\n" - "strides: %s\n" - "ver: %s\n" - % ( - self.nd, - self.typekind, - self.itemsize, - format_flags(self.flags), - format_shape(self.nd, self.shape), - format_strides(self.nd, self.strides), - ver, - ) - ) - - -def format_flags(flags): - names = [] - for flag, name in [ - (PAI_CONTIGUOUS, "CONTIGUOUS"), - (PAI_FORTRAN, "FORTRAN"), - (PAI_ALIGNED, "ALIGNED"), - (PAI_NOTSWAPPED, "NOTSWAPPED"), - (PAI_WRITEABLE, "WRITEABLE"), - (PAI_ARR_HAS_DESCR, "ARR_HAS_DESCR"), - ]: - if flag & flags: - names.append(name) - return ", ".join(names) - - -def format_shape(nd, shape): - return ", ".join([str(shape[i]) for i in range(nd)]) - - -def format_strides(nd, strides): - return ", ".join([str(strides[i]) for i in range(nd)]) - - -class Exporter(object): - def __init__( - self, shape, typekind=None, itemsize=None, strides=None, descr=None, flags=None - ): - if typekind is None: - typekind = "u" - if itemsize is None: - itemsize = 1 - if flags is None: - flags = PAI_WRITEABLE | PAI_ALIGNED | PAI_NOTSWAPPED - if descr is not None: - flags |= PAI_ARR_HAS_DESCR - if len(typekind) != 1: - raise ValueError("Argument 'typekind' must be length 1 string") - nd = len(shape) - self.typekind = typekind - self.itemsize = itemsize - self.nd = nd - self.shape = tuple(shape) - self._shape = (c_ssize_t * self.nd)(*self.shape) - if strides is None: - self._strides = (c_ssize_t * self.nd)() - self._strides[self.nd - 1] = self.itemsize - for i in range(self.nd - 1, 0, -1): - self._strides[i - 1] = self.shape[i] * self._strides[i] - strides = tuple(self._strides) - self.strides = strides - elif len(strides) == nd: - self.strides = tuple(strides) - self._strides = (c_ssize_t * self.nd)(*self.strides) - else: - raise ValueError("Mismatch in length of strides and shape") - self.descr = descr - if self.is_contiguous("C"): - flags |= PAI_CONTIGUOUS - if self.is_contiguous("F"): - flags |= PAI_FORTRAN - self.flags = flags - sz = max(shape[i] * strides[i] for i in range(nd)) - 
self._data = (c_ubyte * sz)() - self.data = addressof(self._data) - self._inter = PyArrayInterface( - 2, - nd, - typekind.encode("latin_1"), - itemsize, - flags, - self._shape, - self._strides, - self.data, - descr, - ) - self.len = itemsize - for i in range(nd): - self.len *= self.shape[i] - - __array_struct__ = property(lambda self: capsule_new(self._inter)) - - def is_contiguous(self, fortran): - if fortran in "CA": - if self.strides[-1] == self.itemsize: - for i in range(self.nd - 1, 0, -1): - if self.strides[i - 1] != self.shape[i] * self.strides[i]: - break - else: - return True - if fortran in "FA": - if self.strides[0] == self.itemsize: - for i in range(0, self.nd - 1): - if self.strides[i + 1] != self.shape[i] * self.strides[i]: - break - else: - return True - return False - - -class Array(Exporter): - _ctypes = { - ("u", 1): c_uint8, - ("u", 2): c_uint16, - ("u", 4): c_uint32, - ("u", 8): c_uint64, - ("i", 1): c_int8, - ("i", 2): c_int16, - ("i", 4): c_int32, - ("i", 8): c_int64, - } - - def __init__(self, *args, **kwds): - super(Array, self).__init__(*args, **kwds) - try: - if self.flags & PAI_NOTSWAPPED: - ct = self._ctypes[self.typekind, self.itemsize] - elif c_int.__ctype_le__ is c_int: - ct = self._ctypes[self.typekind, self.itemsize].__ctype_be__ - else: - ct = self._ctypes[self.typekind, self.itemsize].__ctype_le__ - except KeyError: - ct = c_uint8 * self.itemsize - self._ctype = ct - self._ctype_p = POINTER(ct) - - def __getitem__(self, key): - return cast(self._addr_at(key), self._ctype_p)[0] - - def __setitem__(self, key, value): - cast(self._addr_at(key), self._ctype_p)[0] = value - - def _addr_at(self, key): - if not isinstance(key, tuple): - key = (key,) - if len(key) != self.nd: - raise ValueError("wrong number of indexes") - for i in range(self.nd): - if not (0 <= key[i] < self.shape[i]): - raise IndexError("index {} out of range".format(i)) - return self.data + sum(i * s for i, s in zip(key, self.strides)) - - -class ExporterTest(unittest.TestCase): - def test_strides(self): - self.check_args(0, (10,), "u", (2,), 20, 20, 2) - self.check_args(0, (5, 3), "u", (6, 2), 30, 30, 2) - self.check_args(0, (7, 3, 5), "u", (30, 10, 2), 210, 210, 2) - self.check_args(0, (13, 5, 11, 3), "u", (330, 66, 6, 2), 4290, 4290, 2) - self.check_args(3, (7, 3, 5), "i", (2, 14, 42), 210, 210, 2) - self.check_args(3, (7, 3, 5), "x", (2, 16, 48), 210, 240, 2) - self.check_args(3, (13, 5, 11, 3), "%", (440, 88, 8, 2), 4290, 5720, 2) - self.check_args(3, (7, 5), "-", (15, 3), 105, 105, 3) - self.check_args(3, (7, 5), "*", (3, 21), 105, 105, 3) - self.check_args(3, (7, 5), " ", (3, 24), 105, 120, 3) - - def test_is_contiguous(self): - a = Exporter((10,), itemsize=2) - self.assertTrue(a.is_contiguous("C")) - self.assertTrue(a.is_contiguous("F")) - self.assertTrue(a.is_contiguous("A")) - a = Exporter((10, 4), itemsize=2) - self.assertTrue(a.is_contiguous("C")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("F")) - a = Exporter((13, 5, 11, 3), itemsize=2, strides=(330, 66, 6, 2)) - self.assertTrue(a.is_contiguous("C")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("F")) - a = Exporter((10, 4), itemsize=2, strides=(2, 20)) - self.assertTrue(a.is_contiguous("F")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("C")) - a = Exporter((13, 5, 11, 3), itemsize=2, strides=(2, 26, 130, 1430)) - self.assertTrue(a.is_contiguous("F")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("C")) - a = 
Exporter((2, 11, 6, 4), itemsize=2, strides=(576, 48, 8, 2)) - self.assertFalse(a.is_contiguous("A")) - a = Exporter((2, 11, 6, 4), itemsize=2, strides=(2, 4, 48, 288)) - self.assertFalse(a.is_contiguous("A")) - a = Exporter((3, 2, 2), itemsize=2, strides=(16, 8, 4)) - self.assertFalse(a.is_contiguous("A")) - a = Exporter((3, 2, 2), itemsize=2, strides=(4, 12, 24)) - self.assertFalse(a.is_contiguous("A")) - - def check_args( - self, call_flags, shape, typekind, strides, length, bufsize, itemsize, offset=0 - ): - if call_flags & 1: - typekind_arg = typekind - else: - typekind_arg = None - if call_flags & 2: - strides_arg = strides - else: - strides_arg = None - a = Exporter(shape, itemsize=itemsize, strides=strides_arg) - self.assertEqual(sizeof(a._data), bufsize) - self.assertEqual(a.data, ctypes.addressof(a._data) + offset) - m = ArrayInterface(a) - self.assertEqual(m.data, a.data) - self.assertEqual(m.itemsize, itemsize) - self.assertEqual(tuple(m.shape[0 : m.nd]), shape) - self.assertEqual(tuple(m.strides[0 : m.nd]), strides) - - -class ArrayTest(unittest.TestCase): - def __init__(self, *args, **kwds): - unittest.TestCase.__init__(self, *args, **kwds) - self.a = Array((20, 15), "i", 4) - - def setUp(self): - # Every test starts with a zeroed array. - memset(self.a.data, 0, sizeof(self.a._data)) - - def test__addr_at(self): - a = self.a - self.assertEqual(a._addr_at((0, 0)), a.data) - self.assertEqual(a._addr_at((0, 1)), a.data + 4) - self.assertEqual(a._addr_at((1, 0)), a.data + 60) - self.assertEqual(a._addr_at((1, 1)), a.data + 64) - - def test_indices(self): - a = self.a - self.assertEqual(a[0, 0], 0) - self.assertEqual(a[19, 0], 0) - self.assertEqual(a[0, 14], 0) - self.assertEqual(a[19, 14], 0) - self.assertEqual(a[5, 8], 0) - a[0, 0] = 12 - a[5, 8] = 99 - self.assertEqual(a[0, 0], 12) - self.assertEqual(a[5, 8], 99) - self.assertRaises(IndexError, a.__getitem__, (-1, 0)) - self.assertRaises(IndexError, a.__getitem__, (0, -1)) - self.assertRaises(IndexError, a.__getitem__, (20, 0)) - self.assertRaises(IndexError, a.__getitem__, (0, 15)) - self.assertRaises(ValueError, a.__getitem__, 0) - self.assertRaises(ValueError, a.__getitem__, (0, 0, 0)) - a = Array((3,), "i", 4) - a[1] = 333 - self.assertEqual(a[1], 333) - - def test_typekind(self): - a = Array((1,), "i", 4) - self.assertTrue(a._ctype is c_int32) - self.assertTrue(a._ctype_p is POINTER(c_int32)) - a = Array((1,), "u", 4) - self.assertTrue(a._ctype is c_uint32) - self.assertTrue(a._ctype_p is POINTER(c_uint32)) - a = Array((1,), "f", 4) # float types unsupported: size system dependent - ct = a._ctype - self.assertTrue(issubclass(ct, ctypes.Array)) - self.assertEqual(sizeof(ct), 4) - - def test_itemsize(self): - for size in [1, 2, 4, 8]: - a = Array((1,), "i", size) - ct = a._ctype - self.assertTrue(issubclass(ct, ctypes._SimpleCData)) - self.assertEqual(sizeof(ct), size) - - def test_oddball_itemsize(self): - for size in [3, 5, 6, 7, 9]: - a = Array((1,), "i", size) - ct = a._ctype - self.assertTrue(issubclass(ct, ctypes.Array)) - self.assertEqual(sizeof(ct), size) - - def test_byteswapped(self): - a = Array((1,), "u", 4, flags=(PAI_ALIGNED | PAI_WRITEABLE)) - ct = a._ctype - self.assertTrue(ct is not c_uint32) - if sys.byteorder == "little": - self.assertTrue(ct is c_uint32.__ctype_be__) - else: - self.assertTrue(ct is c_uint32.__ctype_le__) - i = 0xA0B0C0D - n = c_uint32(i) - a[0] = i - self.assertEqual(a[0], i) - self.assertEqual(a._data[0:4], cast(addressof(n), POINTER(c_uint8))[3:-1:-1]) - - -if __name__ == "__main__": - 
unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/async_sub.py b/venv/Lib/site-packages/pygame/tests/test_utils/async_sub.py deleted file mode 100644 index 4adc760..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/async_sub.py +++ /dev/null @@ -1,301 +0,0 @@ -################################################################################ -""" - -Modification of http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554 - -""" - -#################################### IMPORTS ################################### - -import os -import platform -import subprocess -import errno -import time -import sys -import unittest -import tempfile - - -def geterror(): - return sys.exc_info()[1] - - -null_byte = "\x00".encode("ascii") - -if platform.system() == "Windows": - - def encode(s): - return s.encode("ascii") - - def decode(b): - return b.decode("ascii") - - try: - import ctypes - from ctypes.wintypes import DWORD - - kernel32 = ctypes.windll.kernel32 - TerminateProcess = ctypes.windll.kernel32.TerminateProcess - - def WriteFile(handle, data, ol=None): - c_written = DWORD() - success = ctypes.windll.kernel32.WriteFile( - handle, - ctypes.create_string_buffer(encode(data)), - len(data), - ctypes.byref(c_written), - ol, - ) - return ctypes.windll.kernel32.GetLastError(), c_written.value - - def ReadFile(handle, desired_bytes, ol=None): - c_read = DWORD() - buffer = ctypes.create_string_buffer(desired_bytes + 1) - success = ctypes.windll.kernel32.ReadFile( - handle, buffer, desired_bytes, ctypes.byref(c_read), ol - ) - buffer[c_read.value] = null_byte - return ctypes.windll.kernel32.GetLastError(), decode(buffer.value) - - def PeekNamedPipe(handle, desired_bytes): - c_avail = DWORD() - c_message = DWORD() - if desired_bytes > 0: - c_read = DWORD() - buffer = ctypes.create_string_buffer(desired_bytes + 1) - success = ctypes.windll.kernel32.PeekNamedPipe( - handle, - buffer, - desired_bytes, - ctypes.byref(c_read), - ctypes.byref(c_avail), - ctypes.byref(c_message), - ) - buffer[c_read.value] = null_byte - return decode(buffer.value), c_avail.value, c_message.value - else: - success = ctypes.windll.kernel32.PeekNamedPipe( - handle, - None, - desired_bytes, - None, - ctypes.byref(c_avail), - ctypes.byref(c_message), - ) - return "", c_avail.value, c_message.value - - except ImportError: - from win32file import ReadFile, WriteFile - from win32pipe import PeekNamedPipe - from win32api import TerminateProcess - import msvcrt - -else: - from signal import SIGINT, SIGTERM, SIGKILL - import select - import fcntl - -################################### CONSTANTS ################################## - -PIPE = subprocess.PIPE - -################################################################################ - - -class Popen(subprocess.Popen): - def recv(self, maxsize=None): - return self._recv("stdout", maxsize) - - def recv_err(self, maxsize=None): - return self._recv("stderr", maxsize) - - def send_recv(self, input="", maxsize=None): - return self.send(input), self.recv(maxsize), self.recv_err(maxsize) - - def read_async(self, wait=0.1, e=1, tr=5, stderr=0): - if tr < 1: - tr = 1 - x = time.time() + wait - y = [] - r = "" - pr = self.recv - if stderr: - pr = self.recv_err - while time.time() < x or r: - r = pr() - if r is None: - if e: - raise Exception("Other end disconnected!") - else: - break - elif r: - y.append(r) - else: - time.sleep(max((x - time.time()) / tr, 0)) - return "".join(y) - - def send_all(self, data): - while len(data): - sent = self.send(data) - if sent 
is None: - raise Exception("Other end disconnected!") - data = buffer(data, sent) - - def get_conn_maxsize(self, which, maxsize): - if maxsize is None: - maxsize = 1024 - elif maxsize < 1: - maxsize = 1 - return getattr(self, which), maxsize - - def _close(self, which): - getattr(self, which).close() - setattr(self, which, None) - - if platform.system() == "Windows": - - def kill(self): - # Recipes - # http://me.in-berlin.de/doc/python/faq/windows.html#how-do-i-emulate-os-kill-in-windows - # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/347462 - - """kill function for Win32""" - TerminateProcess(int(self._handle), 0) # returns None - - def send(self, input): - if not self.stdin: - return None - - try: - x = msvcrt.get_osfhandle(self.stdin.fileno()) - (errCode, written) = WriteFile(x, input) - except ValueError: - return self._close("stdin") - except (subprocess.pywintypes.error, Exception): - if geterror()[0] in (109, errno.ESHUTDOWN): - return self._close("stdin") - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - try: - x = msvcrt.get_osfhandle(conn.fileno()) - (read, nAvail, nMessage) = PeekNamedPipe(x, 0) - if maxsize < nAvail: - nAvail = maxsize - if nAvail > 0: - (errCode, read) = ReadFile(x, nAvail, None) - except ValueError: - return self._close(which) - except (subprocess.pywintypes.error, Exception): - if geterror()[0] in (109, errno.ESHUTDOWN): - return self._close(which) - raise - - if self.universal_newlines: - # Translate newlines. For Python 3.x assume read is text. - # If bytes then another solution is needed. - read = read.replace("\r\n", "\n").replace("\r", "\n") - return read - - else: - - def kill(self): - for i, sig in enumerate([SIGTERM, SIGKILL] * 2): - if i % 2 == 0: - os.kill(self.pid, sig) - time.sleep((i * (i % 2) / 5.0) + 0.01) - - killed_pid, stat = os.waitpid(self.pid, os.WNOHANG) - if killed_pid != 0: - return - - def send(self, input): - if not self.stdin: - return None - - if not select.select([], [self.stdin], [], 0)[1]: - return 0 - - try: - written = os.write(self.stdin.fileno(), input) - except OSError: - if geterror()[0] == errno.EPIPE: # broken pipe - return self._close("stdin") - raise - - return written - - def _recv(self, which, maxsize): - conn, maxsize = self.get_conn_maxsize(which, maxsize) - if conn is None: - return None - - if not select.select([conn], [], [], 0)[0]: - return "" - - r = conn.read(maxsize) - if not r: - return self._close(which) - - if self.universal_newlines: - r = r.replace("\r\n", "\n").replace("\r", "\n") - return r - - -################################################################################ - - -def proc_in_time_or_kill(cmd, time_out, wd=None, env=None): - proc = Popen( - cmd, - cwd=wd, - env=env, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=1, - ) - - ret_code = None - response = [] - - t = time.time() - while ret_code is None and ((time.time() - t) < time_out): - ret_code = proc.poll() - response += [proc.read_async(wait=0.1, e=0)] - - if ret_code is None: - ret_code = '"Process timed out (time_out = %s secs) ' % time_out - try: - proc.kill() - ret_code += 'and was successfully terminated"' - except Exception: - ret_code += 'and termination failed (exception: %s)"' % (geterror(),) - - return ret_code, "".join(response) - - -################################################################################ - - -class AsyncTest(unittest.TestCase): 
- def test_proc_in_time_or_kill(self): - ret_code, response = proc_in_time_or_kill( - [sys.executable, "-c", "while 1: pass"], time_out=1 - ) - - self.assertIn("rocess timed out", ret_code) - self.assertIn("successfully terminated", ret_code) - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/buftools.py b/venv/Lib/site-packages/pygame/tests/test_utils/buftools.py deleted file mode 100644 index 2d2112a..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/buftools.py +++ /dev/null @@ -1,613 +0,0 @@ -"""Module pygame.tests.test_utils.array - -Export the Exporter and Importer classes. - -Class Exporter has configurable shape and strides. Exporter objects -provide a convient target for unit tests on Pygame objects and functions that -import a new buffer interface. - -Class Importer imports a buffer interface with the given PyBUF_* flags. -It returns NULL Py_buffer fields as None. The shape, strides, and suboffsets -arrays are returned as tuples of ints. All Py_buffer field properties are -read-only. This class is useful in comparing exported buffer interfaces -with the actual request. The simular Python builtin memoryview currently -does not support configurable PyBUF_* flags. - -This module contains its own unit tests. When Pygame is installed, these tests -can be run with the following command line statement: - -python -m pygame.tests.test_utils.array - -""" -import pygame - -if not pygame.HAVE_NEWBUF: - emsg = "This Pygame build does not support the new buffer protocol" - raise ImportError(emsg) -import pygame.newbuffer -from pygame.newbuffer import ( - PyBUF_SIMPLE, - PyBUF_FORMAT, - PyBUF_ND, - PyBUF_WRITABLE, - PyBUF_STRIDES, - PyBUF_C_CONTIGUOUS, - PyBUF_F_CONTIGUOUS, - PyBUF_ANY_CONTIGUOUS, - PyBUF_INDIRECT, - PyBUF_STRIDED, - PyBUF_STRIDED_RO, - PyBUF_RECORDS, - PyBUF_RECORDS_RO, - PyBUF_FULL, - PyBUF_FULL_RO, - PyBUF_CONTIG, - PyBUF_CONTIG_RO, -) - -import unittest -import sys -import ctypes -import operator - -try: - reduce -except NameError: - from functools import reduce - -__all__ = ["Exporter", "Importer"] - -try: - ctypes.c_ssize_t -except AttributeError: - void_p_sz = ctypes.sizeof(ctypes.c_void_p) - if ctypes.sizeof(ctypes.c_short) == void_p_sz: - ctypes.c_ssize_t = ctypes.c_short - elif ctypes.sizeof(ctypes.c_int) == void_p_sz: - ctypes.c_ssize_t = ctypes.c_int - elif ctypes.sizeof(ctypes.c_long) == void_p_sz: - ctypes.c_ssize_t = ctypes.c_long - elif ctypes.sizeof(ctypes.c_longlong) == void_p_sz: - ctypes.c_ssize_t = ctypes.c_longlong - else: - raise RuntimeError("Cannot set c_ssize_t: sizeof(void *) is %i" % void_p_sz) - - -def _prop_get(fn): - return property(fn) - - -class Exporter(pygame.newbuffer.BufferMixin): - """An object that exports a multi-dimension new buffer interface - - The only array operation this type supports is to export a buffer. 
- """ - - prefixes = { - "@": "", - "=": "=", - "<": "=", - ">": "=", - "!": "=", - "2": "2", - "3": "3", - "4": "4", - "5": "5", - "6": "6", - "7": "7", - "8": "8", - "9": "9", - } - types = { - "c": ctypes.c_char, - "b": ctypes.c_byte, - "B": ctypes.c_ubyte, - "=c": ctypes.c_int8, - "=b": ctypes.c_int8, - "=B": ctypes.c_uint8, - "?": ctypes.c_bool, - "=?": ctypes.c_int8, - "h": ctypes.c_short, - "H": ctypes.c_ushort, - "=h": ctypes.c_int16, - "=H": ctypes.c_uint16, - "i": ctypes.c_int, - "I": ctypes.c_uint, - "=i": ctypes.c_int32, - "=I": ctypes.c_uint32, - "l": ctypes.c_long, - "L": ctypes.c_ulong, - "=l": ctypes.c_int32, - "=L": ctypes.c_uint32, - "q": ctypes.c_longlong, - "Q": ctypes.c_ulonglong, - "=q": ctypes.c_int64, - "=Q": ctypes.c_uint64, - "f": ctypes.c_float, - "d": ctypes.c_double, - "P": ctypes.c_void_p, - "x": ctypes.c_ubyte * 1, - "2x": ctypes.c_ubyte * 2, - "3x": ctypes.c_ubyte * 3, - "4x": ctypes.c_ubyte * 4, - "5x": ctypes.c_ubyte * 5, - "6x": ctypes.c_ubyte * 6, - "7x": ctypes.c_ubyte * 7, - "8x": ctypes.c_ubyte * 8, - "9x": ctypes.c_ubyte * 9, - } - - def __init__(self, shape, format=None, strides=None, readonly=None, itemsize=None): - if format is None: - format = "B" - if readonly is None: - readonly = False - prefix = "" - typecode = "" - i = 0 - if i < len(format): - try: - prefix = self.prefixes[format[i]] - i += 1 - except LookupError: - pass - if i < len(format) and format[i] == "1": - i += 1 - if i == len(format) - 1: - typecode = format[i] - if itemsize is None: - try: - itemsize = ctypes.sizeof(self.types[prefix + typecode]) - except KeyError: - raise ValueError("Unknown item format '" + format + "'") - self.readonly = bool(readonly) - self.format = format - self._format = ctypes.create_string_buffer(format.encode("latin_1")) - self.ndim = len(shape) - self.itemsize = itemsize - self.len = reduce(operator.mul, shape, 1) * self.itemsize - self.shape = tuple(shape) - self._shape = (ctypes.c_ssize_t * self.ndim)(*self.shape) - if strides is None: - self._strides = (ctypes.c_ssize_t * self.ndim)() - self._strides[self.ndim - 1] = itemsize - for i in range(self.ndim - 1, 0, -1): - self._strides[i - 1] = self.shape[i] * self._strides[i] - self.strides = tuple(self._strides) - elif len(strides) == self.ndim: - self.strides = tuple(strides) - self._strides = (ctypes.c_ssize_t * self.ndim)(*self.strides) - else: - raise ValueError("Mismatch in length of strides and shape") - buflen = max(d * abs(s) for d, s in zip(self.shape, self.strides)) - self.buflen = buflen - self._buf = (ctypes.c_ubyte * buflen)() - offset = sum( - (d - 1) * abs(s) for d, s in zip(self.shape, self.strides) if s < 0 - ) - self.buf = ctypes.addressof(self._buf) + offset - - def buffer_info(self): - return (addressof(self.buffer), self.shape[0]) - - def tobytes(self): - return cast(self.buffer, POINTER(c_char))[0 : self._len] - - def __len__(self): - return self.shape[0] - - def _get_buffer(self, view, flags): - from ctypes import addressof - - if (flags & PyBUF_WRITABLE) == PyBUF_WRITABLE and self.readonly: - raise BufferError("buffer is read-only") - if ( - flags & PyBUF_C_CONTIGUOUS - ) == PyBUF_C_CONTIGUOUS and not self.is_contiguous("C"): - raise BufferError("data is not C contiguous") - if ( - flags & PyBUF_F_CONTIGUOUS - ) == PyBUF_F_CONTIGUOUS and not self.is_contiguous("F"): - raise BufferError("data is not F contiguous") - if ( - flags & PyBUF_ANY_CONTIGUOUS - ) == PyBUF_ANY_CONTIGUOUS and not self.is_contiguous("A"): - raise BufferError("data is not contiguous") - view.buf = self.buf - 
view.readonly = self.readonly - view.len = self.len - if flags | PyBUF_WRITABLE == PyBUF_WRITABLE: - view.ndim = 0 - else: - view.ndim = self.ndim - view.itemsize = self.itemsize - if (flags & PyBUF_FORMAT) == PyBUF_FORMAT: - view.format = addressof(self._format) - else: - view.format = None - if (flags & PyBUF_ND) == PyBUF_ND: - view.shape = addressof(self._shape) - elif self.is_contiguous("C"): - view.shape = None - else: - raise BufferError( - "shape required for {} dimensional data".format(self.ndim) - ) - if (flags & PyBUF_STRIDES) == PyBUF_STRIDES: - view.strides = ctypes.addressof(self._strides) - elif view.shape is None or self.is_contiguous("C"): - view.strides = None - else: - raise BufferError("strides required for none C contiguous data") - view.suboffsets = None - view.internal = None - view.obj = self - - def is_contiguous(self, fortran): - if fortran in "CA": - if self.strides[-1] == self.itemsize: - for i in range(self.ndim - 1, 0, -1): - if self.strides[i - 1] != self.shape[i] * self.strides[i]: - break - else: - return True - if fortran in "FA": - if self.strides[0] == self.itemsize: - for i in range(0, self.ndim - 1): - if self.strides[i + 1] != self.shape[i] * self.strides[i]: - break - else: - return True - return False - - -class Importer(object): - """An object that imports a new buffer interface - - The fields of the Py_buffer C struct are exposed by identically - named Importer read-only properties. - """ - - def __init__(self, obj, flags): - self._view = pygame.newbuffer.Py_buffer() - self._view.get_buffer(obj, flags) - - @property - def obj(self): - """return object or None for NULL field""" - return self._view.obj - - @property - def buf(self): - """return int or None for NULL field""" - return self._view.buf - - @property - def len(self): - """return int""" - return self._view.len - - @property - def readonly(self): - """return bool""" - return self._view.readonly - - @property - def format(self): - """return bytes or None for NULL field""" - format_addr = self._view.format - if format_addr is None: - return None - return ctypes.cast(format_addr, ctypes.c_char_p).value.decode("ascii") - - @property - def itemsize(self): - """return int""" - return self._view.itemsize - - @property - def ndim(self): - """return int""" - return self._view.ndim - - @property - def shape(self): - """return int tuple or None for NULL field""" - return self._to_ssize_tuple(self._view.shape) - - @property - def strides(self): - """return int tuple or None for NULL field""" - return self._to_ssize_tuple(self._view.strides) - - @property - def suboffsets(self): - """return int tuple or None for NULL field""" - return self._to_ssize_tuple(self._view.suboffsets) - - @property - def internal(self): - """return int or None for NULL field""" - return self._view.internal - - def _to_ssize_tuple(self, addr): - from ctypes import cast, POINTER, c_ssize_t - - if addr is None: - return None - return tuple(cast(addr, POINTER(c_ssize_t))[0 : self._view.ndim]) - - -class ExporterTest(unittest.TestCase): - """Class Exporter unit tests""" - - def test_formats(self): - char_sz = ctypes.sizeof(ctypes.c_char) - short_sz = ctypes.sizeof(ctypes.c_short) - int_sz = ctypes.sizeof(ctypes.c_int) - long_sz = ctypes.sizeof(ctypes.c_long) - longlong_sz = ctypes.sizeof(ctypes.c_longlong) - float_sz = ctypes.sizeof(ctypes.c_float) - double_sz = ctypes.sizeof(ctypes.c_double) - voidp_sz = ctypes.sizeof(ctypes.c_void_p) - bool_sz = ctypes.sizeof(ctypes.c_bool) - - self.check_args(0, (1,), "B", (1,), 1, 1, 1) - 
self.check_args(1, (1,), "b", (1,), 1, 1, 1) - self.check_args(1, (1,), "B", (1,), 1, 1, 1) - self.check_args(1, (1,), "c", (char_sz,), char_sz, char_sz, char_sz) - self.check_args(1, (1,), "h", (short_sz,), short_sz, short_sz, short_sz) - self.check_args(1, (1,), "H", (short_sz,), short_sz, short_sz, short_sz) - self.check_args(1, (1,), "i", (int_sz,), int_sz, int_sz, int_sz) - self.check_args(1, (1,), "I", (int_sz,), int_sz, int_sz, int_sz) - self.check_args(1, (1,), "l", (long_sz,), long_sz, long_sz, long_sz) - self.check_args(1, (1,), "L", (long_sz,), long_sz, long_sz, long_sz) - self.check_args( - 1, (1,), "q", (longlong_sz,), longlong_sz, longlong_sz, longlong_sz - ) - self.check_args( - 1, (1,), "Q", (longlong_sz,), longlong_sz, longlong_sz, longlong_sz - ) - self.check_args(1, (1,), "f", (float_sz,), float_sz, float_sz, float_sz) - self.check_args(1, (1,), "d", (double_sz,), double_sz, double_sz, double_sz) - self.check_args(1, (1,), "x", (1,), 1, 1, 1) - self.check_args(1, (1,), "P", (voidp_sz,), voidp_sz, voidp_sz, voidp_sz) - self.check_args(1, (1,), "?", (bool_sz,), bool_sz, bool_sz, bool_sz) - self.check_args(1, (1,), "@b", (1,), 1, 1, 1) - self.check_args(1, (1,), "@B", (1,), 1, 1, 1) - self.check_args(1, (1,), "@c", (char_sz,), char_sz, char_sz, char_sz) - self.check_args(1, (1,), "@h", (short_sz,), short_sz, short_sz, short_sz) - self.check_args(1, (1,), "@H", (short_sz,), short_sz, short_sz, short_sz) - self.check_args(1, (1,), "@i", (int_sz,), int_sz, int_sz, int_sz) - self.check_args(1, (1,), "@I", (int_sz,), int_sz, int_sz, int_sz) - self.check_args(1, (1,), "@l", (long_sz,), long_sz, long_sz, long_sz) - self.check_args(1, (1,), "@L", (long_sz,), long_sz, long_sz, long_sz) - self.check_args( - 1, (1,), "@q", (longlong_sz,), longlong_sz, longlong_sz, longlong_sz - ) - self.check_args( - 1, (1,), "@Q", (longlong_sz,), longlong_sz, longlong_sz, longlong_sz - ) - self.check_args(1, (1,), "@f", (float_sz,), float_sz, float_sz, float_sz) - self.check_args(1, (1,), "@d", (double_sz,), double_sz, double_sz, double_sz) - self.check_args(1, (1,), "@?", (bool_sz,), bool_sz, bool_sz, bool_sz) - self.check_args(1, (1,), "=b", (1,), 1, 1, 1) - self.check_args(1, (1,), "=B", (1,), 1, 1, 1) - self.check_args(1, (1,), "=c", (1,), 1, 1, 1) - self.check_args(1, (1,), "=h", (2,), 2, 2, 2) - self.check_args(1, (1,), "=H", (2,), 2, 2, 2) - self.check_args(1, (1,), "=i", (4,), 4, 4, 4) - self.check_args(1, (1,), "=I", (4,), 4, 4, 4) - self.check_args(1, (1,), "=l", (4,), 4, 4, 4) - self.check_args(1, (1,), "=L", (4,), 4, 4, 4) - self.check_args(1, (1,), "=q", (8,), 8, 8, 8) - self.check_args(1, (1,), "=Q", (8,), 8, 8, 8) - self.check_args(1, (1,), "=?", (1,), 1, 1, 1) - self.check_args(1, (1,), "h", (2,), 2, 2, 2) - self.check_args(1, (1,), "!h", (2,), 2, 2, 2) - self.check_args(1, (1,), "q", (8,), 8, 8, 8) - self.check_args(1, (1,), "!q", (8,), 8, 8, 8) - self.check_args(1, (1,), "1x", (1,), 1, 1, 1) - self.check_args(1, (1,), "2x", (2,), 2, 2, 2) - self.check_args(1, (1,), "3x", (3,), 3, 3, 3) - self.check_args(1, (1,), "4x", (4,), 4, 4, 4) - self.check_args(1, (1,), "5x", (5,), 5, 5, 5) - self.check_args(1, (1,), "6x", (6,), 6, 6, 6) - self.check_args(1, (1,), "7x", (7,), 7, 7, 7) - self.check_args(1, (1,), "8x", (8,), 8, 8, 8) - self.check_args(1, (1,), "9x", (9,), 9, 9, 9) - self.check_args(1, (1,), "1h", (2,), 2, 2, 2) - self.check_args(1, (1,), "=1h", (2,), 2, 2, 2) - self.assertRaises(ValueError, Exporter, (2, 1), "") - self.assertRaises(ValueError, Exporter, (2, 1), "W") - 
self.assertRaises(ValueError, Exporter, (2, 1), "^Q") - self.assertRaises(ValueError, Exporter, (2, 1), "=W") - self.assertRaises(ValueError, Exporter, (2, 1), "=f") - self.assertRaises(ValueError, Exporter, (2, 1), "=d") - self.assertRaises(ValueError, Exporter, (2, 1), "<f") - self.assertRaises(ValueError, Exporter, (2, 1), "<d") - self.assertRaises(ValueError, Exporter, (2, 1), ">f") - self.assertRaises(ValueError, Exporter, (2, 1), ">d") - self.assertRaises(ValueError, Exporter, (2, 1), "!f") - self.assertRaises(ValueError, Exporter, (2, 1), "!d") - self.assertRaises(ValueError, Exporter, (2, 1), "0x") - self.assertRaises(ValueError, Exporter, (2, 1), "11x") - self.assertRaises(ValueError, Exporter, (2, 1), "BB") - - def test_strides(self): - self.check_args(1, (10,), "=h", (2,), 20, 20, 2) - self.check_args(1, (5, 3), "=h", (6, 2), 30, 30, 2) - self.check_args(1, (7, 3, 5), "=h", (30, 10, 2), 210, 210, 2) - self.check_args(1, (13, 5, 11, 3), "=h", (330, 66, 6, 2), 4290, 4290, 2) - self.check_args(3, (7, 3, 5), "=h", (2, 14, 42), 210, 210, 2) - self.check_args(3, (7, 3, 5), "=h", (2, 16, 48), 210, 240, 2) - self.check_args(3, (13, 5, 11, 3), "=h", (440, 88, 8, 2), 4290, 5720, 2) - self.check_args(3, (7, 5), "3x", (15, 3), 105, 105, 3) - self.check_args(3, (7, 5), "3x", (3, 21), 105, 105, 3) - self.check_args(3, (7, 5), "3x", (3, 24), 105, 120, 3) - - def test_readonly(self): - a = Exporter((2,), "h", readonly=True) - self.assertTrue(a.readonly) - b = Importer(a, PyBUF_STRIDED_RO) - self.assertRaises(BufferError, Importer, a, PyBUF_STRIDED) - b = Importer(a, PyBUF_STRIDED_RO) - - def test_is_contiguous(self): - a = Exporter((10,), "=h") - self.assertTrue(a.is_contiguous("C")) - self.assertTrue(a.is_contiguous("F")) - self.assertTrue(a.is_contiguous("A")) - a = Exporter((10, 4), "=h") - self.assertTrue(a.is_contiguous("C")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("F")) - a = Exporter((13, 5, 11, 3), "=h", (330, 66, 6, 2)) - self.assertTrue(a.is_contiguous("C")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("F")) - a = Exporter((10, 4), "=h", (2, 20)) - self.assertTrue(a.is_contiguous("F")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("C")) - a = Exporter((13, 5, 11, 3), "=h", (2, 26, 130, 1430)) - self.assertTrue(a.is_contiguous("F")) - self.assertTrue(a.is_contiguous("A")) - self.assertFalse(a.is_contiguous("C")) - a = Exporter((2, 11, 6, 4), "=h", (576, 48, 8, 2)) - self.assertFalse(a.is_contiguous("A")) - a = Exporter((2, 11, 6, 4), "=h", (2, 4, 48, 288)) - self.assertFalse(a.is_contiguous("A")) - a = Exporter((3, 2, 2), "=h", (16, 8, 4)) - self.assertFalse(a.is_contiguous("A")) - a = Exporter((3, 2, 2), "=h", (4, 12, 24)) - self.assertFalse(a.is_contiguous("A")) - - def test_PyBUF_flags(self): - a = Exporter((10, 2), "d") - b = Importer(a, PyBUF_SIMPLE) - self.assertTrue(b.obj is a) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.len) - self.assertEqual(b.itemsize, a.itemsize) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.internal is None) - self.assertFalse(b.readonly) - b = Importer(a, PyBUF_WRITABLE) - self.assertTrue(b.obj is a) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.len) - self.assertEqual(b.itemsize, a.itemsize) - self.assertTrue(b.shape is None) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.internal is None) - self.assertFalse(b.readonly) - b = Importer(a, PyBUF_ND) - self.assertTrue(b.obj is a) -
self.assertTrue(b.format is None) - self.assertEqual(b.len, a.len) - self.assertEqual(b.itemsize, a.itemsize) - self.assertEqual(b.shape, a.shape) - self.assertTrue(b.strides is None) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.internal is None) - self.assertFalse(b.readonly) - a = Exporter((5, 10), "=h", (24, 2)) - b = Importer(a, PyBUF_STRIDES) - self.assertTrue(b.obj is a) - self.assertTrue(b.format is None) - self.assertEqual(b.len, a.len) - self.assertEqual(b.itemsize, a.itemsize) - self.assertEqual(b.shape, a.shape) - self.assertEqual(b.strides, a.strides) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.internal is None) - self.assertFalse(b.readonly) - b = Importer(a, PyBUF_FULL) - self.assertTrue(b.obj is a) - self.assertEqual(b.format, "=h") - self.assertEqual(b.len, a.len) - self.assertEqual(b.itemsize, a.itemsize) - self.assertEqual(b.shape, a.shape) - self.assertEqual(b.strides, a.strides) - self.assertTrue(b.suboffsets is None) - self.assertTrue(b.internal is None) - self.assertFalse(b.readonly) - self.assertRaises(BufferError, Importer, a, PyBUF_SIMPLE) - self.assertRaises(BufferError, Importer, a, PyBUF_WRITABLE) - self.assertRaises(BufferError, Importer, a, PyBUF_ND) - self.assertRaises(BufferError, Importer, a, PyBUF_C_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, PyBUF_F_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, PyBUF_ANY_CONTIGUOUS) - self.assertRaises(BufferError, Importer, a, PyBUF_CONTIG) - - def test_negative_strides(self): - self.check_args(3, (3, 5, 4), "B", (20, 4, -1), 60, 60, 1, 3) - self.check_args(3, (3, 5, 3), "B", (20, 4, -1), 45, 60, 1, 2) - self.check_args(3, (3, 5, 4), "B", (20, -4, 1), 60, 60, 1, 16) - self.check_args(3, (3, 5, 4), "B", (-20, -4, -1), 60, 60, 1, 59) - self.check_args(3, (3, 5, 3), "B", (-20, -4, -1), 45, 60, 1, 58) - - def test_attributes(self): - a = Exporter((13, 5, 11, 3), "=h", (440, 88, 8, 2)) - self.assertEqual(a.ndim, 4) - self.assertEqual(a.itemsize, 2) - self.assertFalse(a.readonly) - self.assertEqual(a.shape, (13, 5, 11, 3)) - self.assertEqual(a.format, "=h") - self.assertEqual(a.strides, (440, 88, 8, 2)) - self.assertEqual(a.len, 4290) - self.assertEqual(a.buflen, 5720) - self.assertEqual(a.buf, ctypes.addressof(a._buf)) - a = Exporter((8,)) - self.assertEqual(a.ndim, 1) - self.assertEqual(a.itemsize, 1) - self.assertFalse(a.readonly) - self.assertEqual(a.shape, (8,)) - self.assertEqual(a.format, "B") - self.assertTrue(isinstance(a.strides, tuple)) - self.assertEqual(a.strides, (1,)) - self.assertEqual(a.len, 8) - self.assertEqual(a.buflen, 8) - a = Exporter([13, 5, 11, 3], "=h", [440, 88, 8, 2]) - self.assertTrue(isinstance(a.shape, tuple)) - self.assertTrue(isinstance(a.strides, tuple)) - self.assertEqual(a.shape, (13, 5, 11, 3)) - self.assertEqual(a.strides, (440, 88, 8, 2)) - - def test_itemsize(self): - exp = Exporter((4, 5), format="B", itemsize=8) - imp = Importer(exp, PyBUF_RECORDS) - self.assertEqual(imp.itemsize, 8) - self.assertEqual(imp.format, "B") - self.assertEqual(imp.strides, (40, 8)) - exp = Exporter((4, 5), format="weird", itemsize=5) - imp = Importer(exp, PyBUF_RECORDS) - self.assertEqual(imp.itemsize, 5) - self.assertEqual(imp.format, "weird") - self.assertEqual(imp.strides, (25, 5)) - - def check_args( - self, call_flags, shape, format, strides, length, bufsize, itemsize, offset=0 - ): - format_arg = format if call_flags & 1 else None - strides_arg = strides if call_flags & 2 else None - a = Exporter(shape, format_arg, strides_arg) - 
self.assertEqual(a.buflen, bufsize) - self.assertEqual(a.buf, ctypes.addressof(a._buf) + offset) - m = Importer(a, PyBUF_RECORDS_RO) - self.assertEqual(m.buf, a.buf) - self.assertEqual(m.len, length) - self.assertEqual(m.format, format) - self.assertEqual(m.itemsize, itemsize) - self.assertEqual(m.shape, shape) - self.assertEqual(m.strides, strides) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/endian.py b/venv/Lib/site-packages/pygame/tests/test_utils/endian.py deleted file mode 100644 index 64ba1b3..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/endian.py +++ /dev/null @@ -1,20 +0,0 @@ -# Module pygame.tests.test_utils.endian -# -# Machine independent conversion to little-endian and big-endian Python -# integer values. - -import struct - - -def little_endian_uint32(i): - """Return the 32 bit unsigned integer little-endian representation of i""" - - s = struct.pack("<I", i) - return struct.unpack("=I", s)[0] - - -def big_endian_uint32(i): - """Return the 32 bit unsigned integer big-endian representation of i""" - - s = struct.pack(">I", i) - return struct.unpack("=I", s)[0] diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/png.py b/venv/Lib/site-packages/pygame/tests/test_utils/png.py deleted file mode 100644 index 74df9fd..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/png.py +++ /dev/null @@ -1,4001 +0,0 @@ -#!/usr/bin/env python - -# $URL: http://pypng.googlecode.com/svn/trunk/code/png.py $ -# $Rev: 228 $ - -# png.py - PNG encoder/decoder in pure Python -# -# Modified for Pygame in Oct., 2012 to work with Python 3.x. -# -# Copyright (C) 2006 Johann C. Rocholl -# Portions Copyright (C) 2009 David Jones -# And probably portions Copyright (C) 2006 Nicko van Someren -# -# Original concept by Johann C. Rocholl. -# -# LICENSE (The MIT License) -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation files -# (the "Software"), to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, merge, -# publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# -# Changelog (recent first): -# 2009-03-11 David: interlaced bit depth < 8 (writing). -# 2009-03-10 David: interlaced bit depth < 8 (reading). -# 2009-03-04 David: Flat and Boxed pixel formats. -# 2009-02-26 David: Palette support (writing). -# 2009-02-23 David: Bit-depths < 8; better PNM support. -# 2006-06-17 Nicko: Reworked into a class, faster interlacing. -# 2006-06-17 Johann: Very simple prototype PNG decoder. -# 2006-06-17 Nicko: Test suite with various image generators. -# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support. -# 2006-06-15 Johann: Scanline iterator interface for large input files. -# 2006-06-09 Johann: Very simple prototype PNG encoder.
- -# Incorporated into Bangai-O Development Tools by drj on 2009-02-11 from -# http://trac.browsershots.org/browser/trunk/pypng/lib/png.py?rev=2885 - -# Incorporated into pypng by drj on 2009-03-12 from -# //depot/prj/bangaio/master/code/png.py#67 - - -""" -Pure Python PNG Reader/Writer - -This Python module implements support for PNG images (see PNG -specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads -and writes PNG files with all allowable bit depths (1/2/4/8/16/24/32/48/64 -bits per pixel) and colour combinations: greyscale (1/2/4/8/16 bit); RGB, -RGBA, LA (greyscale with alpha) with 8/16 bits per channel; colour mapped -images (1/2/4/8 bit). Adam7 interlacing is supported for reading and -writing. A number of optional chunks can be specified (when writing) -and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``. - -For help, type ``import png; help(png)`` in your python interpreter. - -A good place to start is the :class:`Reader` and :class:`Writer` classes. - -This file can also be used as a command-line utility to convert -`Netpbm `_ PNM files to PNG, and the reverse conversion from PNG to -PNM. The interface is similar to that of the ``pnmtopng`` program from -Netpbm. Type ``python png.py --help`` at the shell prompt -for usage and a list of options. - -A note on spelling and terminology ----------------------------------- - -Generally British English spelling is used in the documentation. So -that's "greyscale" and "colour". This not only matches the author's -native language, it's also used by the PNG specification. - -The major colour models supported by PNG (and hence by PyPNG) are: -greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes -referred to using the abbreviations: L, RGB, LA, RGBA. In this case -each letter abbreviates a single channel: *L* is for Luminance or Luma or -Lightness which is the channel used in greyscale images; *R*, *G*, *B* stand -for Red, Green, Blue, the components of a colour image; *A* stands for -Alpha, the opacity channel (used for transparency effects, but higher -values are more opaque, so it makes sense to call it opacity). - -A note on formats ------------------ - -When getting pixel data out of this module (reading) and presenting -data to this module (writing) there are a number of ways the data could -be represented as a Python value. Generally this module uses one of -three formats called "flat row flat pixel", "boxed row flat pixel", and -"boxed row boxed pixel". Basically the concern is whether each pixel -and each row comes in its own little tuple (box), or not. - -Consider an image that is 3 pixels wide by 2 pixels high, and each pixel -has RGB components: - -Boxed row flat pixel:: - - list([R,G,B, R,G,B, R,G,B], - [R,G,B, R,G,B, R,G,B]) - -Each row appears as its own list, but the pixels are flattened so that -three values for one pixel simply follow the three values for the previous -pixel. This is the most common format used, because it provides a good -compromise between space and convenience. PyPNG regards itself as -at liberty to replace any sequence type with any sufficiently compatible -other sequence type; in practice each row is an array (from the array -module), and the outer list is sometimes an iterator rather than an -explicit list (so that streaming is possible). - -Flat row flat pixel:: - - [R,G,B, R,G,B, R,G,B, - R,G,B, R,G,B, R,G,B] - -The entire image is one single giant sequence of colour values. -Generally an array will be used (to save space), not a list. 
- -Boxed row boxed pixel:: - - list([ (R,G,B), (R,G,B), (R,G,B) ], - [ (R,G,B), (R,G,B), (R,G,B) ]) - -Each row appears in its own list, but each pixel also appears in its own -tuple. A serious memory burn in Python. - -In all cases the top row comes first, and for each row the pixels are -ordered from left-to-right. Within a pixel the values appear in the -order, R-G-B-A (or L-A for greyscale--alpha). - -There is a fourth format, mentioned because it is used internally, -is close to what lies inside a PNG file itself, and has some support -from the public API. This format is called packed. When packed, -each row is a sequence of bytes (integers from 0 to 255), just as -it is before PNG scanline filtering is applied. When the bit depth -is 8 this is essentially the same as boxed row flat pixel; when the -bit depth is less than 8, several pixels are packed into each byte; -when the bit depth is 16 (the only value more than 8 that is supported -by the PNG image format) each pixel value is decomposed into 2 bytes -(and `packed` is a misnomer). This format is used by the -:meth:`Writer.write_packed` method. It isn't usually a convenient -format, but may be just right if the source data for the PNG image -comes from something that uses a similar format (for example, 1-bit -BMPs, or another PNG file). - -And now, my famous members --------------------------- -""" - -__version__ = "$URL: http://pypng.googlecode.com/svn/trunk/code/png.py $ $Rev: 228 $" - -from array import array -from pygame.tests.test_utils import tostring -import itertools -import math -import operator -import struct -import sys -import zlib -import warnings - -__all__ = ["Image", "Reader", "Writer", "write_chunks", "from_array"] - - -# The PNG signature. -# http://www.w3.org/TR/PNG/#5PNG-file-signature -_signature = struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10) - -_adam7 = ( - (0, 0, 8, 8), - (4, 0, 8, 8), - (0, 4, 4, 8), - (2, 0, 4, 4), - (0, 2, 2, 4), - (1, 0, 2, 2), - (0, 1, 1, 2), -) - - -def group(s, n): - # See - # http://www.python.org/doc/2.6/library/functions.html#zip - return zip(*[iter(s)] * n) - - -def isarray(x): - """Same as ``isinstance(x, array)``.""" - return isinstance(x, array) - - -# Conditionally convert to bytes. Works on Python 2 and Python 3. -try: - bytes("", "ascii") - - def strtobytes(x): - return bytes(x, "iso8859-1") - - def bytestostr(x): - return str(x, "iso8859-1") - -except: - strtobytes = str - bytestostr = str - - -def interleave_planes(ipixels, apixels, ipsize, apsize): - """ - Interleave (colour) planes, e.g. RGB + A = RGBA. - - Return an array of pixels consisting of the `ipsize` elements of data - from each pixel in `ipixels` followed by the `apsize` elements of data - from each pixel in `apixels`. Conventionally `ipixels` and - `apixels` are byte arrays so the sizes are bytes, but it actually - works with any arrays of the same type. The returned array is the - same type as the input arrays which should be the same type as each other. 
- """ - - itotal = len(ipixels) - atotal = len(apixels) - newtotal = itotal + atotal - newpsize = ipsize + apsize - # Set up the output buffer - # See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356 - out = array(ipixels.typecode) - # It's annoying that there is no cheap way to set the array size :-( - out.extend(ipixels) - out.extend(apixels) - # Interleave in the pixel data - for i in range(ipsize): - out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize] - for i in range(apsize): - out[i + ipsize : newtotal : newpsize] = apixels[i:atotal:apsize] - return out - - -def check_palette(palette): - """Check a palette argument (to the :class:`Writer` class) for validity. - Returns the palette as a list if okay; raises an exception otherwise. - """ - - # None is the default and is allowed. - if palette is None: - return None - - p = list(palette) - if not (0 < len(p) <= 256): - raise ValueError("a palette must have between 1 and 256 entries") - seen_triple = False - for i, t in enumerate(p): - if len(t) not in (3, 4): - raise ValueError("palette entry %d: entries must be 3- or 4-tuples." % i) - if len(t) == 3: - seen_triple = True - if seen_triple and len(t) == 4: - raise ValueError( - "palette entry %d: all 4-tuples must precede all 3-tuples" % i - ) - for x in t: - if int(x) != x or not (0 <= x <= 255): - raise ValueError( - "palette entry %d: values must be integer: 0 <= x <= 255" % i - ) - return p - - -class Error(Exception): - prefix = "Error" - - def __str__(self): - return self.prefix + ": " + " ".join(self.args) - - -class FormatError(Error): - """Problem with input file format. In other words, PNG file does - not conform to the specification in some way and is invalid. - """ - - prefix = "FormatError" - - -class ChunkError(FormatError): - prefix = "ChunkError" - - -class Writer: - """ - PNG encoder in pure Python. - """ - - def __init__( - self, - width=None, - height=None, - size=None, - greyscale=False, - alpha=False, - bitdepth=8, - palette=None, - transparent=None, - background=None, - gamma=None, - compression=None, - interlace=False, - bytes_per_sample=None, # deprecated - planes=None, - colormap=None, - maxval=None, - chunk_limit=2 ** 20, - ): - """ - Create a PNG encoder object. - - Arguments: - - width, height - Image size in pixels, as two separate arguments. - size - Image size (w,h) in pixels, as single argument. - greyscale - Input data is greyscale, not RGB. - alpha - Input data has alpha channel (RGBA or LA). - bitdepth - Bit depth: from 1 to 16. - palette - Create a palette for a colour mapped image (colour type 3). - transparent - Specify a transparent colour (create a ``tRNS`` chunk). - background - Specify a default background colour (create a ``bKGD`` chunk). - gamma - Specify a gamma value (create a ``gAMA`` chunk). - compression - zlib compression level (1-9). - interlace - Create an interlaced image. - chunk_limit - Write multiple ``IDAT`` chunks to save memory. - - The image size (in pixels) can be specified either by using the - `width` and `height` arguments, or with the single `size` - argument. If `size` is used it should be a pair (*width*, - *height*). - - `greyscale` and `alpha` are booleans that specify whether - an image is greyscale (or colour), and whether it has an - alpha channel (or not). - - `bitdepth` specifies the bit depth of the source pixel values. - Each source pixel value must be an integer between 0 and - ``2**bitdepth-1``. For example, 8-bit images have values - between 0 and 255. 
PNG only stores images with bit depths of - 1,2,4,8, or 16. When `bitdepth` is not one of these values, - the next highest valid bit depth is selected, and an ``sBIT`` - (significant bits) chunk is generated that specifies the original - precision of the source image. In this case the supplied pixel - values will be rescaled to fit the range of the selected bit depth. - - The details of which bit depth / colour model combinations the - PNG file format supports directly, are somewhat arcane - (refer to the PNG specification for full details). Briefly: - "small" bit depths (1,2,4) are only allowed with greyscale and - colour mapped images; colour mapped images cannot have bit depth - 16. - - For colour mapped images (in other words, when the `palette` - argument is specified) the `bitdepth` argument must match one of - the valid PNG bit depths: 1, 2, 4, or 8. (It is valid to have a - PNG image with a palette and an ``sBIT`` chunk, but the meaning - is slightly different; it would be awkward to press the - `bitdepth` argument into service for this.) - - The `palette` option, when specified, causes a colour mapped image - to be created: the PNG colour type is set to 3; greyscale - must not be set; alpha must not be set; transparent must - not be set; the bit depth must be 1,2,4, or 8. When a colour - mapped image is created, the pixel values are palette indexes - and the `bitdepth` argument specifies the size of these indexes - (not the size of the colour values in the palette). - - The palette argument value should be a sequence of 3- or - 4-tuples. 3-tuples specify RGB palette entries; 4-tuples - specify RGBA palette entries. If both 4-tuples and 3-tuples - appear in the sequence then all the 4-tuples must come - before all the 3-tuples. A ``PLTE`` chunk is created; if there - are 4-tuples then a ``tRNS`` chunk is created as well. The - ``PLTE`` chunk will contain all the RGB triples in the same - sequence; the ``tRNS`` chunk will contain the alpha channel for - all the 4-tuples, in the same sequence. Palette entries - are always 8-bit. - - If specified, the `transparent` and `background` parameters must - be a tuple with three integer values for red, green, blue, or - a simple integer (or singleton tuple) for a greyscale image. - - If specified, the `gamma` parameter must be a positive number - (generally, a float). A ``gAMA`` chunk will be created. Note that - this will not change the values of the pixels as they appear in - the PNG file, they are assumed to have already been converted - appropriately for the gamma specified. - - The `compression` argument specifies the compression level - to be used by the ``zlib`` module. Higher values are likely - to compress better, but will be slower to compress. The - default for this argument is ``None``; this does not mean - no compression, rather it means that the default from the - ``zlib`` module is used (which is generally acceptable). - - If `interlace` is true then an interlaced image is created - (using PNG's so far only interlace method, *Adam7*). This does not - affect how the pixels should be presented to the encoder, rather - it changes how they are arranged into the PNG file. On slow - connexions interlaced images can be partially decoded by the - browser to give a rough view of the image that is successively - refined as more image data appears. - - .. note :: - - Enabling the `interlace` option requires the entire image - to be processed in working memory. 
- - `chunk_limit` is used to limit the amount of memory used whilst - compressing the image. In order to avoid using large amounts of - memory, multiple ``IDAT`` chunks may be created. - """ - - # At the moment the `planes` argument is ignored; - # its purpose is to act as a dummy so that - # ``Writer(x, y, **info)`` works, where `info` is a dictionary - # returned by Reader.read and friends. - # Ditto for `colormap`. - - # A couple of helper functions come first. Best skipped if you - # are reading through. - - def isinteger(x): - try: - return int(x) == x - except: - return False - - def check_color(c, which): - """Checks that a colour argument for transparent or - background options is the right form. Also "corrects" bare - integers to 1-tuples. - """ - - if c is None: - return c - if greyscale: - try: - l = len(c) - except TypeError: - c = (c,) - if len(c) != 1: - raise ValueError("%s for greyscale must be 1-tuple" % which) - if not isinteger(c[0]): - raise ValueError("%s colour for greyscale must be integer" % which) - else: - if not ( - len(c) == 3 - and isinteger(c[0]) - and isinteger(c[1]) - and isinteger(c[2]) - ): - raise ValueError("%s colour must be a triple of integers" % which) - return c - - if size: - if len(size) != 2: - raise ValueError("size argument should be a pair (width, height)") - if width is not None and width != size[0]: - raise ValueError( - "size[0] (%r) and width (%r) should match when both are used." - % (size[0], width) - ) - if height is not None and height != size[1]: - raise ValueError( - "size[1] (%r) and height (%r) should match when both are used." - % (size[1], height) - ) - width, height = size - del size - - if width <= 0 or height <= 0: - raise ValueError("width and height must be greater than zero") - if not isinteger(width) or not isinteger(height): - raise ValueError("width and height must be integers") - # http://www.w3.org/TR/PNG/#7Integers-and-byte-order - if width > 2 ** 32 - 1 or height > 2 ** 32 - 1: - raise ValueError("width and height cannot exceed 2**32-1") - - if alpha and transparent is not None: - raise ValueError("transparent colour not allowed with alpha channel") - - if bytes_per_sample is not None: - warnings.warn( - "please use bitdepth instead of bytes_per_sample", DeprecationWarning - ) - if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2): - raise ValueError("bytes per sample must be .125, .25, .5, 1, or 2") - bitdepth = int(8 * bytes_per_sample) - del bytes_per_sample - if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth: - raise ValueError( - "bitdepth (%r) must be a positive integer <= 16" % bitdepth - ) - - self.rescale = None - if palette: - if bitdepth not in (1, 2, 4, 8): - raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8") - if transparent is not None: - raise ValueError("transparent and palette not compatible") - if alpha: - raise ValueError("alpha and palette not compatible") - if greyscale: - raise ValueError("greyscale and palette not compatible") - else: - # No palette, check for sBIT chunk generation. 
- if alpha or not greyscale: - if bitdepth not in (8, 16): - targetbitdepth = (8, 16)[bitdepth > 8] - self.rescale = (bitdepth, targetbitdepth) - bitdepth = targetbitdepth - del targetbitdepth - else: - assert greyscale - assert not alpha - if bitdepth not in (1, 2, 4, 8, 16): - if bitdepth > 8: - targetbitdepth = 16 - elif bitdepth == 3: - targetbitdepth = 4 - else: - assert bitdepth in (5, 6, 7) - targetbitdepth = 8 - self.rescale = (bitdepth, targetbitdepth) - bitdepth = targetbitdepth - del targetbitdepth - - if bitdepth < 8 and (alpha or not greyscale and not palette): - raise ValueError("bitdepth < 8 only permitted with greyscale or palette") - if bitdepth > 8 and palette: - raise ValueError("bit depth must be 8 or less for images with palette") - - transparent = check_color(transparent, "transparent") - background = check_color(background, "background") - - # It's important that the true boolean values (greyscale, alpha, - # colormap, interlace) are converted to bool because Iverson's - # convention is relied upon later on. - self.width = width - self.height = height - self.transparent = transparent - self.background = background - self.gamma = gamma - self.greyscale = bool(greyscale) - self.alpha = bool(alpha) - self.colormap = bool(palette) - self.bitdepth = int(bitdepth) - self.compression = compression - self.chunk_limit = chunk_limit - self.interlace = bool(interlace) - self.palette = check_palette(palette) - - self.color_type = 4 * self.alpha + 2 * (not greyscale) + 1 * self.colormap - assert self.color_type in (0, 2, 3, 4, 6) - - self.color_planes = (3, 1)[self.greyscale or self.colormap] - self.planes = self.color_planes + self.alpha - # :todo: fix for bitdepth < 8 - self.psize = (self.bitdepth / 8) * self.planes - - def make_palette(self): - """Create the byte sequences for a ``PLTE`` and if necessary a - ``tRNS`` chunk. Returned as a pair (*p*, *t*). *t* will be - ``None`` if no ``tRNS`` chunk is necessary. - """ - - p = array("B") - t = array("B") - - for x in self.palette: - p.extend(x[0:3]) - if len(x) > 3: - t.append(x[3]) - p = tostring(p) - t = tostring(t) - if t: - return p, t - return p, None - - def write(self, outfile, rows): - """Write a PNG image to the output file. `rows` should be - an iterable that yields each row in boxed row flat pixel format. - The rows should be the rows of the original image, so there - should be ``self.height`` rows of ``self.width * self.planes`` values. - If `interlace` is specified (when creating the instance), then - an interlaced PNG file will be written. Supply the rows in the - normal image order; the interlacing is carried out internally. - - .. note :: - - Interlacing will require the entire image to be in working memory. - """ - - if self.interlace: - fmt = "BH"[self.bitdepth > 8] - a = array(fmt, itertools.chain(*rows)) - return self.write_array(outfile, a) - else: - nrows = self.write_passes(outfile, rows) - if nrows != self.height: - raise ValueError( - "rows supplied (%d) does not match height (%d)" - % (nrows, self.height) - ) - - def write_passes(self, outfile, rows, packed=False): - """ - Write a PNG image to the output file. - - Most users are expected to find the :meth:`write` or - :meth:`write_array` method more convenient. - - The rows should be given to this method in the order that - they appear in the output file. For straightlaced images, - this is the usual top to bottom ordering, but for interlaced - images the rows should have already been interlaced before - passing them to this function. 
- - `rows` should be an iterable that yields each row. When - `packed` is ``False`` the rows should be in boxed row flat pixel - format; when `packed` is ``True`` each row should be a packed - sequence of bytes. - - """ - - # http://www.w3.org/TR/PNG/#5PNG-file-signature - outfile.write(_signature) - - # http://www.w3.org/TR/PNG/#11IHDR - write_chunk( - outfile, - "IHDR", - struct.pack( - "!2I5B", - self.width, - self.height, - self.bitdepth, - self.color_type, - 0, - 0, - self.interlace, - ), - ) - - # See :chunk:order - # http://www.w3.org/TR/PNG/#11gAMA - if self.gamma is not None: - write_chunk( - outfile, "gAMA", struct.pack("!L", int(round(self.gamma * 1e5))) - ) - - # See :chunk:order - # http://www.w3.org/TR/PNG/#11sBIT - if self.rescale: - write_chunk( - outfile, - "sBIT", - struct.pack("%dB" % self.planes, *[self.rescale[0]] * self.planes), - ) - - # :chunk:order: Without a palette (PLTE chunk), ordering is - # relatively relaxed. With one, gAMA chunk must precede PLTE - # chunk which must precede tRNS and bKGD. - # See http://www.w3.org/TR/PNG/#5ChunkOrdering - if self.palette: - p, t = self.make_palette() - write_chunk(outfile, "PLTE", p) - if t: - # tRNS chunk is optional. Only needed if palette entries - # have alpha. - write_chunk(outfile, "tRNS", t) - - # http://www.w3.org/TR/PNG/#11tRNS - if self.transparent is not None: - if self.greyscale: - write_chunk(outfile, "tRNS", struct.pack("!1H", *self.transparent)) - else: - write_chunk(outfile, "tRNS", struct.pack("!3H", *self.transparent)) - - # http://www.w3.org/TR/PNG/#11bKGD - if self.background is not None: - if self.greyscale: - write_chunk(outfile, "bKGD", struct.pack("!1H", *self.background)) - else: - write_chunk(outfile, "bKGD", struct.pack("!3H", *self.background)) - - # http://www.w3.org/TR/PNG/#11IDAT - if self.compression is not None: - compressor = zlib.compressobj(self.compression) - else: - compressor = zlib.compressobj() - - # Choose an extend function based on the bitdepth. The extend - # function packs/decomposes the pixel values into bytes and - # stuffs them onto the data array. - data = array("B") - if self.bitdepth == 8 or packed: - extend = data.extend - elif self.bitdepth == 16: - # Decompose into bytes - def extend(sl): - fmt = "!%dH" % len(sl) - data.extend(array("B", struct.pack(fmt, *sl))) - - else: - # Pack into bytes - assert self.bitdepth < 8 - # samples per byte - spb = int(8 / self.bitdepth) - - def extend(sl): - a = array("B", sl) - # Adding padding bytes so we can group into a whole - # number of spb-tuples. - l = float(len(a)) - extra = math.ceil(l / float(spb)) * spb - l - a.extend([0] * int(extra)) - # Pack into bytes - l = group(a, spb) - l = map(lambda e: reduce(lambda x, y: (x << self.bitdepth) + y, e), l) - data.extend(l) - - if self.rescale: - oldextend = extend - factor = float(2 ** self.rescale[1] - 1) / float(2 ** self.rescale[0] - 1) - - def extend(sl): - oldextend(map(lambda x: int(round(factor * x)), sl)) - - # Build the first row, testing mostly to see if we need to - # changed the extend function to cope with NumPy integer types - # (they cause our ordinary definition of extend to fail, so we - # wrap it). See - # http://code.google.com/p/pypng/issues/detail?id=44 - enumrows = enumerate(rows) - del rows - - # First row's filter type. - data.append(0) - # :todo: Certain exceptions in the call to ``.next()`` or the - # following try would indicate no row data supplied. - # Should catch. - i, row = next(enumrows) - try: - # If this fails... - extend(row) - except: - # ... 
try a version that converts the values to int first. - # Not only does this work for the (slightly broken) NumPy - # types, there are probably lots of other, unknown, "nearly" - # int types it works for. - def wrapmapint(f): - return lambda sl: f(map(int, sl)) - - extend = wrapmapint(extend) - del wrapmapint - extend(row) - - for i, row in enumrows: - # Add "None" filter type. Currently, it's essential that - # this filter type be used for every scanline as we do not - # mark the first row of a reduced pass image; that means we - # could accidentally compute the wrong filtered scanline if - # we used "up", "average", or "paeth" on such a line. - data.append(0) - extend(row) - if len(data) > self.chunk_limit: - compressed = compressor.compress(tostring(data)) - if len(compressed): - # print >> sys.stderr, len(data), len(compressed) - write_chunk(outfile, "IDAT", compressed) - # Because of our very witty definition of ``extend``, - # above, we must re-use the same ``data`` object. Hence - # we use ``del`` to empty this one, rather than create a - # fresh one (which would be my natural FP instinct). - del data[:] - if len(data): - compressed = compressor.compress(tostring(data)) - else: - compressed = "" - flushed = compressor.flush() - if len(compressed) or len(flushed): - # print >> sys.stderr, len(data), len(compressed), len(flushed) - write_chunk(outfile, "IDAT", compressed + flushed) - # http://www.w3.org/TR/PNG/#11IEND - write_chunk(outfile, "IEND") - return i + 1 - - def write_array(self, outfile, pixels): - """ - Write an array in flat row flat pixel format as a PNG file on - the output file. See also :meth:`write` method. - """ - - if self.interlace: - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.array_scanlines(pixels)) - - def write_packed(self, outfile, rows): - """ - Write PNG file to `outfile`. The pixel data comes from `rows` - which should be in boxed row packed format. Each row should be - a sequence of packed bytes. - - Technically, this method does work for interlaced images but it - is best avoided. For interlaced images, the rows should be - presented in the order that they appear in the file. - - This method should not be used when the source image bit depth - is not one naturally supported by PNG; the bit depth should be - 1, 2, 4, 8, or 16. - """ - - if self.rescale: - raise Error( - "write_packed method not suitable for bit depth %d" % self.rescale[0] - ) - return self.write_passes(outfile, rows, packed=True) - - def convert_pnm(self, infile, outfile): - """ - Convert a PNM file containing raw pixel data into a PNG file - with the parameters set in the writer object. Works for - (binary) PGM, PPM, and PAM formats. - """ - - if self.interlace: - pixels = array("B") - pixels.fromfile( - infile, - (self.bitdepth / 8) * self.color_planes * self.width * self.height, - ) - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.file_scanlines(infile)) - - def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile): - """ - Convert a PPM and PGM file containing raw pixel data into a - PNG outfile with the parameters set in the writer object. 
- """ - pixels = array("B") - pixels.fromfile( - ppmfile, (self.bitdepth / 8) * self.color_planes * self.width * self.height - ) - apixels = array("B") - apixels.fromfile(pgmfile, (self.bitdepth / 8) * self.width * self.height) - pixels = interleave_planes( - pixels, - apixels, - (self.bitdepth / 8) * self.color_planes, - (self.bitdepth / 8), - ) - if self.interlace: - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.array_scanlines(pixels)) - - def file_scanlines(self, infile): - """ - Generates boxed rows in flat pixel format, from the input file - `infile`. It assumes that the input file is in a "Netpbm-like" - binary format, and is positioned at the beginning of the first - pixel. The number of pixels to read is taken from the image - dimensions (`width`, `height`, `planes`) and the number of bytes - per value is implied by the image `bitdepth`. - """ - - # Values per row - vpr = self.width * self.planes - row_bytes = vpr - if self.bitdepth > 8: - assert self.bitdepth == 16 - row_bytes *= 2 - fmt = ">%dH" % vpr - - def line(): - return array("H", struct.unpack(fmt, infile.read(row_bytes))) - - else: - - def line(): - scanline = array("B", infile.read(row_bytes)) - return scanline - - for y in range(self.height): - yield line() - - def array_scanlines(self, pixels): - """ - Generates boxed rows (flat pixels) from flat rows (flat pixels) - in an array. - """ - - # Values per row - vpr = self.width * self.planes - stop = 0 - for y in range(self.height): - start = stop - stop = start + vpr - yield pixels[start:stop] - - def array_scanlines_interlace(self, pixels): - """ - Generator for interlaced scanlines from an array. `pixels` is - the full source image in flat row flat pixel format. The - generator yields each scanline of the reduced passes in turn, in - boxed row flat pixel format. - """ - - # http://www.w3.org/TR/PNG/#8InterlaceMethods - # Array type. - fmt = "BH"[self.bitdepth > 8] - # Value per row - vpr = self.width * self.planes - for xstart, ystart, xstep, ystep in _adam7: - if xstart >= self.width: - continue - # Pixels per row (of reduced image) - ppr = int(math.ceil((self.width - xstart) / float(xstep))) - # number of values in reduced image row. - row_len = ppr * self.planes - for y in range(ystart, self.height, ystep): - if xstep == 1: - offset = y * vpr - yield pixels[offset : offset + vpr] - else: - row = array(fmt) - # There's no easier way to set the length of an array - row.extend(pixels[0:row_len]) - offset = y * vpr + xstart * self.planes - end_offset = (y + 1) * vpr - skip = self.planes * xstep - for i in range(self.planes): - row[i :: self.planes] = pixels[offset + i : end_offset : skip] - yield row - - -def write_chunk(outfile, tag, data=strtobytes("")): - """ - Write a PNG chunk to the output file, including length and - checksum. - """ - - # http://www.w3.org/TR/PNG/#5Chunk-layout - outfile.write(struct.pack("!I", len(data))) - tag = strtobytes(tag) - outfile.write(tag) - outfile.write(data) - checksum = zlib.crc32(tag) - checksum = zlib.crc32(data, checksum) - checksum &= 2 ** 32 - 1 - outfile.write(struct.pack("!I", checksum)) - - -def write_chunks(out, chunks): - """Create a PNG file by writing out the chunks.""" - - out.write(_signature) - for chunk in chunks: - write_chunk(out, *chunk) - - -def filter_scanline(type, line, fo, prev=None): - """Apply a scanline filter to a scanline. 
`type` specifies the - filter type (0 to 4); `line` specifies the current (unfiltered) - scanline as a sequence of bytes; `prev` specifies the previous - (unfiltered) scanline as a sequence of bytes. `fo` specifies the - filter offset; normally this is size of a pixel in bytes (the number - of bytes per sample times the number of channels), but when this is - < 1 (for bit depths < 8) then the filter offset is 1. - """ - - assert 0 <= type < 5 - - # The output array. Which, pathetically, we extend one-byte at a - # time (fortunately this is linear). - out = array("B", [type]) - - def sub(): - ai = -fo - for x in line: - if ai >= 0: - x = (x - line[ai]) & 0xFF - out.append(x) - ai += 1 - - def up(): - for i, x in enumerate(line): - x = (x - prev[i]) & 0xFF - out.append(x) - - def average(): - ai = -fo - for i, x in enumerate(line): - if ai >= 0: - x = (x - ((line[ai] + prev[i]) >> 1)) & 0xFF - else: - x = (x - (prev[i] >> 1)) & 0xFF - out.append(x) - ai += 1 - - def paeth(): - # http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth - ai = -fo # also used for ci - for i, x in enumerate(line): - a = 0 - b = prev[i] - c = 0 - - if ai >= 0: - a = line[ai] - c = prev[ai] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - Pr = a - elif pb <= pc: - Pr = b - else: - Pr = c - - x = (x - Pr) & 0xFF - out.append(x) - ai += 1 - - if not prev: - # We're on the first line. Some of the filters can be reduced - # to simpler cases which makes handling the line "off the top" - # of the image simpler. "up" becomes "none"; "paeth" becomes - # "left" (non-trivial, but true). "average" needs to be handled - # specially. - if type == 2: # "up" - return line # type = 0 - elif type == 3: - prev = [0] * len(line) - elif type == 4: # "paeth" - type = 1 - if type == 0: - out.extend(line) - elif type == 1: - sub() - elif type == 2: - up() - elif type == 3: - average() - else: # type == 4 - paeth() - return out - - -def from_array(a, mode=None, info={}): - """Create a PNG :class:`Image` object from a 2- or 3-dimensional array. - One application of this function is easy PIL-style saving: - ``png.from_array(pixels, 'L').save('foo.png')``. - - .. note : - - The use of the term *3-dimensional* is for marketing purposes - only. It doesn't actually work. Please bear with us. Meanwhile - enjoy the complimentary snacks (on request) and please use a - 2-dimensional array. - - Unless they are specified using the *info* parameter, the PNG's - height and width are taken from the array size. For a 3 dimensional - array the first axis is the height; the second axis is the width; - and the third axis is the channel number. Thus an RGB image that is - 16 pixels high and 8 wide will use an array that is 16x8x3. For 2 - dimensional arrays the first axis is the height, but the second axis - is ``width*channels``, so an RGB image that is 16 pixels high and 8 - wide will use a 2-dimensional array that is 16x24 (each row will be - 8*3==24 sample values). - - *mode* is a string that specifies the image colour format in a - PIL-style mode. It can be: - - ``'L'`` - greyscale (1 channel) - ``'LA'`` - greyscale with alpha (2 channel) - ``'RGB'`` - colour image (3 channel) - ``'RGBA'`` - colour image with alpha (4 channel) - - The mode string can also specify the bit depth (overriding how this - function normally derives the bit depth, see below). Appending - ``';16'`` to the mode will cause the PNG to be 16 bits per channel; - any decimal from 1 to 16 can be used to specify the bit depth. 
- - When a 2-dimensional array is used *mode* determines how many - channels the image has, and so allows the width to be derived from - the second array dimension. - - The array is expected to be a ``numpy`` array, but it can be any - suitable Python sequence. For example, a list of lists can be used: - ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. The exact - rules are: ``len(a)`` gives the first dimension, height; - ``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the - third dimension, unless an exception is raised in which case a - 2-dimensional array is assumed. It's slightly more complicated than - that because an iterator of rows can be used, and it all still - works. Using an iterator allows data to be streamed efficiently. - - The bit depth of the PNG is normally taken from the array element's - datatype (but if *mode* specifies a bitdepth then that is used - instead). The array element's datatype is determined in a way which - is supposed to work both for ``numpy`` arrays and for Python - ``array.array`` objects. A 1 byte datatype will give a bit depth of - 8, a 2 byte datatype will give a bit depth of 16. If the datatype - does not have an implicit size, for example it is a plain Python - list of lists, as above, then a default of 8 is used. - - The *info* parameter is a dictionary that can be used to specify - metadata (in the same style as the arguments to the - :class:``png.Writer`` class). For this function the keys that are - useful are: - - height - overrides the height derived from the array dimensions and allows - *a* to be an iterable. - width - overrides the width derived from the array dimensions. - bitdepth - overrides the bit depth derived from the element datatype (but - must match *mode* if that also specifies a bit depth). - - Generally anything specified in the - *info* dictionary will override any implicit choices that this - function would otherwise make, but must match any explicit ones. - For example, if the *info* dictionary has a ``greyscale`` key then - this must be true when mode is ``'L'`` or ``'LA'`` and false when - mode is ``'RGB'`` or ``'RGBA'``. - """ - - # We abuse the *info* parameter by modifying it. Take a copy here. - # (Also typechecks *info* to some extent). - info = dict(info) - - # Syntax check mode string. - bitdepth = None - try: - mode = mode.split(";") - if len(mode) not in (1, 2): - raise Error() - if mode[0] not in ("L", "LA", "RGB", "RGBA"): - raise Error() - if len(mode) == 2: - try: - bitdepth = int(mode[1]) - except: - raise Error() - except Error: - raise Error("mode string should be 'RGB' or 'L;16' or similar.") - mode = mode[0] - - # Get bitdepth from *mode* if possible. - if bitdepth: - if info.get("bitdepth") and bitdepth != info["bitdepth"]: - raise Error( - "mode bitdepth (%d) should match info bitdepth (%d)." - % (bitdepth, info["bitdepth"]) - ) - info["bitdepth"] = bitdepth - - # Fill in and/or check entries in *info*. - # Dimensions. - if "size" in info: - # Check width, height, size all match where used. - for dimension, axis in [("width", 0), ("height", 1)]: - if dimension in info: - if info[dimension] != info["size"][axis]: - raise Error( - "info[%r] should match info['size'][%r]." % (dimension, axis) - ) - info["width"], info["height"] = info["size"] - if "height" not in info: - try: - l = len(a) - except: - raise Error("len(a) does not work, supply info['height'] instead.") - info["height"] = l - # Colour format. 
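# Usage sketch for the ``from_array`` API documented above: a 2-dimensional
# array carries ``width*channels`` values per row and is saved PIL-style.
# A minimal example, assuming the module is importable as ``png`` (the usual
# PyPNG convention); the output file names are placeholders.
import png

rows = [[0, 64, 128, 255],
        [255, 128, 64, 0]]                       # 4x2 greyscale, one channel per pixel
png.from_array(rows, "L").save("ramp.png")       # bit depth defaults to 8
png.from_array(rows, "L;16").save("ramp16.png")  # same data, forced to 16 bits per channel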
- if "greyscale" in info: - if bool(info["greyscale"]) != ("L" in mode): - raise Error("info['greyscale'] should match mode.") - info["greyscale"] = "L" in mode - if "alpha" in info: - if bool(info["alpha"]) != ("A" in mode): - raise Error("info['alpha'] should match mode.") - info["alpha"] = "A" in mode - - planes = len(mode) - if "planes" in info: - if info["planes"] != planes: - raise Error("info['planes'] should match mode.") - - # In order to work out whether we the array is 2D or 3D we need its - # first row, which requires that we take a copy of its iterator. - # We may also need the first row to derive width and bitdepth. - a, t = itertools.tee(a) - row = next(t) - del t - try: - row[0][0] - threed = True - testelement = row[0] - except: - threed = False - testelement = row - if "width" not in info: - if threed: - width = len(row) - else: - width = len(row) // planes - info["width"] = width - - # Not implemented yet - assert not threed - - if "bitdepth" not in info: - try: - dtype = testelement.dtype - # goto the "else:" clause. Sorry. - except: - try: - # Try a Python array.array. - bitdepth = 8 * testelement.itemsize - except: - # We can't determine it from the array element's - # datatype, use a default of 8. - bitdepth = 8 - else: - # If we got here without exception, we now assume that - # the array is a numpy array. - if dtype.kind == "b": - bitdepth = 1 - else: - bitdepth = 8 * dtype.itemsize - info["bitdepth"] = bitdepth - - for thing in "width height bitdepth greyscale alpha".split(): - assert thing in info - return Image(a, info) - - -# So that refugee's from PIL feel more at home. Not documented. -fromarray = from_array - - -class Image: - """A PNG image. - You can create an :class:`Image` object from an array of pixels by calling - :meth:`png.from_array`. It can be saved to disk with the - :meth:`save` method.""" - - def __init__(self, rows, info): - """ - .. note :: - - The constructor is not public. Please do not call it. - """ - - self.rows = rows - self.info = info - - def save(self, file): - """Save the image to *file*. If *file* looks like an open file - descriptor then it is used, otherwise it is treated as a - filename and a fresh file is opened. - - In general, you can only call this method once; after it has - been called the first time and the PNG image has been saved, the - source data will have been streamed, and cannot be streamed - again. - """ - - w = Writer(**self.info) - - try: - file.write - - def close(): - pass - - except: - file = open(file, "wb") - - def close(): - file.close() - - try: - w.write(file, self.rows) - finally: - close() - - -class _readable: - """ - A simple file-like interface for strings and arrays. - """ - - def __init__(self, buf): - self.buf = buf - self.offset = 0 - - def read(self, n): - r = self.buf[self.offset : self.offset + n] - if isarray(r): - r = tostring(r) - self.offset += n - return r - - -class Reader: - """ - PNG decoder in pure Python. - """ - - def __init__(self, _guess=None, **kw): - """ - Create a PNG decoder object. - - The constructor expects exactly one keyword argument. If you - supply a positional argument instead, it will guess the input - type. You can choose among the following keyword arguments: - - filename - Name of input file (a PNG file). - file - A file-like object (object with a read() method). - bytes - ``array`` or ``string`` with PNG data. 
- - """ - if (_guess is not None and len(kw) != 0) or (_guess is None and len(kw) != 1): - raise TypeError("Reader() takes exactly 1 argument") - - # Will be the first 8 bytes, later on. See validate_signature. - self.signature = None - self.transparent = None - # A pair of (len,type) if a chunk has been read but its data and - # checksum have not (in other words the file position is just - # past the 4 bytes that specify the chunk type). See preamble - # method for how this is used. - self.atchunk = None - - if _guess is not None: - if isarray(_guess): - kw["bytes"] = _guess - elif isinstance(_guess, str): - kw["filename"] = _guess - elif isinstance(_guess, file): - kw["file"] = _guess - - if "filename" in kw: - self.file = open(kw["filename"], "rb") - elif "file" in kw: - self.file = kw["file"] - elif "bytes" in kw: - self.file = _readable(kw["bytes"]) - else: - raise TypeError("expecting filename, file or bytes array") - - def chunk(self, seek=None): - """ - Read the next PNG chunk from the input file; returns a - (*type*,*data*) tuple. *type* is the chunk's type as a string - (all PNG chunk types are 4 characters long). *data* is the - chunk's data content, as a string. - - If the optional `seek` argument is - specified then it will keep reading chunks until it either runs - out of file or finds the type specified by the argument. Note - that in general the order of chunks in PNGs is unspecified, so - using `seek` can cause you to miss chunks. - """ - - self.validate_signature() - - while True: - # http://www.w3.org/TR/PNG/#5Chunk-layout - if not self.atchunk: - self.atchunk = self.chunklentype() - length, type = self.atchunk - self.atchunk = None - data = self.file.read(length) - if len(data) != length: - raise ChunkError( - "Chunk %s too short for required %i octets." % (type, length) - ) - checksum = self.file.read(4) - if len(checksum) != 4: - raise ValueError("Chunk %s too short for checksum.", tag) - if seek and type != seek: - continue - verify = zlib.crc32(strtobytes(type)) - verify = zlib.crc32(data, verify) - # Whether the output from zlib.crc32 is signed or not varies - # according to hideous implementation details, see - # http://bugs.python.org/issue1202 . - # We coerce it to be positive here (in a way which works on - # Python 2.3 and older). - verify &= 2 ** 32 - 1 - verify = struct.pack("!I", verify) - if checksum != verify: - # print repr(checksum) - (a,) = struct.unpack("!I", checksum) - (b,) = struct.unpack("!I", verify) - raise ChunkError( - "Checksum error in %s chunk: 0x%08X != 0x%08X." % (type, a, b) - ) - return type, data - - def chunks(self): - """Return an iterator that will yield each chunk as a - (*chunktype*, *content*) pair. - """ - - while True: - t, v = self.chunk() - yield t, v - if t == "IEND": - break - - def undo_filter(self, filter_type, scanline, previous): - """Undo the filter for a scanline. `scanline` is a sequence of - bytes that does not include the initial filter type byte. - `previous` is decoded previous scanline (for straightlaced - images this is the previous pixel row, but for interlaced - images, it is the previous scanline in the reduced image, which - in general is not the previous pixel row in the final image). - When there is no previous scanline (the first row of a - straightlaced image, or the first row in one of the passes in an - interlaced image), then this argument should be ``None``. - - The scanline will have the effects of filtering removed, and the - result will be returned as a fresh sequence of bytes. 
- """ - - # :todo: Would it be better to update scanline in place? - - # Create the result byte array. It seems that the best way to - # create the array to be the right size is to copy from an - # existing sequence. *sigh* - # If we fill the result with scanline, then this allows a - # micro-optimisation in the "null" and "sub" cases. - result = array("B", scanline) - - if filter_type == 0: - # And here, we _rely_ on filling the result with scanline, - # above. - return result - - if filter_type not in (1, 2, 3, 4): - raise FormatError( - "Invalid PNG Filter Type." - " See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ." - ) - - # Filter unit. The stride from one pixel to the corresponding - # byte from the previous previous. Normally this is the pixel - # size in bytes, but when this is smaller than 1, the previous - # byte is used instead. - fu = max(1, self.psize) - - # For the first line of a pass, synthesize a dummy previous - # line. An alternative approach would be to observe that on the - # first line 'up' is the same as 'null', 'paeth' is the same - # as 'sub', with only 'average' requiring any special case. - if not previous: - previous = array("B", [0] * len(scanline)) - - def sub(): - """Undo sub filter.""" - - ai = 0 - # Loops starts at index fu. Observe that the initial part - # of the result is already filled in correctly with - # scanline. - for i in range(fu, len(result)): - x = scanline[i] - a = result[ai] - result[i] = (x + a) & 0xFF - ai += 1 - - def up(): - """Undo up filter.""" - for i in range(len(result)): # pylint: disable=consider-using-enumerate - x = scanline[i] - b = previous[i] - result[i] = (x + b) & 0xFF - - def average(): - """Undo average filter.""" - - ai = -fu - for i in range(len(result)): # pylint: disable=consider-using-enumerate - x = scanline[i] - if ai < 0: - a = 0 - else: - a = result[ai] - b = previous[i] - result[i] = (x + ((a + b) >> 1)) & 0xFF - ai += 1 - - def paeth(): - """Undo Paeth filter.""" - - # Also used for ci. - ai = -fu - for i in range(len(result)): # pylint: disable=consider-using-enumerate - x = scanline[i] - if ai < 0: - a = c = 0 - else: - a = result[ai] - c = previous[ai] - b = previous[i] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - pr = a - elif pb <= pc: - pr = b - else: - pr = c - result[i] = (x + pr) & 0xFF - ai += 1 - - # Call appropriate filter algorithm. Note that 0 has already - # been dealt with. - (None, sub, up, average, paeth)[filter_type]() - return result - - def deinterlace(self, raw): - """ - Read raw pixel data, undo filters, deinterlace, and flatten. - Return in flat row flat pixel format. - """ - - # print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," + - # " bpp=%s") % (self.width, self.height, self.planes, self.bps) - # Values per row (of the target image) - vpr = self.width * self.planes - - # Make a result array, and make it big enough. Interleaving - # writes to the output array randomly (well, not quite), so the - # entire output array must be in memory. - fmt = "BH"[self.bitdepth > 8] - a = array(fmt, [0] * vpr * self.height) - source_offset = 0 - - for xstart, ystart, xstep, ystep in _adam7: - # print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % ( - # xstart, ystart, xstep, ystep) - if xstart >= self.width: - continue - # The previous (reconstructed) scanline. None at the - # beginning of a pass to indicate that there is no previous - # line. 
- recon = None - # Pixels per row (reduced pass image) - ppr = int(math.ceil((self.width - xstart) / float(xstep))) - # Row size in bytes for this pass. - row_size = int(math.ceil(self.psize * ppr)) - for y in range(ystart, self.height, ystep): - filter_type = raw[source_offset] - source_offset += 1 - scanline = raw[source_offset : source_offset + row_size] - source_offset += row_size - recon = self.undo_filter(filter_type, scanline, recon) - # Convert so that there is one element per pixel value - flat = self.serialtoflat(recon, ppr) - if xstep == 1: - assert xstart == 0 - offset = y * vpr - a[offset : offset + vpr] = flat - else: - offset = y * vpr + xstart * self.planes - end_offset = (y + 1) * vpr - skip = self.planes * xstep - for i in range(self.planes): - a[offset + i : end_offset : skip] = flat[i :: self.planes] - return a - - def iterboxed(self, rows): - """Iterator that yields each scanline in boxed row flat pixel - format. `rows` should be an iterator that yields the bytes of - each row in turn. - """ - - def asvalues(raw): - """Convert a row of raw bytes into a flat row. Result may - or may not share with argument""" - - if self.bitdepth == 8: - return raw - if self.bitdepth == 16: - raw = tostring(raw) - return array("H", struct.unpack("!%dH" % (len(raw) // 2), raw)) - assert self.bitdepth < 8 - width = self.width - # Samples per byte - spb = 8 // self.bitdepth - out = array("B") - mask = 2 ** self.bitdepth - 1 - shifts = map(self.bitdepth.__mul__, reversed(range(spb))) - for o in raw: - out.extend(map(lambda i: mask & (o >> i), shifts)) - return out[:width] - - return map(asvalues, rows) - - def serialtoflat(self, bytes, width=None): - """Convert serial format (byte stream) pixel data to flat row - flat pixel. - """ - - if self.bitdepth == 8: - return bytes - if self.bitdepth == 16: - bytes = tostring(bytes) - return array("H", struct.unpack("!%dH" % (len(bytes) // 2), bytes)) - assert self.bitdepth < 8 - if width is None: - width = self.width - # Samples per byte - spb = 8 // self.bitdepth - out = array("B") - mask = 2 ** self.bitdepth - 1 - shifts = map(self.bitdepth.__mul__, reversed(range(spb))) - l = width - for o in bytes: - out.extend([(mask & (o >> s)) for s in shifts][:l]) - l -= spb - if l <= 0: - l = width - return out - - def iterstraight(self, raw): - """Iterator that undoes the effect of filtering, and yields each - row in serialised format (as a sequence of bytes). Assumes input - is straightlaced. `raw` should be an iterable that yields the - raw bytes in chunks of arbitrary size.""" - - # length of row, in bytes - rb = self.row_bytes - a = array("B") - # The previous (reconstructed) scanline. None indicates first - # line of image. - recon = None - for some in raw: - a.extend(some) - while len(a) >= rb + 1: - filter_type = a[0] - scanline = a[1 : rb + 1] - del a[: rb + 1] - recon = self.undo_filter(filter_type, scanline, recon) - yield recon - if len(a) != 0: - # :file:format We get here with a file format error: when the - # available bytes (after decompressing) do not pack into exact - # rows. - raise FormatError("Wrong size for decompressed IDAT chunk.") - assert len(a) == 0 - - def validate_signature(self): - """If signature (header) has not been read then read and - validate it; otherwise do nothing. 
- """ - - if self.signature: - return - self.signature = self.file.read(8) - if self.signature != _signature: - raise FormatError("PNG file has invalid signature.") - - def preamble(self): - """ - Extract the image metadata by reading the initial part of the PNG - file up to the start of the ``IDAT`` chunk. All the chunks that - precede the ``IDAT`` chunk are read and either processed for - metadata or discarded. - """ - - self.validate_signature() - - while True: - if not self.atchunk: - self.atchunk = self.chunklentype() - if self.atchunk is None: - raise FormatError("This PNG file has no IDAT chunks.") - if self.atchunk[1] == "IDAT": - return - self.process_chunk() - - def chunklentype(self): - """Reads just enough of the input to determine the next - chunk's length and type, returned as a (*length*, *type*) pair - where *type* is a string. If there are no more chunks, ``None`` - is returned. - """ - - x = self.file.read(8) - if not x: - return None - if len(x) != 8: - raise FormatError("End of file whilst reading chunk length and type.") - length, type = struct.unpack("!I4s", x) - type = bytestostr(type) - if length > 2 ** 31 - 1: - raise FormatError("Chunk %s is too large: %d." % (type, length)) - return length, type - - def process_chunk(self): - """Process the next chunk and its data. This only processes the - following chunk types, all others are ignored: ``IHDR``, - ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``. - """ - - type, data = self.chunk() - if type == "IHDR": - # http://www.w3.org/TR/PNG/#11IHDR - if len(data) != 13: - raise FormatError("IHDR chunk has incorrect length.") - ( - self.width, - self.height, - self.bitdepth, - self.color_type, - self.compression, - self.filter, - self.interlace, - ) = struct.unpack("!2I5B", data) - - # Check that the header specifies only valid combinations. - if self.bitdepth not in (1, 2, 4, 8, 16): - raise Error("invalid bit depth %d" % self.bitdepth) - if self.color_type not in (0, 2, 3, 4, 6): - raise Error("invalid colour type %d" % self.color_type) - # Check indexed (palettized) images have 8 or fewer bits - # per pixel; check only indexed or greyscale images have - # fewer than 8 bits per pixel. - if (self.color_type & 1 and self.bitdepth > 8) or ( - self.bitdepth < 8 and self.color_type not in (0, 3) - ): - raise FormatError( - "Illegal combination of bit depth (%d)" - " and colour type (%d)." - " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." - % (self.bitdepth, self.color_type) - ) - if self.compression != 0: - raise Error("unknown compression method %d" % self.compression) - if self.filter != 0: - raise FormatError( - "Unknown filter method %d," - " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ." - % self.filter - ) - if self.interlace not in (0, 1): - raise FormatError( - "Unknown interlace method %d," - " see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ." 
- % self.interlace - ) - - # Derived values - # http://www.w3.org/TR/PNG/#6Colour-values - colormap = bool(self.color_type & 1) - greyscale = not (self.color_type & 2) - alpha = bool(self.color_type & 4) - color_planes = (3, 1)[greyscale or colormap] - planes = color_planes + alpha - - self.colormap = colormap - self.greyscale = greyscale - self.alpha = alpha - self.color_planes = color_planes - self.planes = planes - self.psize = float(self.bitdepth) / float(8) * planes - if int(self.psize) == self.psize: - self.psize = int(self.psize) - self.row_bytes = int(math.ceil(self.width * self.psize)) - # Stores PLTE chunk if present, and is used to check - # chunk ordering constraints. - self.plte = None - # Stores tRNS chunk if present, and is used to check chunk - # ordering constraints. - self.trns = None - # Stores sbit chunk if present. - self.sbit = None - elif type == "PLTE": - # http://www.w3.org/TR/PNG/#11PLTE - if self.plte: - warnings.warn("Multiple PLTE chunks present.") - self.plte = data - if len(data) % 3 != 0: - raise FormatError("PLTE chunk's length should be a multiple of 3.") - if len(data) > (2 ** self.bitdepth) * 3: - raise FormatError("PLTE chunk is too long.") - if len(data) == 0: - raise FormatError("Empty PLTE is not allowed.") - elif type == "bKGD": - try: - if self.colormap: - if not self.plte: - warnings.warn("PLTE chunk is required before bKGD chunk.") - self.background = struct.unpack("B", data) - else: - self.background = struct.unpack("!%dH" % self.color_planes, data) - except struct.error: - raise FormatError("bKGD chunk has incorrect length.") - elif type == "tRNS": - # http://www.w3.org/TR/PNG/#11tRNS - self.trns = data - if self.colormap: - if not self.plte: - warnings.warn("PLTE chunk is required before tRNS chunk.") - else: - if len(data) > len(self.plte) / 3: - # Was warning, but promoted to Error as it - # would otherwise cause pain later on. - raise FormatError("tRNS chunk is too long.") - else: - if self.alpha: - raise FormatError( - "tRNS chunk is not valid with colour type %d." % self.color_type - ) - try: - self.transparent = struct.unpack("!%dH" % self.color_planes, data) - except struct.error: - raise FormatError("tRNS chunk has incorrect length.") - elif type == "gAMA": - try: - self.gamma = struct.unpack("!L", data)[0] / 100000.0 - except struct.error: - raise FormatError("gAMA chunk has incorrect length.") - elif type == "sBIT": - self.sbit = data - if ( - self.colormap - and len(data) != 3 - or not self.colormap - and len(data) != self.planes - ): - raise FormatError("sBIT chunk has incorrect length.") - - def read(self): - """ - Read the PNG file and decode it. Returns (`width`, `height`, - `pixels`, `metadata`). - - May use excessive memory. - - `pixels` are returned in boxed row flat pixel format. - """ - - def iteridat(): - """Iterator that yields all the ``IDAT`` chunks as strings.""" - while True: - try: - type, data = self.chunk() - except ValueError as e: - raise ChunkError(e.args[0]) - if type == "IEND": - # http://www.w3.org/TR/PNG/#11IEND - break - if type != "IDAT": - continue - # type == 'IDAT' - # http://www.w3.org/TR/PNG/#11IDAT - if self.colormap and not self.plte: - warnings.warn("PLTE chunk is required before IDAT chunk") - yield data - - def iterdecomp(idat): - """Iterator that yields decompressed strings. `idat` should - be an iterator that yields the ``IDAT`` chunk data. - """ - - # Currently, with no max_length parameter to decompress, this - # routine will do one yield per IDAT chunk. So not very - # incremental. 
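# Usage sketch for ``Reader.read()`` as described above: pixels come back in
# boxed row flat pixel format, one row per iteration.  A minimal example,
# assuming the module is importable as ``png``; ``example.png`` is a placeholder.
import png

width, height, rows, meta = png.Reader(filename="example.png").read()
print("%dx%d, %d planes, bitdepth %d"
      % (width, height, meta["planes"], meta["bitdepth"]))
row0 = next(iter(rows))                       # one boxed row: width * planes values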
- d = zlib.decompressobj() - # Each IDAT chunk is passed to the decompressor, then any - # remaining state is decompressed out. - for data in idat: - # :todo: add a max_length argument here to limit output - # size. - yield array("B", d.decompress(data)) - yield array("B", d.flush()) - - self.preamble() - raw = iterdecomp(iteridat()) - - if self.interlace: - raw = array("B", itertools.chain(*raw)) - arraycode = "BH"[self.bitdepth > 8] - # Like :meth:`group` but producing an array.array object for - # each row. - pixels = map( - lambda *row: array(arraycode, row), - *[iter(self.deinterlace(raw))] * self.width * self.planes - ) - else: - pixels = self.iterboxed(self.iterstraight(raw)) - meta = dict() - for attr in "greyscale alpha planes bitdepth interlace".split(): - meta[attr] = getattr(self, attr) - meta["size"] = (self.width, self.height) - for attr in "gamma transparent background".split(): - a = getattr(self, attr, None) - if a is not None: - meta[attr] = a - return self.width, self.height, pixels, meta - - def read_flat(self): - """ - Read a PNG file and decode it into flat row flat pixel format. - Returns (*width*, *height*, *pixels*, *metadata*). - - May use excessive memory. - - `pixels` are returned in flat row flat pixel format. - - See also the :meth:`read` method which returns pixels in the - more stream-friendly boxed row flat pixel format. - """ - - x, y, pixel, meta = self.read() - arraycode = "BH"[meta["bitdepth"] > 8] - pixel = array(arraycode, itertools.chain(*pixel)) - return x, y, pixel, meta - - def palette(self, alpha="natural"): - """Returns a palette that is a sequence of 3-tuples or 4-tuples, - synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These - chunks should have already been processed (for example, by - calling the :meth:`preamble` method). All the tuples are the - same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when - there is a ``tRNS`` chunk. Assumes that the image is colour type - 3 and therefore a ``PLTE`` chunk is required. - - If the `alpha` argument is ``'force'`` then an alpha channel is - always added, forcing the result to be a sequence of 4-tuples. - """ - - if not self.plte: - raise FormatError("Required PLTE chunk is missing in colour type 3 image.") - plte = group(array("B", self.plte), 3) - if self.trns or alpha == "force": - trns = array("B", self.trns or "") - trns.extend([255] * (len(plte) - len(trns))) - plte = map(operator.add, plte, group(trns, 1)) - return plte - - def asDirect(self): - """Returns the image data as a direct representation of an - ``x * y * planes`` array. This method is intended to remove the - need for callers to deal with palettes and transparency - themselves. Images with a palette (colour type 3) - are converted to RGB or RGBA; images with transparency (a - ``tRNS`` chunk) are converted to LA or RGBA as appropriate. - When returned in this format the pixel values represent the - colour value directly without needing to refer to palettes or - transparency information. 
- - Like the :meth:`read` method this method returns a 4-tuple: - - (*width*, *height*, *pixels*, *meta*) - - This method normally returns pixel values with the bit depth - they have in the source image, but when the source PNG has an - ``sBIT`` chunk it is inspected and can reduce the bit depth of - the result pixels; pixel values will be reduced according to - the bit depth specified in the ``sBIT`` chunk (PNG nerds should - note a single result bit depth is used for all channels; the - maximum of the ones specified in the ``sBIT`` chunk. An RGB565 - image will be rescaled to 6-bit RGB666). - - The *meta* dictionary that is returned reflects the `direct` - format and not the original source image. For example, an RGB - source image with a ``tRNS`` chunk to represent a transparent - colour, will have ``planes=3`` and ``alpha=False`` for the - source image, but the *meta* dictionary returned by this method - will have ``planes=4`` and ``alpha=True`` because an alpha - channel is synthesized and added. - - *pixels* is the pixel data in boxed row flat pixel format (just - like the :meth:`read` method). - - All the other aspects of the image data are not changed. - """ - - self.preamble() - - # Simple case, no conversion necessary. - if not self.colormap and not self.trns and not self.sbit: - return self.read() - - x, y, pixels, meta = self.read() - - if self.colormap: - meta["colormap"] = False - meta["alpha"] = bool(self.trns) - meta["bitdepth"] = 8 - meta["planes"] = 3 + bool(self.trns) - plte = self.palette() - - def iterpal(pixels): - for row in pixels: - row = map(plte.__getitem__, row) - yield array("B", itertools.chain(*row)) - - pixels = iterpal(pixels) - elif self.trns: - # It would be nice if there was some reasonable way of doing - # this without generating a whole load of intermediate tuples. - # But tuples does seem like the easiest way, with no other way - # clearly much simpler or much faster. (Actually, the L to LA - # conversion could perhaps go faster (all those 1-tuples!), but - # I still wonder whether the code proliferation is worth it) - it = self.transparent - maxval = 2 ** meta["bitdepth"] - 1 - planes = meta["planes"] - meta["alpha"] = True - meta["planes"] += 1 - typecode = "BH"[meta["bitdepth"] > 8] - - def itertrns(pixels): - for row in pixels: - # For each row we group it into pixels, then form a - # characterisation vector that says whether each pixel - # is opaque or not. Then we convert True/False to - # 0/maxval (by multiplication), and add it as the extra - # channel. 
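# Usage sketch for ``asDirect()`` as described above: palette and transparency
# indirection is removed, so a colour type 3 (palettized) source comes back as
# plain RGB or RGBA rows.  A minimal example, assuming the module is importable
# as ``png``; ``palettized.png`` is a placeholder.
import png

w, h, rows, meta = png.Reader(filename="palettized.png").asDirect()
# planes/alpha describe the returned pixels: e.g. 4 and True when the palette
# carried a tRNS chunk, 3 and False otherwise.
print("%d planes, alpha=%s" % (meta["planes"], meta["alpha"]))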
- row = group(row, planes) - opa = map(it.__ne__, row) - opa = map(maxval.__mul__, opa) - opa = zip(opa) # convert to 1-tuples - yield array(typecode, itertools.chain(*map(operator.add, row, opa))) - - pixels = itertrns(pixels) - targetbitdepth = None - if self.sbit: - sbit = struct.unpack("%dB" % len(self.sbit), self.sbit) - targetbitdepth = max(sbit) - if targetbitdepth > meta["bitdepth"]: - raise Error("sBIT chunk %r exceeds bitdepth %d" % (sbit, self.bitdepth)) - if min(sbit) <= 0: - raise Error("sBIT chunk %r has a 0-entry" % sbit) - if targetbitdepth == meta["bitdepth"]: - targetbitdepth = None - if targetbitdepth: - shift = meta["bitdepth"] - targetbitdepth - meta["bitdepth"] = targetbitdepth - - def itershift(pixels): - for row in pixels: - yield map(shift.__rrshift__, row) - - pixels = itershift(pixels) - return x, y, pixels, meta - - def asFloat(self, maxval=1.0): - """Return image pixels as per :meth:`asDirect` method, but scale - all pixel values to be floating point values between 0.0 and - *maxval*. - """ - - x, y, pixels, info = self.asDirect() - sourcemaxval = 2 ** info["bitdepth"] - 1 - del info["bitdepth"] - info["maxval"] = float(maxval) - factor = float(maxval) / float(sourcemaxval) - - def iterfloat(): - for row in pixels: - yield map(factor.__mul__, row) - - return x, y, iterfloat(), info - - def _as_rescale(self, get, targetbitdepth): - """Helper used by :meth:`asRGB8` and :meth:`asRGBA8`.""" - - width, height, pixels, meta = get() - maxval = 2 ** meta["bitdepth"] - 1 - targetmaxval = 2 ** targetbitdepth - 1 - factor = float(targetmaxval) / float(maxval) - meta["bitdepth"] = targetbitdepth - - def iterscale(): - for row in pixels: - yield map(lambda x: int(round(x * factor)), row) - - return width, height, iterscale(), meta - - def asRGB8(self): - """Return the image data as an RGB pixels with 8-bits per - sample. This is like the :meth:`asRGB` method except that - this method additionally rescales the values so that they - are all between 0 and 255 (8-bit). In the case where the - source image has a bit depth < 8 the transformation preserves - all the information; where the source image has bit depth - > 8, then rescaling to 8-bit values loses precision. No - dithering is performed. Like :meth:`asRGB`, an alpha channel - in the source image will raise an exception. - - This function returns a 4-tuple: - (*width*, *height*, *pixels*, *metadata*). - *width*, *height*, *metadata* are as per the :meth:`read` method. - - *pixels* is the pixel data in boxed row flat pixel format. - """ - - return self._as_rescale(self.asRGB, 8) - - def asRGBA8(self): - """Return the image data as RGBA pixels with 8-bits per - sample. This method is similar to :meth:`asRGB8` and - :meth:`asRGBA`: The result pixels have an alpha channel, *and* - values are rescaled to the range 0 to 255. The alpha channel is - synthesized if necessary (with a small speed penalty). - """ - - return self._as_rescale(self.asRGBA, 8) - - def asRGB(self): - """Return image as RGB pixels. RGB colour images are passed - through unchanged; greyscales are expanded into RGB - triplets (there is a small speed overhead for doing this). - - An alpha channel in the source image will raise an - exception. - - The return values are as for the :meth:`read` method - except that the *metadata* reflect the returned pixels, not the - source image. In particular, for this method - ``metadata['greyscale']`` will be ``False``. 
- """ - - width, height, pixels, meta = self.asDirect() - if meta["alpha"]: - raise Error("will not convert image with alpha channel to RGB") - if not meta["greyscale"]: - return width, height, pixels, meta - meta["greyscale"] = False - typecode = "BH"[meta["bitdepth"] > 8] - - def iterrgb(): - for row in pixels: - a = array(typecode, [0]) * 3 * width - for i in range(3): - a[i::3] = row - yield a - - return width, height, iterrgb(), meta - - def asRGBA(self): - """Return image as RGBA pixels. Greyscales are expanded into - RGB triplets; an alpha channel is synthesized if necessary. - The return values are as for the :meth:`read` method - except that the *metadata* reflect the returned pixels, not the - source image. In particular, for this method - ``metadata['greyscale']`` will be ``False``, and - ``metadata['alpha']`` will be ``True``. - """ - - width, height, pixels, meta = self.asDirect() - if meta["alpha"] and not meta["greyscale"]: - return width, height, pixels, meta - typecode = "BH"[meta["bitdepth"] > 8] - maxval = 2 ** meta["bitdepth"] - 1 - - def newarray(): - return array(typecode, [0]) * 4 * width - - if meta["alpha"] and meta["greyscale"]: - # LA to RGBA - def convert(): - for row in pixels: - # Create a fresh target row, then copy L channel - # into first three target channels, and A channel - # into fourth channel. - a = newarray() - for i in range(3): - a[i::4] = row[0::2] - a[3::4] = row[1::2] - yield a - - elif meta["greyscale"]: - # L to RGBA - def convert(): - for row in pixels: - a = newarray() - for i in range(3): - a[i::4] = row - a[3::4] = array(typecode, [maxval]) * width - yield a - - else: - assert not meta["alpha"] and not meta["greyscale"] - # RGB to RGBA - def convert(): - for row in pixels: - a = newarray() - for i in range(3): - a[i::4] = row[i::3] - a[3::4] = array(typecode, [maxval]) * width - yield a - - meta["alpha"] = True - meta["greyscale"] = False - return width, height, convert(), meta - - -# === Internal Test Support === - -# This section comprises the tests that are internally validated (as -# opposed to tests which produce output files that are externally -# validated). Primarily they are unittests. - -# Note that it is difficult to internally validate the results of -# writing a PNG file. The only thing we can do is read it back in -# again, which merely checks consistency, not that the PNG file we -# produce is valid. - -# Run the tests from the command line: -# python -c 'import png;png.test()' - -# (For an in-memory binary file IO object) We use BytesIO where -# available, otherwise we use StringIO, but name it BytesIO. -try: - from io import BytesIO -except: - from StringIO import StringIO as BytesIO -import tempfile -import unittest - - -def test(): - unittest.main(__name__) - - -def topngbytes(name, rows, x, y, **k): - """Convenience function for creating a PNG file "in memory" as a - string. Creates a :class:`Writer` instance using the keyword arguments, - then passes `rows` to its :meth:`Writer.write` method. The resulting - PNG file is returned as a string. `name` is used to identify the file for - debugging. - """ - - import os - - print(name) - f = BytesIO() - w = Writer(x, y, **k) - w.write(f, rows) - if os.environ.get("PYPNG_TEST_TMP"): - w = open(name, "wb") - w.write(f.getvalue()) - w.close() - return f.getvalue() - - -def testWithIO(inp, out, f): - """Calls the function `f` with ``sys.stdin`` changed to `inp` - and ``sys.stdout`` changed to `out`. They are restored when `f` - returns. This function returns whatever `f` returns. 
- """ - - import os - - try: - oldin, sys.stdin = sys.stdin, inp - oldout, sys.stdout = sys.stdout, out - x = f() - finally: - sys.stdin = oldin - sys.stdout = oldout - if os.environ.get("PYPNG_TEST_TMP") and hasattr(out, "getvalue"): - name = mycallersname() - if name: - w = open(name + ".png", "wb") - w.write(out.getvalue()) - w.close() - return x - - -def mycallersname(): - """Returns the name of the caller of the caller of this function - (hence the name of the caller of the function in which - "mycallersname()" textually appears). Returns None if this cannot - be determined.""" - - # http://docs.python.org/library/inspect.html#the-interpreter-stack - import inspect - - frame = inspect.currentframe() - if not frame: - return None - frame_, filename_, lineno_, funname, linelist_, listi_ = inspect.getouterframes( - frame - )[2] - return funname - - -def seqtobytes(s): - """Convert a sequence of integers to a *bytes* instance. Good for - plastering over Python 2 / Python 3 cracks. - """ - - return strtobytes("".join(chr(x) for x in s)) - - -class Test(unittest.TestCase): - # This member is used by the superclass. If we don't define a new - # class here then when we use self.assertRaises() and the PyPNG code - # raises an assertion then we get no proper traceback. I can't work - # out why, but defining a new class here means we get a proper - # traceback. - class failureException(Exception): - pass - - def helperLN(self, n): - mask = (1 << n) - 1 - # Use small chunk_limit so that multiple chunk writing is - # tested. Making it a test for Issue 20. - w = Writer(15, 17, greyscale=True, bitdepth=n, chunk_limit=99) - f = BytesIO() - w.write_array(f, array("B", map(mask.__and__, range(1, 256)))) - r = Reader(bytes=f.getvalue()) - x, y, pixels, meta = r.read() - self.assertEqual(x, 15) - self.assertEqual(y, 17) - self.assertEqual( - list(itertools.chain(*pixels)), map(mask.__and__, range(1, 256)) - ) - - def testL8(self): - return self.helperLN(8) - - def testL4(self): - return self.helperLN(4) - - def testL2(self): - "Also tests asRGB8." - w = Writer(1, 4, greyscale=True, bitdepth=2) - f = BytesIO() - w.write_array(f, array("B", range(4))) - r = Reader(bytes=f.getvalue()) - x, y, pixels, meta = r.asRGB8() - self.assertEqual(x, 1) - self.assertEqual(y, 4) - for i, row in enumerate(pixels): - self.assertEqual(len(row), 3) - self.assertEqual(list(row), [0x55 * i] * 3) - - def testP2(self): - "2-bit palette." - a = (255, 255, 255) - b = (200, 120, 120) - c = (50, 99, 50) - w = Writer(1, 4, bitdepth=2, palette=[a, b, c]) - f = BytesIO() - w.write_array(f, array("B", (0, 1, 1, 2))) - r = Reader(bytes=f.getvalue()) - x, y, pixels, meta = r.asRGB8() - self.assertEqual(x, 1) - self.assertEqual(y, 4) - self.assertEqual(list(pixels), map(list, [a, b, b, c])) - - def testPtrns(self): - "Test colour type 3 and tRNS chunk (and 4-bit palette)." - a = (50, 99, 50, 50) - b = (200, 120, 120, 80) - c = (255, 255, 255) - d = (200, 120, 120) - e = (50, 99, 50) - w = Writer(3, 3, bitdepth=4, palette=[a, b, c, d, e]) - f = BytesIO() - w.write_array(f, array("B", (4, 3, 2, 3, 2, 0, 2, 0, 1))) - r = Reader(bytes=f.getvalue()) - x, y, pixels, meta = r.asRGBA8() - self.assertEqual(x, 3) - self.assertEqual(y, 3) - c = c + (255,) - d = d + (255,) - e = e + (255,) - boxed = [(e, d, c), (d, c, a), (c, a, b)] - flat = map(lambda row: itertools.chain(*row), boxed) - self.assertEqual(map(list, pixels), map(list, flat)) - - def testRGBtoRGBA(self): - "asRGBA8() on colour type 2 source." 
"" - # Test for Issue 26 - r = Reader(bytes=_pngsuite["basn2c08"]) - x, y, pixels, meta = r.asRGBA8() - # Test the pixels at row 9 columns 0 and 1. - row9 = list(pixels)[9] - self.assertEqual(row9[0:8], [0xFF, 0xDF, 0xFF, 0xFF, 0xFF, 0xDE, 0xFF, 0xFF]) - - def testLtoRGBA(self): - "asRGBA() on grey source." "" - # Test for Issue 60 - r = Reader(bytes=_pngsuite["basi0g08"]) - x, y, pixels, meta = r.asRGBA() - row9 = list(list(pixels)[9]) - self.assertEqual(row9[0:8], [222, 222, 222, 255, 221, 221, 221, 255]) - - def testCtrns(self): - "Test colour type 2 and tRNS chunk." - # Test for Issue 25 - r = Reader(bytes=_pngsuite["tbrn2c08"]) - x, y, pixels, meta = r.asRGBA8() - # I just happen to know that the first pixel is transparent. - # In particular it should be #7f7f7f00 - row0 = list(pixels)[0] - self.assertEqual(tuple(row0[0:4]), (0x7F, 0x7F, 0x7F, 0x00)) - - def testAdam7read(self): - """Adam7 interlace reading. - Specifically, test that for images in the PngSuite that - have both an interlaced and straightlaced pair that both - images from the pair produce the same array of pixels.""" - for candidate in _pngsuite: - if not candidate.startswith("basn"): - continue - candi = candidate.replace("n", "i") - if candi not in _pngsuite: - continue - print("adam7 read %s" % (candidate,)) - straight = Reader(bytes=_pngsuite[candidate]) - adam7 = Reader(bytes=_pngsuite[candi]) - # Just compare the pixels. Ignore x,y (because they're - # likely to be correct?); metadata is ignored because the - # "interlace" member differs. Lame. - straight = straight.read()[2] - adam7 = adam7.read()[2] - self.assertEqual(map(list, straight), map(list, adam7)) - - def testAdam7write(self): - """Adam7 interlace writing. - For each test image in the PngSuite, write an interlaced - and a straightlaced version. Decode both, and compare results. - """ - # Not such a great test, because the only way we can check what - # we have written is to read it back again. - - for name, bytes in _pngsuite.items(): - # Only certain colour types supported for this test. 
- if name[3:5] not in ["n0", "n2", "n4", "n6"]: - continue - it = Reader(bytes=bytes) - x, y, pixels, meta = it.read() - pngi = topngbytes( - "adam7wn" + name + ".png", - pixels, - x=x, - y=y, - bitdepth=it.bitdepth, - greyscale=it.greyscale, - alpha=it.alpha, - transparent=it.transparent, - interlace=False, - ) - x, y, ps, meta = Reader(bytes=pngi).read() - it = Reader(bytes=bytes) - x, y, pixels, meta = it.read() - pngs = topngbytes( - "adam7wi" + name + ".png", - pixels, - x=x, - y=y, - bitdepth=it.bitdepth, - greyscale=it.greyscale, - alpha=it.alpha, - transparent=it.transparent, - interlace=True, - ) - x, y, pi, meta = Reader(bytes=pngs).read() - self.assertEqual(map(list, ps), map(list, pi)) - - def testPGMin(self): - """Test that the command line tool can read PGM files.""" - - def do(): - return _main(["testPGMin"]) - - s = BytesIO() - s.write(strtobytes("P5 2 2 3\n")) - s.write(strtobytes("\x00\x01\x02\x03")) - s.flush() - s.seek(0) - o = BytesIO() - testWithIO(s, o, do) - r = Reader(bytes=o.getvalue()) - x, y, pixels, meta = r.read() - self.assertTrue(r.greyscale) - self.assertEqual(r.bitdepth, 2) - - def testPAMin(self): - """Test that the command line tool can read PAM file.""" - - def do(): - return _main(["testPAMin"]) - - s = BytesIO() - s.write( - strtobytes( - "P7\nWIDTH 3\nHEIGHT 1\nDEPTH 4\nMAXVAL 255\n" - "TUPLTYPE RGB_ALPHA\nENDHDR\n" - ) - ) - # The pixels in flat row flat pixel format - flat = [255, 0, 0, 255, 0, 255, 0, 120, 0, 0, 255, 30] - asbytes = seqtobytes(flat) - s.write(asbytes) - s.flush() - s.seek(0) - o = BytesIO() - testWithIO(s, o, do) - r = Reader(bytes=o.getvalue()) - x, y, pixels, meta = r.read() - self.assertTrue(r.alpha) - self.assertTrue(not r.greyscale) - self.assertEqual(list(itertools.chain(*pixels)), flat) - - def testLA4(self): - """Create an LA image with bitdepth 4.""" - bytes = topngbytes( - "la4.png", [[5, 12]], 1, 1, greyscale=True, alpha=True, bitdepth=4 - ) - sbit = Reader(bytes=bytes).chunk("sBIT")[1] - self.assertEqual(sbit, strtobytes("\x04\x04")) - - def testPNMsbit(self): - """Test that PNM files can generates sBIT chunk.""" - - def do(): - return _main(["testPNMsbit"]) - - s = BytesIO() - s.write(strtobytes("P6 8 1 1\n")) - for pixel in range(8): - s.write(struct.pack(" 255: - a = array("H") - else: - a = array("B") - fw = float(width) - fh = float(height) - pfun = test_patterns[pattern] - for y in range(height): - fy = float(y) / fh - for x in range(width): - a.append(int(round(pfun(float(x) / fw, fy) * maxval))) - return a - - def test_rgba(size=256, bitdepth=8, red="GTB", green="GLR", blue="RTL", alpha=None): - """ - Create a test image. Each channel is generated from the - specified pattern; any channel apart from red can be set to - None, which will cause it not to be in the image. It - is possible to create all PNG channel types (L, RGB, LA, RGBA), - as well as non PNG channel types (RGA, and so on). - """ - - i = test_pattern(size, size, bitdepth, red) - psize = 1 - for channel in (green, blue, alpha): - if channel: - c = test_pattern(size, size, bitdepth, channel) - i = interleave_planes(i, c, psize, 1) - psize += 1 - return i - - def pngsuite_image(name): - """ - Create a test image by reading an internal copy of the files - from the PngSuite. Returned in flat row flat pixel format. 
- """ - - if name not in _pngsuite: - raise NotImplementedError( - "cannot find PngSuite file %s (use -L for a list)" % name - ) - r = Reader(bytes=_pngsuite[name]) - w, h, pixels, meta = r.asDirect() - assert w == h - # LAn for n < 8 is a special case for which we need to rescale - # the data. - if meta["greyscale"] and meta["alpha"] and meta["bitdepth"] < 8: - factor = 255 // (2 ** meta["bitdepth"] - 1) - - def rescale(data): - for row in data: - yield map(factor.__mul__, row) - - pixels = rescale(pixels) - meta["bitdepth"] = 8 - arraycode = "BH"[meta["bitdepth"] > 8] - return w, array(arraycode, itertools.chain(*pixels)), meta - - # The body of test_suite() - size = 256 - if options.test_size: - size = options.test_size - options.bitdepth = options.test_depth - options.greyscale = bool(options.test_black) - - kwargs = {} - if options.test_red: - kwargs["red"] = options.test_red - if options.test_green: - kwargs["green"] = options.test_green - if options.test_blue: - kwargs["blue"] = options.test_blue - if options.test_alpha: - kwargs["alpha"] = options.test_alpha - if options.greyscale: - if options.test_red or options.test_green or options.test_blue: - raise ValueError( - "cannot specify colours (R, G, B) when greyscale image (black channel, K) is specified" - ) - kwargs["red"] = options.test_black - kwargs["green"] = None - kwargs["blue"] = None - options.alpha = bool(options.test_alpha) - if not args: - pixels = test_rgba(size, options.bitdepth, **kwargs) - else: - size, pixels, meta = pngsuite_image(args[0]) - for k in ["bitdepth", "alpha", "greyscale"]: - setattr(options, k, meta[k]) - - writer = Writer( - size, - size, - bitdepth=options.bitdepth, - transparent=options.transparent, - background=options.background, - gamma=options.gamma, - greyscale=options.greyscale, - alpha=options.alpha, - compression=options.compression, - interlace=options.interlace, - ) - writer.write_array(sys.stdout, pixels) - - -def read_pam_header(infile): - """ - Read (the rest of a) PAM header. `infile` should be positioned - immediately after the initial 'P7' line (at the beginning of the - second line). Returns are as for `read_pnm_header`. - """ - - # Unlike PBM, PGM, and PPM, we can read the header a line at a time. - header = dict() - while True: - l = infile.readline().strip() - if l == strtobytes("ENDHDR"): - break - if not l: - raise EOFError("PAM ended prematurely") - if l[0] == strtobytes("#"): - continue - l = l.split(None, 1) - if l[0] not in header: - header[l[0]] = l[1] - else: - header[l[0]] += strtobytes(" ") + l[1] - - required = ["WIDTH", "HEIGHT", "DEPTH", "MAXVAL"] - required = [strtobytes(x) for x in required] - WIDTH, HEIGHT, DEPTH, MAXVAL = required - present = [x for x in required if x in header] - if len(present) != len(required): - raise Error("PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL") - width = int(header[WIDTH]) - height = int(header[HEIGHT]) - depth = int(header[DEPTH]) - maxval = int(header[MAXVAL]) - if width <= 0 or height <= 0 or depth <= 0 or maxval <= 0: - raise Error("WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers") - return "P7", width, height, depth, maxval - - -def read_pnm_header(infile, supported=("P5", "P6")): - """ - Read a PNM header, returning (format,width,height,depth,maxval). - `width` and `height` are in pixels. `depth` is the number of - channels in the image; for PBM and PGM it is synthesized as 1, for - PPM as 3; for PAM images it is read from the header. `maxval` is - synthesized (as 1) for PBM images. 
- """ - - # Generally, see http://netpbm.sourceforge.net/doc/ppm.html - # and http://netpbm.sourceforge.net/doc/pam.html - - supported = [strtobytes(x) for x in supported] - - # Technically 'P7' must be followed by a newline, so by using - # rstrip() we are being liberal in what we accept. I think this - # is acceptable. - type = infile.read(3).rstrip() - if type not in supported: - raise NotImplementedError("file format %s not supported" % type) - if type == strtobytes("P7"): - # PAM header parsing is completely different. - return read_pam_header(infile) - # Expected number of tokens in header (3 for P4, 4 for P6) - expected = 4 - pbm = ("P1", "P4") - if type in pbm: - expected = 3 - header = [type] - - # We have to read the rest of the header byte by byte because the - # final whitespace character (immediately following the MAXVAL in - # the case of P6) may not be a newline. Of course all PNM files in - # the wild use a newline at this point, so it's tempting to use - # readline; but it would be wrong. - def getc(): - c = infile.read(1) - if not c: - raise Error("premature EOF reading PNM header") - return c - - c = getc() - while True: - # Skip whitespace that precedes a token. - while c.isspace(): - c = getc() - # Skip comments. - while c == "#": - while c not in "\n\r": - c = getc() - if not c.isdigit(): - raise Error("unexpected character %s found in header" % c) - # According to the specification it is legal to have comments - # that appear in the middle of a token. - # This is bonkers; I've never seen it; and it's a bit awkward to - # code good lexers in Python (no goto). So we break on such - # cases. - token = strtobytes("") - while c.isdigit(): - token += c - c = getc() - # Slight hack. All "tokens" are decimal integers, so convert - # them here. - header.append(int(token)) - if len(header) == expected: - break - # Skip comments (again) - while c == "#": - while c not in "\n\r": - c = getc() - if not c.isspace(): - raise Error("expected header to end with whitespace, not %s" % c) - - if type in pbm: - # synthesize a MAXVAL - header.append(1) - depth = (1, 3)[type == strtobytes("P6")] - return header[0], header[1], header[2], depth, header[3] - - -def write_pnm(file, width, height, pixels, meta): - """Write a Netpbm PNM/PAM file.""" - - bitdepth = meta["bitdepth"] - maxval = 2 ** bitdepth - 1 - # Rudely, the number of image planes can be used to determine - # whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM). - planes = meta["planes"] - # Can be an assert as long as we assume that pixels and meta came - # from a PNG file. - assert planes in (1, 2, 3, 4) - if planes in (1, 3): - if 1 == planes: - # PGM - # Could generate PBM if maxval is 1, but we don't (for one - # thing, we'd have to convert the data, not just blat it - # out). - fmt = "P5" - else: - # PPM - fmt = "P6" - file.write("%s %d %d %d\n" % (fmt, width, height, maxval)) - if planes in (2, 4): - # PAM - # See http://netpbm.sourceforge.net/doc/pam.html - if 2 == planes: - tupltype = "GRAYSCALE_ALPHA" - else: - tupltype = "RGB_ALPHA" - file.write( - "P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n" - "TUPLTYPE %s\nENDHDR\n" % (width, height, planes, maxval, tupltype) - ) - # Values per row - vpr = planes * width - # struct format - fmt = ">%d" % vpr - if maxval > 0xFF: - fmt = fmt + "H" - else: - fmt = fmt + "B" - for row in pixels: - file.write(struct.pack(fmt, *row)) - file.flush() - - -def color_triple(color): - """ - Convert a command line colour value to a RGB triple of integers. 
- FIXME: Somewhere we need support for greyscale backgrounds etc. - """ - if color.startswith("#") and len(color) == 4: - return (int(color[1], 16), int(color[2], 16), int(color[3], 16)) - if color.startswith("#") and len(color) == 7: - return (int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16)) - elif color.startswith("#") and len(color) == 13: - return (int(color[1:5], 16), int(color[5:9], 16), int(color[9:13], 16)) - - -def _main(argv): - """ - Run the PNG encoder with options from the command line. - """ - - # Parse command line arguments - from optparse import OptionParser - import re - - version = "%prog " + re.sub(r"( ?\$|URL: |Rev:)", "", __version__) - parser = OptionParser(version=version) - parser.set_usage("%prog [options] [imagefile]") - parser.add_option( - "-r", - "--read-png", - default=False, - action="store_true", - help="Read PNG, write PNM", - ) - parser.add_option( - "-i", - "--interlace", - default=False, - action="store_true", - help="create an interlaced PNG file (Adam7)", - ) - parser.add_option( - "-t", - "--transparent", - action="store", - type="string", - metavar="color", - help="mark the specified colour (#RRGGBB) as transparent", - ) - parser.add_option( - "-b", - "--background", - action="store", - type="string", - metavar="color", - help="save the specified background colour", - ) - parser.add_option( - "-a", - "--alpha", - action="store", - type="string", - metavar="pgmfile", - help="alpha channel transparency (RGBA)", - ) - parser.add_option( - "-g", - "--gamma", - action="store", - type="float", - metavar="value", - help="save the specified gamma value", - ) - parser.add_option( - "-c", - "--compression", - action="store", - type="int", - metavar="level", - help="zlib compression level (0-9)", - ) - parser.add_option( - "-T", - "--test", - default=False, - action="store_true", - help="create a test image (a named PngSuite image if an argument is supplied)", - ) - parser.add_option( - "-L", - "--list", - default=False, - action="store_true", - help="print list of named test images", - ) - parser.add_option( - "-R", - "--test-red", - action="store", - type="string", - metavar="pattern", - help="test pattern for the red image layer", - ) - parser.add_option( - "-G", - "--test-green", - action="store", - type="string", - metavar="pattern", - help="test pattern for the green image layer", - ) - parser.add_option( - "-B", - "--test-blue", - action="store", - type="string", - metavar="pattern", - help="test pattern for the blue image layer", - ) - parser.add_option( - "-A", - "--test-alpha", - action="store", - type="string", - metavar="pattern", - help="test pattern for the alpha image layer", - ) - parser.add_option( - "-K", - "--test-black", - action="store", - type="string", - metavar="pattern", - help="test pattern for greyscale image", - ) - parser.add_option( - "-d", - "--test-depth", - default=8, - action="store", - type="int", - metavar="NBITS", - help="create test PNGs that are NBITS bits per channel", - ) - parser.add_option( - "-S", - "--test-size", - action="store", - type="int", - metavar="size", - help="width and height of the test image", - ) - (options, args) = parser.parse_args(args=argv[1:]) - - # Convert options - if options.transparent is not None: - options.transparent = color_triple(options.transparent) - if options.background is not None: - options.background = color_triple(options.background) - - if options.list: - names = list(_pngsuite) - names.sort() - for name in names: - print(name) - return - - # Run regression tests - 
if options.test: - return test_suite(options, args) - - # Prepare input and output files - if len(args) == 0: - infilename = "-" - infile = sys.stdin - elif len(args) == 1: - infilename = args[0] - infile = open(infilename, "rb") - else: - parser.error("more than one input file") - outfile = sys.stdout - - if options.read_png: - # Encode PNG to PPM - png = Reader(file=infile) - width, height, pixels, meta = png.asDirect() - write_pnm(outfile, width, height, pixels, meta) - else: - # Encode PNM to PNG - format, width, height, depth, maxval = read_pnm_header( - infile, ("P5", "P6", "P7") - ) - # When it comes to the variety of input formats, we do something - # rather rude. Observe that L, LA, RGB, RGBA are the 4 colour - # types supported by PNG and that they correspond to 1, 2, 3, 4 - # channels respectively. So we use the number of channels in - # the source image to determine which one we have. We do not - # care about TUPLTYPE. - greyscale = depth <= 2 - pamalpha = depth in (2, 4) - supported = map(lambda x: 2 ** x - 1, range(1, 17)) - try: - mi = supported.index(maxval) - except ValueError: - raise NotImplementedError( - "your maxval (%s) not in supported list %s" % (maxval, str(supported)) - ) - bitdepth = mi + 1 - writer = Writer( - width, - height, - greyscale=greyscale, - bitdepth=bitdepth, - interlace=options.interlace, - transparent=options.transparent, - background=options.background, - alpha=bool(pamalpha or options.alpha), - gamma=options.gamma, - compression=options.compression, - ) - if options.alpha: - pgmfile = open(options.alpha, "rb") - format, awidth, aheight, adepth, amaxval = read_pnm_header(pgmfile, "P5") - if amaxval != "255": - raise NotImplementedError( - "maxval %s not supported for alpha channel" % amaxval - ) - if (awidth, aheight) != (width, height): - raise ValueError( - "alpha channel image size mismatch" - " (%s has %sx%s but %s has %sx%s)" - % (infilename, width, height, options.alpha, awidth, aheight) - ) - writer.convert_ppm_and_pgm(infile, pgmfile, outfile) - else: - writer.convert_pnm(infile, outfile) - - -if __name__ == "__main__": - try: - _main(sys.argv) - except Error as e: - sys.stderr.write("%s\n" % (e,)) diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/run_tests.py b/venv/Lib/site-packages/pygame/tests/test_utils/run_tests.py deleted file mode 100644 index a193e23..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/run_tests.py +++ /dev/null @@ -1,350 +0,0 @@ -import sys - -if __name__ == "__main__": - sys.exit("This module is for import only") - -test_pkg_name = ".".join(__name__.split(".")[0:-2]) -is_pygame_pkg = test_pkg_name == "pygame.tests" -test_runner_mod = test_pkg_name + ".test_utils.test_runner" - -if is_pygame_pkg: - from pygame.tests.test_utils import import_submodule - from pygame.tests.test_utils.test_runner import ( - prepare_test_env, - run_test, - combine_results, - get_test_results, - TEST_RESULTS_START, - ) -else: - from test.test_utils import import_submodule - from test.test_utils.test_runner import ( - prepare_test_env, - run_test, - combine_results, - get_test_results, - TEST_RESULTS_START, - ) -import pygame -import pygame.threads - -import os -import re -import shutil -import tempfile -import time -import random -from pprint import pformat - -was_run = False - - -def run(*args, **kwds): - """Run the Pygame unit test suite and return (total tests run, fails dict) - - Positional arguments (optional): - The names of tests to include. If omitted then all tests are run. 
Test - names need not include the trailing '_test'. - - Keyword arguments: - incomplete - fail incomplete tests (default False) - usesubprocess - if True, run each test module in a separate - subprocess rather than in the current process (default False) - dump - dump failures/errors as dict ready to eval (default False) - file - if provided, the name of a file into which to dump failures/errors - timings - if provided, the number of times to run each individual test to - get an average run time (default is run each test once) - exclude - A list of TAG names to exclude from the run. The items may be - comma or space separated. - show_output - show silenced stderr/stdout on errors (default False) - all - dump all results, not just errors (default False) - randomize - randomize order of tests (default False) - seed - if provided, an integer seed for the randomizer - multi_thread - if provided, the number of THREADS in which to run - subprocessed tests - time_out - if usesubprocess is True then the time limit in seconds before - killing a test (default 30) - fake - if provided, the name of the fake tests package in the - run_tests__tests subpackage to run instead of the normal - Pygame tests - python - the path to a python executable to run subprocessed tests - (default sys.executable) - interactive - allow tests tagged 'interactive'. - - Return value: - A tuple of the total number of tests run and the combined count of - errors and failures (0 if everything passed). - - By default individual test modules are run in separate subprocesses. This - recreates normal Pygame usage where pygame.init() and pygame.quit() are - called only once per program execution, and avoids unfortunate - interactions between test modules. Also, a time limit is placed on test - execution, so frozen tests are killed when their time allotment has expired. - Use the single process option if threading is not working properly or if - tests are taking too long. It is not guaranteed that all tests will pass - in single process mode. - - Tests are run in a randomized order if the randomize argument is True or a - seed argument is provided. If no seed integer is provided then the system - time is used. - - Individual test modules may have a corresponding *_tags.py module, - defining a __tags__ attribute, a list of tag strings used to selectively - omit modules from a run. By default only the 'interactive', 'ignore', and - 'subprocess_ignore' tags are ignored. 'interactive' is for modules that - take user input, like cdrom_test.py. 'ignore' and 'subprocess_ignore' are - for disabling modules in foreground and subprocess modes respectively. - These are for disabling tests on optional modules or for experimental - modules with known problems. These modules can be run from the console as - a Python program. - - This function can only be called once per Python session. It is not - reentrant.
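- # Illustrative sketch only -- not part of the deleted file. It shows how the
- # keyword arguments documented above might be combined in a typical call,
- # assuming the pygame test package is installed and importable; the chosen
- # module names, seed, and timeout are arbitrary examples.
- from pygame.tests.test_utils.run_tests import run
-
- # Run two named modules (the trailing "_test" may be omitted), shuffled with
- # a fixed seed, each module in its own subprocess with a 60 second limit,
- # skipping anything tagged 'interactive'.
- total, problems = run(
-     "surface",
-     "rect_test",
-     usesubprocess=True,
-     randomize=True,
-     seed=42,
-     time_out=60,
-     exclude=("interactive",),
- )
- print("%d tests run, %d errors/failures" % (total, problems))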
- - """ - - global was_run - - if was_run: - raise RuntimeError("run() was already called this session") - was_run = True - - options = kwds.copy() - option_usesubprocess = options.get("usesubprocess", False) - option_dump = options.pop("dump", False) - option_file = options.pop("file", None) - option_randomize = options.get("randomize", False) - option_seed = options.get("seed", None) - option_multi_thread = options.pop("multi_thread", 1) - option_time_out = options.pop("time_out", 120) - option_fake = options.pop("fake", None) - option_python = options.pop("python", sys.executable) - option_exclude = options.pop("exclude", ()) - option_interactive = options.pop("interactive", False) - - if not option_interactive and "interactive" not in option_exclude: - option_exclude += ("interactive",) - if option_usesubprocess and "subprocess_ignore" not in option_exclude: - option_exclude += ("subprocess_ignore",) - elif "ignore" not in option_exclude: - option_exclude += ("ignore",) - - option_exclude += ("python3_ignore",) - option_exclude += ("SDL2_ignore",) - - main_dir, test_subdir, fake_test_subdir = prepare_test_env() - - ########################################################################### - # Compile a list of test modules. If fake, then compile list of fake - # xxxx_test.py from run_tests__tests - - TEST_MODULE_RE = re.compile(r"^(.+_test)\.py$") - - test_mods_pkg_name = test_pkg_name - - working_dir_temp = tempfile.mkdtemp() - - if option_fake is not None: - test_mods_pkg_name = ".".join( - [test_mods_pkg_name, "run_tests__tests", option_fake] - ) - test_subdir = os.path.join(fake_test_subdir, option_fake) - working_dir = test_subdir - else: - working_dir = working_dir_temp - - # Added in because some machines will need os.environ else there will be - # false failures in subprocess mode. Same issue as python2.6. Needs some - # env vars. - - test_env = os.environ - - fmt1 = "%s.%%s" % test_mods_pkg_name - fmt2 = "%s.%%s_test" % test_mods_pkg_name - if args: - test_modules = [m.endswith("_test") and (fmt1 % m) or (fmt2 % m) for m in args] - else: - test_modules = [] - for f in sorted(os.listdir(test_subdir)): - for match in TEST_MODULE_RE.findall(f): - test_modules.append(fmt1 % (match,)) - - ########################################################################### - # Remove modules to be excluded. 
- - tmp = test_modules - test_modules = [] - for name in tmp: - tag_module_name = "%s_tags" % (name[0:-5],) - try: - tag_module = import_submodule(tag_module_name) - except ImportError: - test_modules.append(name) - else: - try: - tags = tag_module.__tags__ - except AttributeError: - print("%s has no tags: ignoring" % (tag_module_name,)) - test_modules.append(name) - else: - for tag in tags: - if tag in option_exclude: - print("skipping %s (tag '%s')" % (name, tag)) - break - else: - test_modules.append(name) - del tmp, tag_module_name, name - - ########################################################################### - # Meta results - - results = {} - meta_results = {"__meta__": {}} - meta = meta_results["__meta__"] - - ########################################################################### - # Randomization - - if option_randomize or option_seed is not None: - if option_seed is None: - option_seed = time.time() - meta["random_seed"] = option_seed - print("\nRANDOM SEED USED: %s\n" % option_seed) - random.seed(option_seed) - random.shuffle(test_modules) - - ########################################################################### - # Single process mode - - if not option_usesubprocess: - options["exclude"] = option_exclude - t = time.time() - for module in test_modules: - results.update(run_test(module, **options)) - t = time.time() - t - - ########################################################################### - # Subprocess mode - # - - else: - if is_pygame_pkg: - from pygame.tests.test_utils.async_sub import proc_in_time_or_kill - else: - from test.test_utils.async_sub import proc_in_time_or_kill - - pass_on_args = ["--exclude", ",".join(option_exclude)] - for field in ["randomize", "incomplete", "unbuffered", "verbosity"]: - if kwds.get(field, False): - pass_on_args.append("--" + field) - - def sub_test(module): - print("loading %s" % module) - - cmd = [option_python, "-m", test_runner_mod, module] + pass_on_args - - return ( - module, - (cmd, test_env, working_dir), - proc_in_time_or_kill( - cmd, option_time_out, env=test_env, wd=working_dir - ), - ) - - if option_multi_thread > 1: - - def tmap(f, args): - return pygame.threads.tmap( - f, args, stop_on_error=False, num_workers=option_multi_thread - ) - - else: - tmap = map - - t = time.time() - - for module, cmd, (return_code, raw_return) in tmap(sub_test, test_modules): - test_file = "%s.py" % os.path.join(test_subdir, module) - cmd, test_env, working_dir = cmd - - test_results = get_test_results(raw_return) - if test_results: - results.update(test_results) - else: - results[module] = {} - - results[module].update( - dict( - return_code=return_code, - raw_return=raw_return, - cmd=cmd, - test_file=test_file, - test_env=test_env, - working_dir=working_dir, - module=module, - ) - ) - - t = time.time() - t - - ########################################################################### - # Output Results - # - - untrusty_total, combined = combine_results(results, t) - total, n_errors, n_failures = count_results(results) - - meta["total_tests"] = total - meta["combined"] = combined - meta["total_errors"] = n_errors - meta["total_failures"] = n_failures - results.update(meta_results) - - if not option_usesubprocess and total != untrusty_total: - raise AssertionError( - "Something went wrong in the Test Machinery:\n" - "total: %d != untrusty_total: %d" % (total, untrusty_total) - ) - - if not option_dump: - print(combined) - else: - print(TEST_RESULTS_START) - print(pformat(results)) - - if option_file is not None: - 
results_file = open(option_file, "w") - try: - results_file.write(pformat(results)) - finally: - results_file.close() - - shutil.rmtree(working_dir_temp) - - return total, n_errors + n_failures - - -def count_results(results): - total = errors = failures = 0 - for result in results.values(): - if result.get("return_code", 0): - total += 1 - errors += 1 - else: - total += result["num_tests"] - errors += result["num_errors"] - failures += result["num_failures"] - - return total, errors, failures - - -def run_and_exit(*args, **kwargs): - """Run the tests, and if there are failures, exit with a return code of 1. - - This is needed for various buildbots to recognise that the tests have - failed. - """ - total, fails = run(*args, **kwargs) - if fails: - sys.exit(1) - sys.exit(0) diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/test_machinery.py b/venv/Lib/site-packages/pygame/tests/test_utils/test_machinery.py deleted file mode 100644 index 114c281..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/test_machinery.py +++ /dev/null @@ -1,89 +0,0 @@ -import inspect -import random -import re -import unittest - -try: - from StringIO import StringIO -except ImportError: - from io import StringIO - -from . import import_submodule - - -class PygameTestLoader(unittest.TestLoader): - def __init__( - self, randomize_tests=False, include_incomplete=False, exclude=("interactive",) - ): - super(PygameTestLoader, self).__init__() - self.randomize_tests = randomize_tests - - if exclude is None: - self.exclude = set() - else: - self.exclude = set(exclude) - - if include_incomplete: - self.testMethodPrefix = ("test", "todo_") - - def getTestCaseNames(self, testCaseClass): - res = [] - for name in super(PygameTestLoader, self).getTestCaseNames(testCaseClass): - tags = get_tags(testCaseClass, getattr(testCaseClass, name)) - if self.exclude.isdisjoint(tags): - res.append(name) - - if self.randomize_tests: - random.shuffle(res) - - return res - - -# Exclude by tags: - -TAGS_RE = re.compile(r"\|[tT]ags:(-?[ a-zA-Z,0-9_\n]+)\|", re.M) - - -class TestTags: - def __init__(self): - self.memoized = {} - self.parent_modules = {} - - def get_parent_module(self, class_): - if class_ not in self.parent_modules: - self.parent_modules[class_] = import_submodule(class_.__module__) - return self.parent_modules[class_] - - def __call__(self, parent_class, meth): - key = (parent_class, meth.__name__) - if key not in self.memoized: - parent_module = self.get_parent_module(parent_class) - - module_tags = getattr(parent_module, "__tags__", []) - class_tags = getattr(parent_class, "__tags__", []) - - tags = TAGS_RE.search(inspect.getdoc(meth) or "") - if tags: - test_tags = [t.strip() for t in tags.group(1).split(",")] - else: - test_tags = [] - - combined = set() - for tags in (module_tags, class_tags, test_tags): - if not tags: - continue - - add = set([t for t in tags if not t.startswith("-")]) - remove = set([t[1:] for t in tags if t not in add]) - - if add: - combined.update(add) - if remove: - combined.difference_update(remove) - - self.memoized[key] = combined - - return self.memoized[key] - - -get_tags = TestTags() diff --git a/venv/Lib/site-packages/pygame/tests/test_utils/test_runner.py b/venv/Lib/site-packages/pygame/tests/test_utils/test_runner.py deleted file mode 100644 index 4c35221..0000000 --- a/venv/Lib/site-packages/pygame/tests/test_utils/test_runner.py +++ /dev/null @@ -1,330 +0,0 @@ -import sys -import os - -if __name__ == "__main__": - pkg_dir = 
os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - parent_dir, pkg_name = os.path.split(pkg_dir) - is_pygame_pkg = pkg_name == "tests" and os.path.split(parent_dir)[1] == "pygame" - if not is_pygame_pkg: - sys.path.insert(0, parent_dir) -else: - is_pygame_pkg = __name__.startswith("pygame.tests.") - -import unittest -from .test_machinery import PygameTestLoader - -import re - -try: - import StringIO -except ImportError: - import io as StringIO - -import optparse -from pprint import pformat - - -def prepare_test_env(): - test_subdir = os.path.split(os.path.split(os.path.abspath(__file__))[0])[0] - main_dir = os.path.split(test_subdir)[0] - sys.path.insert(0, test_subdir) - fake_test_subdir = os.path.join(test_subdir, "run_tests__tests") - return main_dir, test_subdir, fake_test_subdir - - -main_dir, test_subdir, fake_test_subdir = prepare_test_env() - -################################################################################ -# Set the command line options -# -# options are shared with run_tests.py so make sure not to conflict -# in time more will be added here - -TAG_PAT = r"-?[a-zA-Z0-9_]+" -TAG_RE = re.compile(TAG_PAT) -EXCLUDE_RE = re.compile(r"(%s,?\s*)+$" % (TAG_PAT,)) - - -def exclude_callback(option, opt, value, parser): - if EXCLUDE_RE.match(value) is None: - raise optparse.OptionValueError("%s argument has invalid value" % (opt,)) - parser.values.exclude = TAG_RE.findall(value) - - -opt_parser = optparse.OptionParser() - -opt_parser.add_option( - "-i", "--incomplete", action="store_true", help="fail incomplete tests" -) - -opt_parser.add_option( - "-s", - "--usesubprocess", - action="store_true", - help="run everything in a single process " " (default: use no subprocesses)", -) - -opt_parser.add_option( - "-e", - "--exclude", - action="callback", - type="string", - help="exclude tests containing any of TAGS", - callback=exclude_callback, -) - -opt_parser.add_option( - "-u", - "--unbuffered", - action="store_true", - help="Show stdout/stderr as tests run, rather than storing it and showing on failures", -) - -opt_parser.add_option( - "-v", - "--verbose", - dest="verbosity", - action="store_const", - const=2, - help="Verbose output", -) -opt_parser.add_option( - "-q", - "--quiet", - dest="verbosity", - action="store_const", - const=0, - help="Quiet output", -) - -opt_parser.add_option( - "-r", "--randomize", action="store_true", help="randomize order of tests" -) - -################################################################################ -# If an xxxx_test.py takes longer than TIME_OUT seconds it will be killed -# This is only the default, can be over-ridden on command line - -TIME_OUT = 30 - -# DEFAULTS - -################################################################################ -# Human readable output -# - -COMPLETE_FAILURE_TEMPLATE = """ -====================================================================== -ERROR: all_tests_for (%(module)s.AllTestCases) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "test/%(module)s.py", line 1, in all_tests_for -subprocess completely failed with return code of %(return_code)s -cmd: %(cmd)s -test_env: %(test_env)s -working_dir: %(working_dir)s -return (first 10 and last 10 lines): -%(raw_return)s - -""" # Leave that last empty line else build page regex won't match -# Text also needs to be vertically compressed - - -RAN_TESTS_DIV = (70 * "-") + "\nRan" - -DOTS = re.compile("^([FE.sux]*)$", re.MULTILINE) - - -def extract_tracebacks(output): - 
"""from test runner output return the tracebacks.""" - verbose_mode = " ..." in output - - if verbose_mode: - if "ERROR" in output or "FAILURE" in output: - return "\n\n==".join(output.split("\n\n==")[1:]) - else: - dots = DOTS.search(output).group(1) - if "E" in dots or "F" in dots: - return output[len(dots) + 1 :].split(RAN_TESTS_DIV)[0] - return "" - - -def output_into_dots(output): - """convert the test runner output into dots.""" - # verbose_mode = ") ..." in output - verbose_mode = " ..." in output - - if verbose_mode: - # a map from the verbose output to the dots output. - reasons = { - "... ERROR": "E", - "... unexpected success": "u", - "... skipped": "s", - "... expected failure": "x", - "... ok": ".", - "... FAIL": "F", - } - results = output.split("\n\n==")[0] - lines = [l for l in results.split("\n") if l and "..." in l] - dotlist = [] - for l in lines: - found = False - for reason in reasons: - if reason in l: - dotlist.append(reasons[reason]) - found = True - break - if not found: - raise ValueError("Not sure what this is. Add to reasons. :%s" % l) - - return "".join(dotlist) - dots = DOTS.search(output).group(1) - return dots - - -def combine_results(all_results, t): - """ - - Return pieced together results in a form fit for human consumption. Don't - rely on results if piecing together subprocessed results (single process - mode is fine). Was originally meant for that purpose but was found to be - unreliable. See the dump option for reliable results. - - """ - - all_dots = "" - failures = [] - - for module, results in sorted(all_results.items()): - output, return_code, raw_return = map( - results.get, ("output", "return_code", "raw_return") - ) - - if not output or (return_code and RAN_TESTS_DIV not in output): - # would this effect the original dict? TODO - output_lines = raw_return.splitlines() - if len(output_lines) > 20: - results["raw_return"] = "\n".join( - output_lines[:10] + ["..."] + output_lines[-10:] - ) - failures.append(COMPLETE_FAILURE_TEMPLATE % results) - all_dots += "E" - continue - - dots = output_into_dots(output) - all_dots += dots - tracebacks = extract_tracebacks(output) - if tracebacks: - failures.append(tracebacks) - - total_fails, total_errors = map(all_dots.count, "FE") - total_tests = len(all_dots) - - combined = [all_dots] - if failures: - combined += ["".join(failures).lstrip("\n")[:-1]] - combined += ["%s %s tests in %.3fs\n" % (RAN_TESTS_DIV, total_tests, t)] - - if failures: - infos = (["failures=%s" % total_fails] if total_fails else []) + ( - ["errors=%s" % total_errors] if total_errors else [] - ) - combined += ["FAILED (%s)\n" % ", ".join(infos)] - else: - combined += ["OK\n"] - - return total_tests, "\n".join(combined) - - -################################################################################ - -TEST_RESULTS_START = "<--!! TEST RESULTS START HERE !!-->" -TEST_RESULTS_END = "<--!! 
TEST RESULTS END HERE !!-->" -_test_re_str = "%s\n(.*)%s" % (TEST_RESULTS_START, TEST_RESULTS_END) -TEST_RESULTS_RE = re.compile(_test_re_str, re.DOTALL | re.M) - - -def get_test_results(raw_return): - test_results = TEST_RESULTS_RE.search(raw_return) - if test_results: - try: - return eval(test_results.group(1)) - except: - print("BUGGY TEST RESULTS EVAL:\n %s" % test_results.group(1)) - raise - - -################################################################################ - - -def run_test( - module, - incomplete=False, - usesubprocess=True, - randomize=False, - exclude=("interactive",), - buffer=True, - unbuffered=None, - verbosity=1, -): - """Run a unit test module""" - suite = unittest.TestSuite() - - if verbosity is None: - verbosity = 1 - - if verbosity: - print("loading %s" % module) - - loader = PygameTestLoader( - randomize_tests=randomize, include_incomplete=incomplete, exclude=exclude - ) - suite.addTest(loader.loadTestsFromName(module)) - - output = StringIO.StringIO() - runner = unittest.TextTestRunner(stream=output, buffer=buffer, verbosity=verbosity) - results = runner.run(suite) - - if verbosity == 2: - output.seek(0) - print(output.read()) - output.seek(0) - - results = { - module: { - "output": output.getvalue(), - "num_tests": results.testsRun, - "num_errors": len(results.errors), - "num_failures": len(results.failures), - } - } - - if usesubprocess: - print(TEST_RESULTS_START) - print(pformat(results)) - print(TEST_RESULTS_END) - else: - return results - - -################################################################################ - -if __name__ == "__main__": - options, args = opt_parser.parse_args() - if not args: - - if is_pygame_pkg: - run_from = "pygame.tests.go" - else: - run_from = os.path.join(main_dir, "run_tests.py") - sys.exit("No test module provided; consider using %s instead" % run_from) - run_test( - args[0], - incomplete=options.incomplete, - usesubprocess=options.usesubprocess, - randomize=options.randomize, - exclude=options.exclude, - buffer=(not options.unbuffered), - ) - -################################################################################ diff --git a/venv/Lib/site-packages/pygame/tests/threads_test.py b/venv/Lib/site-packages/pygame/tests/threads_test.py deleted file mode 100644 index 5f55eac..0000000 --- a/venv/Lib/site-packages/pygame/tests/threads_test.py +++ /dev/null @@ -1,240 +0,0 @@ -import unittest -from pygame.threads import FuncResult, tmap, WorkerQueue, Empty, STOP -from pygame import threads, Surface, transform - - -import time - - -class WorkerQueueTypeTest(unittest.TestCase): - def test_usage_with_different_functions(self): - def f(x): - return x + 1 - - def f2(x): - return x + 2 - - wq = WorkerQueue() - fr = FuncResult(f) - fr2 = FuncResult(f2) - wq.do(fr, 1) - wq.do(fr2, 1) - wq.wait() - wq.stop() - - self.assertEqual(fr.result, 2) - self.assertEqual(fr2.result, 3) - - def test_do(self): - """Tests function placement on queue and execution after blocking function completion.""" - # __doc__ (as of 2008-06-28) for pygame.threads.WorkerQueue.do: - - # puts a function on a queue for running _later_. - - # TODO: This tests needs refactoring to avoid sleep. - # sleep is slow and unreliable (especially on VMs). 
- - # def sleep_test(): - # time.sleep(0.5) - - # def calc_test(x): - # return x + 1 - - # worker_queue = WorkerQueue(num_workers=1) - # sleep_return = FuncResult(sleep_test) - # calc_return = FuncResult(calc_test) - # init_time = time.time() - # worker_queue.do(sleep_return) - # worker_queue.do(calc_return, 1) - # worker_queue.wait() - # worker_queue.stop() - # time_diff = time.time() - init_time - - # self.assertEqual(sleep_return.result, None) - # self.assertEqual(calc_return.result, 2) - # self.assertGreaterEqual(time_diff, 0.5) - - def test_stop(self): - """Ensure stop() stops the worker queue""" - wq = WorkerQueue() - - self.assertGreater(len(wq.pool), 0) - - for t in wq.pool: - self.assertTrue(t.is_alive()) - - for i in range(200): - wq.do(lambda x: x + 1, i) - - wq.stop() - - for t in wq.pool: - self.assertFalse(t.is_alive()) - - self.assertIs(wq.queue.get(), STOP) - - def test_threadloop(self): - - # __doc__ (as of 2008-06-28) for pygame.threads.WorkerQueue.threadloop: - - # Loops until all of the tasks are finished. - - # Make a worker queue with only one thread - wq = WorkerQueue(1) - - # Ocuppy the one worker with the threadloop - # wq threads are just threadloop, so this makes an embedded threadloop - wq.do(wq.threadloop) - - # Make sure wq can still do work - # If wq can still do work, threadloop works - l = [] - wq.do(l.append, 1) - # Wait won't work because the primary thread is in an infinite loop - time.sleep(0.5) - self.assertEqual(l[0], 1) - - # Kill the embedded threadloop by sending stop onto the stack - # Threadloop puts STOP back onto the queue when it STOPs so this kills both loops - wq.stop() - - # Make sure wq has stopped - self.assertFalse(wq.pool[0].is_alive()) - - def test_wait(self): - - # __doc__ (as of 2008-06-28) for pygame.threads.WorkerQueue.wait: - - # waits until all tasks are complete. - - wq = WorkerQueue() - - for i in range(2000): - wq.do(lambda x: x + 1, i) - wq.wait() - - self.assertRaises(Empty, wq.queue.get_nowait) - - wq.stop() - - -class ThreadsModuleTest(unittest.TestCase): - def test_benchmark_workers(self): - """Ensure benchmark_workers performance measure functions properly with both default and specified inputs""" - "tags:long_running" - - # __doc__ (as of 2008-06-28) for pygame.threads.benchmark_workers: - - # does a little test to see if workers are at all faster. - # Returns the number of workers which works best. - # Takes a little bit of time to run, so you should only really call - # it once. - # You can pass in benchmark data, and functions if you want. - # a_bench_func - f(data) - # the_data - data to work on. - optimal_workers = threads.benchmark_workers() - self.assertIsInstance(optimal_workers, int) - self.assertTrue(0 <= optimal_workers < 64) - - # Test passing benchmark data and function explicitly - def smooth_scale_bench(data): - transform.smoothscale(data, (128, 128)) - - surf_data = [Surface((x, x), 0, 32) for x in range(12, 64, 12)] - best_num_workers = threads.benchmark_workers(smooth_scale_bench, surf_data) - self.assertIsInstance(best_num_workers, int) - - def test_init(self): - """Ensure init() sets up the worker queue""" - threads.init(8) - - self.assertIsInstance(threads._wq, WorkerQueue) - - threads.quit() - - def test_quit(self): - """Ensure quit() cleans up the worker queue""" - threads.init(8) - threads.quit() - - self.assertIsNone(threads._wq) - - def test_tmap(self): - # __doc__ (as of 2008-06-28) for pygame.threads.tmap: - - # like map, but uses a thread pool to execute. 
- # num_workers - the number of worker threads that will be used. If pool - # is passed in, then the num_workers arg is ignored. - # worker_queue - you can optionally pass in an existing WorkerQueue. - # wait - True means that the results are returned when everything is finished. - # False means that we return the [worker_queue, results] right away instead. - # results, is returned as a list of FuncResult instances. - # stop_on_error - - - ## test that the outcomes of map and tmap are the same - func, data = lambda x: x + 1, range(100) - - tmapped = list(tmap(func, data)) - mapped = list(map(func, data)) - - self.assertEqual(tmapped, mapped) - - ## Test that setting tmap to not stop on errors produces the expected result - data2 = range(100) - always_excepts = lambda x: 1 / 0 - - tmapped2 = list(tmap(always_excepts, data2, stop_on_error=False)) - - # Use list comprehension to check all entries are None as all function - # calls made by tmap will have thrown an exception (ZeroDivisionError) - # Condense to single bool with `all`, which will return true if all - # entries are true - self.assertTrue(all([x is None for x in tmapped2])) - - def todo_test_tmap__None_func_and_multiple_sequences(self): - """Using a None as func and multiple sequences""" - self.fail() - - res = tmap(None, [1, 2, 3, 4]) - res2 = tmap(None, [1, 2, 3, 4], [22, 33, 44, 55]) - res3 = tmap(None, [1, 2, 3, 4], [22, 33, 44, 55, 66]) - res4 = tmap(None, [1, 2, 3, 4, 5], [22, 33, 44, 55]) - - self.assertEqual([1, 2, 3, 4], res) - self.assertEqual([(1, 22), (2, 33), (3, 44), (4, 55)], res2) - self.assertEqual([(1, 22), (2, 33), (3, 44), (4, 55), (None, 66)], res3) - self.assertEqual([(1, 22), (2, 33), (3, 44), (4, 55), (5, None)], res4) - - def test_tmap__wait(self): - r = range(1000) - wq, results = tmap(lambda x: x, r, num_workers=5, wait=False) - wq.wait() - r2 = map(lambda x: x.result, results) - self.assertEqual(list(r), list(r2)) - - def test_FuncResult(self): - """Ensure FuncResult sets its result and exception attributes""" - # Results are stored in result attribute - fr = FuncResult(lambda x: x + 1) - fr(2) - - self.assertEqual(fr.result, 3) - - # Exceptions are store in exception attribute - self.assertIsNone(fr.exception, "no exception should be raised") - - exception = ValueError("rast") - - def x(sdf): - raise exception - - fr = FuncResult(x) - fr(None) - - self.assertIs(fr.exception, exception) - - -################################################################################ - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/time_test.py b/venv/Lib/site-packages/pygame/tests/time_test.py deleted file mode 100644 index e428508..0000000 --- a/venv/Lib/site-packages/pygame/tests/time_test.py +++ /dev/null @@ -1,392 +0,0 @@ -import unittest -import pygame -import time - -Clock = pygame.time.Clock - - -class ClockTypeTest(unittest.TestCase): - __tags__ = ["timing"] - - def test_construction(self): - """Ensure a Clock object can be created""" - c = Clock() - - self.assertTrue(c, "Clock cannot be constructed") - - def test_get_fps(self): - """test_get_fps tests pygame.time.get_fps()""" - # Initialization check, first call should return 0 fps - c = Clock() - self.assertEqual(c.get_fps(), 0) - # Type check get_fps should return float - self.assertTrue(type(c.get_fps()) == float) - # Allowable margin of error in percentage - delta = 0.30 - # Test fps correctness for 100, 60 and 30 fps - self._fps_test(c, 100, delta) - self._fps_test(c, 60, delta) - self._fps_test(c, 30, 
delta) - - def _fps_test(self, clock, fps, delta): - """ticks fps times each second, hence get_fps() should return fps""" - delay_per_frame = 1.0 / fps - for f in range(fps): # For one second tick and sleep - clock.tick() - time.sleep(delay_per_frame) - # We should get around fps (+- fps*delta -- delta % of fps) - self.assertAlmostEqual(clock.get_fps(), fps, delta=fps * delta) - - def test_get_rawtime(self): - - iterations = 10 - delay = 0.1 - delay_miliseconds = delay * (10 ** 3) # actual time difference between ticks - framerate_limit = 5 - delta = 50 # allowable error in milliseconds - - # Testing Clock Initialization - c = Clock() - self.assertEqual(c.get_rawtime(), 0) - - # Testing Raw Time with Frame Delay - for f in range(iterations): - time.sleep(delay) - c.tick(framerate_limit) - c1 = c.get_rawtime() - self.assertAlmostEqual(delay_miliseconds, c1, delta=delta) - - # Testing get_rawtime() = get_time() - for f in range(iterations): - time.sleep(delay) - c.tick() - c1 = c.get_rawtime() - c2 = c.get_time() - self.assertAlmostEqual(c1, c2, delta=delta) - - def test_get_time(self): - # Testing parameters - delay = 0.1 # seconds - delay_miliseconds = delay * (10 ** 3) - iterations = 10 - delta = 50 # milliseconds - - # Testing Clock Initialization - c = Clock() - self.assertEqual(c.get_time(), 0) - - # Testing within delay parameter range - for i in range(iterations): - time.sleep(delay) - c.tick() - c1 = c.get_time() - self.assertAlmostEqual(delay_miliseconds, c1, delta=delta) - - # Comparing get_time() results with the 'time' module - for i in range(iterations): - t0 = time.time() - time.sleep(delay) - c.tick() - t1 = time.time() - c1 = c.get_time() # elapsed time in milliseconds - d0 = (t1 - t0) * ( - 10 ** 3 - ) #'time' module elapsed time converted to milliseconds - self.assertAlmostEqual(d0, c1, delta=delta) - - def test_tick(self): - """Tests time.Clock.tick()""" - """ - Loops with a set delay a few times then checks what tick reports to - verify its accuracy. 
Then calls tick with a desired frame-rate and - verifies it is not faster than the desired frame-rate nor is it taking - a dramatically long time to complete - """ - - # Adjust this value to increase the acceptable sleep jitter - epsilon = 1.5 - # Adjust this value to increase the acceptable locked frame-rate jitter - epsilon2 = 0.3 - # adjust this value to increase the acceptable frame-rate margin - epsilon3 = 20 - testing_framerate = 60 - milliseconds = 5.0 - - collection = [] - c = Clock() - - # verify time.Clock.tick() will measure the time correctly - c.tick() - for i in range(100): - time.sleep(milliseconds / 1000) # convert to seconds - collection.append(c.tick()) - - # removes the first highest and lowest value - for outlier in [min(collection), max(collection)]: - if outlier != milliseconds: - collection.remove(outlier) - - average_time = float(sum(collection)) / len(collection) - - # assert the deviation from the intended frame-rate is within the - # acceptable amount (the delay is not taking a dramatically long time) - self.assertAlmostEqual(average_time, milliseconds, delta=epsilon) - - # verify tick will control the frame-rate - - c = Clock() - collection = [] - - start = time.time() - - for i in range(testing_framerate): - collection.append(c.tick(testing_framerate)) - - # remove the highest and lowest outliers - for outlier in [min(collection), max(collection)]: - if outlier != round(1000 / testing_framerate): - collection.remove(outlier) - - end = time.time() - - # Since calling tick with a desired fps will prevent the program from - # running at greater than the given fps, 100 iterations at 100 fps - # should last no less than 1 second - self.assertAlmostEqual(end - start, 1, delta=epsilon2) - - average_tick_time = float(sum(collection)) / len(collection) - self.assertAlmostEqual( - 1000 / average_tick_time, testing_framerate, delta=epsilon3 - ) - - def test_tick_busy_loop(self): - """Test tick_busy_loop""" - - c = Clock() - - # Test whether the return value of tick_busy_loop is equal to - # (FPS is accurate) or greater than (slower than the set FPS) - # with a small margin for error based on differences in how this - # test runs in practise - it either sometimes runs slightly fast - # or seems to based on a rounding error. 
- second_length = 1000 - shortfall_tolerance = 1 # (ms) The amount of time a tick is allowed to run short of, to account for underlying rounding errors - sample_fps = 40 - - self.assertGreaterEqual( - c.tick_busy_loop(sample_fps), - (second_length / sample_fps) - shortfall_tolerance, - ) - pygame.time.wait(10) # incur delay between ticks that's faster than sample_fps - self.assertGreaterEqual( - c.tick_busy_loop(sample_fps), - (second_length / sample_fps) - shortfall_tolerance, - ) - pygame.time.wait(200) # incur delay between ticks that's slower than sample_fps - self.assertGreaterEqual( - c.tick_busy_loop(sample_fps), - (second_length / sample_fps) - shortfall_tolerance, - ) - - high_fps = 500 - self.assertGreaterEqual( - c.tick_busy_loop(high_fps), (second_length / high_fps) - shortfall_tolerance - ) - - low_fps = 1 - self.assertGreaterEqual( - c.tick_busy_loop(low_fps), (second_length / low_fps) - shortfall_tolerance - ) - - low_non_factor_fps = 35 # 1000/35 makes 28.5714285714 - frame_length_without_decimal_places = int( - second_length / low_non_factor_fps - ) # Same result as math.floor - self.assertGreaterEqual( - c.tick_busy_loop(low_non_factor_fps), - frame_length_without_decimal_places - shortfall_tolerance, - ) - - high_non_factor_fps = 750 # 1000/750 makes 1.3333... - frame_length_without_decimal_places_2 = int( - second_length / high_non_factor_fps - ) # Same result as math.floor - self.assertGreaterEqual( - c.tick_busy_loop(high_non_factor_fps), - frame_length_without_decimal_places_2 - shortfall_tolerance, - ) - - zero_fps = 0 - self.assertEqual(c.tick_busy_loop(zero_fps), 0) - - # Check behaviour of unexpected values - - negative_fps = -1 - self.assertEqual(c.tick_busy_loop(negative_fps), 0) - - fractional_fps = 32.75 - frame_length_without_decimal_places_3 = int(second_length / fractional_fps) - self.assertGreaterEqual( - c.tick_busy_loop(fractional_fps), - frame_length_without_decimal_places_3 - shortfall_tolerance, - ) - - bool_fps = True - self.assertGreaterEqual( - c.tick_busy_loop(bool_fps), (second_length / bool_fps) - shortfall_tolerance - ) - - -class TimeModuleTest(unittest.TestCase): - __tags__ = ["timing"] - - def test_delay(self): - """Tests time.delay() function.""" - millis = 50 # millisecond to wait on each iteration - iterations = 20 # number of iterations - delta = 150 # Represents acceptable margin of error for wait in ms - # Call checking function - self._wait_delay_check(pygame.time.delay, millis, iterations, delta) - # After timing behaviour, check argument type exceptions - self._type_error_checks(pygame.time.delay) - - def test_get_ticks(self): - """Tests time.get_ticks()""" - """ - Iterates and delays for arbitrary amount of time for each iteration, - check get_ticks to equal correct gap time - """ - iterations = 20 - millis = 50 - delta = 15 # Acceptable margin of error in ms - # Assert return type to be int - self.assertTrue(type(pygame.time.get_ticks()) == int) - for i in range(iterations): - curr_ticks = pygame.time.get_ticks() # Save current tick count - curr_time = time.time() # Save current time - pygame.time.delay(millis) # Delay for millis - # Time and Ticks difference from start of the iteration - time_diff = round((time.time() - curr_time) * 1000) - ticks_diff = pygame.time.get_ticks() - curr_ticks - # Assert almost equality of the ticking time and time difference - self.assertAlmostEqual(ticks_diff, time_diff, delta=delta) - - def test_set_timer(self): - """Tests time.set_timer()""" - """ - Tests if a timer will post the correct amount 
of eventid events in - the specified delay. Test is posting event objects work. - Also tests if setting milliseconds to 0 stops the timer and if - the once argument and repeat arguments work. - """ - pygame.init() - TIMER_EVENT_TYPE = pygame.event.custom_type() - timer_event = pygame.event.Event(TIMER_EVENT_TYPE) - delta = 50 - timer_delay = 100 - test_number = 8 # Number of events to read for the test - events = 0 # Events read - - pygame.event.clear() - pygame.time.set_timer(TIMER_EVENT_TYPE, timer_delay) - - # Test that 'test_number' events are posted in the right amount of time - t1 = pygame.time.get_ticks() - max_test_time = t1 + timer_delay * test_number + delta - while events < test_number: - for event in pygame.event.get(): - if event == timer_event: - events += 1 - - # The test takes too much time - if pygame.time.get_ticks() > max_test_time: - break - - pygame.time.set_timer(TIMER_EVENT_TYPE, 0) - t2 = pygame.time.get_ticks() - # Is the number ef events and the timing right? - self.assertEqual(events, test_number) - self.assertAlmostEqual(timer_delay * test_number, t2 - t1, delta=delta) - - # Test that the timer stopped when set with 0ms delay. - pygame.time.delay(200) - self.assertNotIn(timer_event, pygame.event.get()) - - # Test that the old timer for an event is deleted when a new timer is set - pygame.time.set_timer(TIMER_EVENT_TYPE, timer_delay) - pygame.time.delay(int(timer_delay * 3.5)) - self.assertEqual(pygame.event.get().count(timer_event), 3) - pygame.time.set_timer(TIMER_EVENT_TYPE, timer_delay * 10) # long wait time - pygame.time.delay(timer_delay * 5) - self.assertNotIn(timer_event, pygame.event.get()) - pygame.time.set_timer(TIMER_EVENT_TYPE, timer_delay * 3) - pygame.time.delay(timer_delay * 7) - self.assertEqual(pygame.event.get().count(timer_event), 2) - pygame.time.set_timer(TIMER_EVENT_TYPE, timer_delay) - pygame.time.delay(int(timer_delay * 5.5)) - self.assertEqual(pygame.event.get().count(timer_event), 5) - - # Test that the loops=True works - pygame.time.set_timer(TIMER_EVENT_TYPE, 10, True) - pygame.time.delay(40) - self.assertEqual(pygame.event.get().count(timer_event), 1) - - # Test a variety of event objects, test loops argument - events_to_test = [ - pygame.event.Event(TIMER_EVENT_TYPE), - pygame.event.Event( - TIMER_EVENT_TYPE, foo="9gwz5", baz=12, lol=[124, (34, "")] - ), - pygame.event.Event(pygame.KEYDOWN, key=pygame.K_a, unicode="a"), - ] - repeat = 3 - millis = 50 - for e in events_to_test: - pygame.time.set_timer(e, millis, loops=repeat) - pygame.time.delay(2 * millis * repeat) - self.assertEqual(pygame.event.get().count(e), repeat) - pygame.quit() - - def test_wait(self): - """Tests time.wait() function.""" - millis = 100 # millisecond to wait on each iteration - iterations = 10 # number of iterations - delta = 50 # Represents acceptable margin of error for wait in ms - # Call checking function - self._wait_delay_check(pygame.time.wait, millis, iterations, delta) - # After timing behaviour, check argument type exceptions - self._type_error_checks(pygame.time.wait) - - def _wait_delay_check(self, func_to_check, millis, iterations, delta): - """ " - call func_to_check(millis) "iterations" times and check each time if - function "waited" for given millisecond (+- delta). At the end, take - average time for each call (whole_duration/iterations), which should - be equal to millis (+- delta - acceptable margin of error). 
- *Created to avoid code duplication during delay and wait tests - """ - # take starting time for duration calculation - start_time = time.time() - for i in range(iterations): - wait_time = func_to_check(millis) - # Check equality of wait_time and millis with margin of error delta - self.assertAlmostEqual(wait_time, millis, delta=delta) - stop_time = time.time() - # Cycle duration in millisecond - duration = round((stop_time - start_time) * 1000) - # Duration/Iterations should be (almost) equal to predefined millis - self.assertAlmostEqual(duration / iterations, millis, delta=delta) - - def _type_error_checks(self, func_to_check): - """Checks 3 TypeError (float, tuple, string) for the func_to_check""" - """Intended for time.delay and time.wait functions""" - # Those methods throw no exceptions on negative integers - self.assertRaises(TypeError, func_to_check, 0.1) # check float - self.assertRaises(TypeError, pygame.time.delay, (0, 1)) # check tuple - self.assertRaises(TypeError, pygame.time.delay, "10") # check string - - -############################################################################### - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/touch_test.py b/venv/Lib/site-packages/pygame/tests/touch_test.py deleted file mode 100644 index 3f63cae..0000000 --- a/venv/Lib/site-packages/pygame/tests/touch_test.py +++ /dev/null @@ -1,98 +0,0 @@ -import unittest -import os -import pygame -from pygame._sdl2 import touch -from pygame.tests.test_utils import question - - -has_touchdevice = touch.get_num_devices() > 0 - - -class TouchTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - pygame.display.init() - - @classmethod - def tearDownClass(cls): - pygame.display.quit() - - def test_num_devices(self): - touch.get_num_devices() - - @unittest.skipIf(not has_touchdevice, "no touch devices found") - def test_get_device(self): - touch.get_device(0) - - def test_num_fingers__invalid(self): - self.assertRaises(pygame.error, touch.get_device, -1234) - self.assertRaises(TypeError, touch.get_device, "test") - - @unittest.skipIf(not has_touchdevice, "no touch devices found") - def test_num_fingers(self): - touch.get_num_fingers(touch.get_device(0)) - - def test_num_fingers__invalid(self): - self.assertRaises(TypeError, touch.get_num_fingers, "test") - self.assertRaises(pygame.error, touch.get_num_fingers, -1234) - - -class TouchInteractiveTest(unittest.TestCase): - - __tags__ = ["interactive"] - - @unittest.skipIf(not has_touchdevice, "no touch devices found") - def test_get_finger(self): - """ask for touch input and check the dict""" - - pygame.display.init() - pygame.font.init() - - os.environ["SDL_VIDEO_WINDOW_POS"] = "50,50" - screen = pygame.display.set_mode((800, 600)) - screen.fill((255, 255, 255)) - - font = pygame.font.Font(None, 32) - instructions_str_1 = "Please place some fingers on your touch device" - instructions_str_2 = ( - "Close the window when finished, " "and answer the question" - ) - inst_1_render = font.render(instructions_str_1, True, pygame.Color("#000000")) - inst_2_render = font.render(instructions_str_2, True, pygame.Color("#000000")) - - running = True - while running: - for event in pygame.event.get(): - if event.type == pygame.QUIT: - running = False - - finger_data_renders = [] - num_devices = pygame._sdl2.touch.get_num_devices() - if num_devices > 0: - first_device = pygame._sdl2.touch.get_device(0) - num_fingers = pygame._sdl2.touch.get_num_fingers(first_device) - if num_fingers > 0: - for finger_index in 
range(0, num_fingers): - data = pygame._sdl2.touch.get_finger(first_device, finger_index) - render = font.render( - "finger - " + str(data), True, pygame.Color("#000000") - ) - - finger_data_renders.append(render) - - screen.fill((255, 255, 255)) - screen.blit(inst_1_render, (5, 5)) - screen.blit(inst_2_render, (5, 40)) - for index, finger in enumerate(finger_data_renders): - screen.blit(finger, (5, 80 + (index * 40))) - - pygame.display.update() - - response = question("Does the finger data seem correct?") - self.assertTrue(response) - - pygame.display.quit() - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/transform_test.py b/venv/Lib/site-packages/pygame/tests/transform_test.py deleted file mode 100644 index 1484817..0000000 --- a/venv/Lib/site-packages/pygame/tests/transform_test.py +++ /dev/null @@ -1,1316 +0,0 @@ -import unittest -import os -import platform - -from pygame.tests import test_utils -from pygame.tests.test_utils import example_path - -import pygame -import pygame.transform -from pygame.locals import * - - -def show_image(s, images=[]): - # pygame.display.init() - size = s.get_rect()[2:] - screen = pygame.display.set_mode(size) - screen.blit(s, (0, 0)) - pygame.display.flip() - pygame.event.pump() - going = True - idx = 0 - while going: - events = pygame.event.get() - for e in events: - if e.type == QUIT: - going = False - if e.type == KEYDOWN: - if e.key in [K_s, K_a]: - if e.key == K_s: - idx += 1 - if e.key == K_a: - idx -= 1 - s = images[idx] - screen.blit(s, (0, 0)) - pygame.display.flip() - pygame.event.pump() - elif e.key in [K_ESCAPE]: - going = False - pygame.display.quit() - pygame.display.init() - - -def threshold( - return_surf, - surf, - color, - threshold=(0, 0, 0), - diff_color=(0, 0, 0), - change_return=True, -): - """given the color it makes return_surf only have areas with the given colour.""" - - width, height = surf.get_width(), surf.get_height() - - if change_return: - return_surf.fill(diff_color) - - try: - r, g, b = color - except ValueError: - r, g, b, a = color - - try: - tr, tg, tb = color - except ValueError: - tr, tg, tb, ta = color - - similar = 0 - for y in range(height): - for x in range(width): - c1 = surf.get_at((x, y)) - - if (abs(c1[0] - r) < tr) & (abs(c1[1] - g) < tg) & (abs(c1[2] - b) < tb): - # this pixel is within the threshold. - if change_return: - return_surf.set_at((x, y), c1) - similar += 1 - # else: - # print c1, c2 - - return similar - - -class TransformModuleTest(unittest.TestCase): - def test_scale__alpha(self): - """see if set_alpha information is kept.""" - - s = pygame.Surface((32, 32)) - s.set_alpha(55) - self.assertEqual(s.get_alpha(), 55) - - s = pygame.Surface((32, 32)) - s.set_alpha(55) - s2 = pygame.transform.scale(s, (64, 64)) - s3 = s.copy() - self.assertEqual(s.get_alpha(), s3.get_alpha()) - self.assertEqual(s.get_alpha(), s2.get_alpha()) - - def test_scale__destination(self): - """see if the destination surface can be passed in to use.""" - - s = pygame.Surface((32, 32)) - s2 = pygame.transform.scale(s, (64, 64)) - s3 = s2.copy() - - # Also validate keyword arguments - s3 = pygame.transform.scale(surface=s, size=(64, 64), dest_surface=s3) - pygame.transform.scale(s, (64, 64), s2) - - # the wrong size surface is past in. Should raise an error. 
- self.assertRaises(ValueError, pygame.transform.scale, s, (33, 64), s3) - - s = pygame.Surface((32, 32)) - s2 = pygame.transform.smoothscale(s, (64, 64)) - s3 = s2.copy() - - # Also validate keyword arguments - s3 = pygame.transform.smoothscale(surface=s, size=(64, 64), dest_surface=s3) - - # the wrong size surface is past in. Should raise an error. - self.assertRaises(ValueError, pygame.transform.smoothscale, s, (33, 64), s3) - - def test_scale__vector2(self): - s = pygame.Surface((32, 32)) - s2 = pygame.transform.scale(s, pygame.Vector2(64, 64)) - s3 = pygame.transform.smoothscale(s, pygame.Vector2(64, 64)) - - self.assertEqual((64, 64), s2.get_size()) - self.assertEqual((64, 64), s3.get_size()) - - def test_scale__zero_surface_transform(self): - tmp_surface = pygame.transform.scale(pygame.Surface((128, 128)), (0, 0)) - self.assertEqual(tmp_surface.get_size(), (0, 0)) - tmp_surface = pygame.transform.scale(tmp_surface, (128, 128)) - self.assertEqual(tmp_surface.get_size(), (128, 128)) - - def test_threshold__honors_third_surface(self): - # __doc__ for threshold as of Tue 07/15/2008 - - # pygame.transform.threshold(DestSurface, Surface, color, threshold = - # (0,0,0,0), diff_color = (0,0,0,0), change_return = True, Surface = - # None): return num_threshold_pixels - - # When given the optional third - # surface, it would use the colors in that rather than the "color" - # specified in the function to check against. - - # New in pygame 1.8 - - ################################################################ - # Sizes - (w, h) = size = (32, 32) - - # the original_color is within the threshold of the threshold_color - threshold = (20, 20, 20, 20) - - original_color = (25, 25, 25, 25) - threshold_color = (10, 10, 10, 10) - - # Surfaces - original_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - dest_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - - # Third surface is used in lieu of 3rd position arg color - third_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - - # Color filling - original_surface.fill(original_color) - third_surface.fill(threshold_color) - - ################################################################ - # All pixels for color should be within threshold - # - pixels_within_threshold = pygame.transform.threshold( - dest_surface=None, - surface=original_surface, - search_color=threshold_color, - threshold=threshold, - set_color=None, - set_behavior=0, - ) - - self.assertEqual(w * h, pixels_within_threshold) - - ################################################################ - # This should respect third_surface colors in place of 3rd arg - # color Should be the same as: surface.fill(threshold_color) - # all within threshold - - pixels_within_threshold = pygame.transform.threshold( - dest_surface=None, - surface=original_surface, - search_color=None, - threshold=threshold, - set_color=None, - set_behavior=0, - search_surf=third_surface, - ) - self.assertEqual(w * h, pixels_within_threshold) - - def test_threshold_dest_surf_not_change(self): - """the pixels within the threshold. - - All pixels not within threshold are changed to set_color. - So there should be none changed in this test. 
- """ - (w, h) = size = (32, 32) - threshold = (20, 20, 20, 20) - original_color = (25, 25, 25, 25) - original_dest_color = (65, 65, 65, 55) - threshold_color = (10, 10, 10, 10) - set_color = (255, 10, 10, 10) - - surf = pygame.Surface(size, pygame.SRCALPHA, 32) - dest_surf = pygame.Surface(size, pygame.SRCALPHA, 32) - search_surf = pygame.Surface(size, pygame.SRCALPHA, 32) - - surf.fill(original_color) - search_surf.fill(threshold_color) - dest_surf.fill(original_dest_color) - - # set_behavior=1, set dest_surface from set_color. - # all within threshold of third_surface, so no color is set. - - THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR = 1 - pixels_within_threshold = pygame.transform.threshold( - dest_surface=dest_surf, - surface=surf, - search_color=None, - threshold=threshold, - set_color=set_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR, - search_surf=search_surf, - ) - - # # Return, of pixels within threshold is correct - self.assertEqual(w * h, pixels_within_threshold) - - # # Size of dest surface is correct - dest_rect = dest_surf.get_rect() - dest_size = dest_rect.size - self.assertEqual(size, dest_size) - - # The color is not the change_color specified for every pixel As all - # pixels are within threshold - - for pt in test_utils.rect_area_pts(dest_rect): - self.assertNotEqual(dest_surf.get_at(pt), set_color) - self.assertEqual(dest_surf.get_at(pt), original_dest_color) - - def test_threshold_dest_surf_all_changed(self): - """Lowering the threshold, expecting changed surface""" - - (w, h) = size = (32, 32) - threshold = (20, 20, 20, 20) - original_color = (25, 25, 25, 25) - original_dest_color = (65, 65, 65, 55) - threshold_color = (10, 10, 10, 10) - set_color = (255, 10, 10, 10) - - surf = pygame.Surface(size, pygame.SRCALPHA, 32) - dest_surf = pygame.Surface(size, pygame.SRCALPHA, 32) - search_surf = pygame.Surface(size, pygame.SRCALPHA, 32) - - surf.fill(original_color) - search_surf.fill(threshold_color) - dest_surf.fill(original_dest_color) - - THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR = 1 - pixels_within_threshold = pygame.transform.threshold( - dest_surf, - surf, - search_color=None, - set_color=set_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR, - search_surf=search_surf, - ) - - self.assertEqual(0, pixels_within_threshold) - - dest_rect = dest_surf.get_rect() - dest_size = dest_rect.size - self.assertEqual(size, dest_size) - - # The color is the set_color specified for every pixel As all - # pixels are not within threshold - for pt in test_utils.rect_area_pts(dest_rect): - self.assertEqual(dest_surf.get_at(pt), set_color) - - def test_threshold_count(self): - """counts the colors, and not changes them.""" - surf_size = (32, 32) - surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - search_surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - search_color = (55, 55, 55, 255) - original_color = (10, 10, 10, 255) - - surf.fill(original_color) - # set 2 pixels to the color we are searching for. - surf.set_at((0, 0), search_color) - surf.set_at((12, 5), search_color) - - # There is no destination surface, but we ask to change it. - # This should be an error. 
- self.assertRaises( - TypeError, pygame.transform.threshold, None, surf, search_color - ) - # from pygame.transform import THRESHOLD_BEHAVIOR_COUNT - THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF = 2 - self.assertRaises( - TypeError, - pygame.transform.threshold, - None, - surf, - search_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - ) - - THRESHOLD_BEHAVIOR_COUNT = 0 - num_threshold_pixels = pygame.transform.threshold( - dest_surface=None, - surface=surf, - search_color=search_color, - set_behavior=THRESHOLD_BEHAVIOR_COUNT, - ) - self.assertEqual(num_threshold_pixels, 2) - - def test_threshold_search_surf(self): - surf_size = (32, 32) - surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - search_surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - dest_surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - - original_color = (10, 10, 10, 255) - search_color = (55, 55, 55, 255) - - surf.fill(original_color) - dest_surf.fill(original_color) - # set 2 pixels to the color we are searching for. - surf.set_at((0, 0), search_color) - surf.set_at((12, 5), search_color) - - search_surf.fill(search_color) - - # We look in the other surface for matching colors. - # Change it in dest_surf - THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF = 2 - - # TypeError: if search_surf is used, search_color should be None - self.assertRaises( - TypeError, - pygame.transform.threshold, - dest_surf, - surf, - search_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - search_surf=search_surf, - ) - - # surf, dest_surf, and search_surf should all be the same size. - # Check surface sizes are the same size. - different_sized_surf = pygame.Surface((22, 33), pygame.SRCALPHA, 32) - self.assertRaises( - TypeError, - pygame.transform.threshold, - different_sized_surf, - surf, - search_color=None, - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - search_surf=search_surf, - ) - - self.assertRaises( - TypeError, - pygame.transform.threshold, - dest_surf, - surf, - search_color=None, - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - search_surf=different_sized_surf, - ) - - # We look to see if colors in search_surf are in surf. - num_threshold_pixels = pygame.transform.threshold( - dest_surface=dest_surf, - surface=surf, - search_color=None, - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - search_surf=search_surf, - ) - - num_pixels_within = 2 - self.assertEqual(num_threshold_pixels, num_pixels_within) - - dest_surf.fill(original_color) - num_threshold_pixels = pygame.transform.threshold( - dest_surf, - surf, - search_color=None, - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - search_surf=search_surf, - inverse_set=True, - ) - - self.assertEqual(num_threshold_pixels, 2) - - def test_threshold_inverse_set(self): - """changes the pixels within the threshold, and not outside.""" - surf_size = (32, 32) - _dest_surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - _surf = pygame.Surface(surf_size, pygame.SRCALPHA, 32) - - dest_surf = _dest_surf # surface we are changing. - surf = _surf # surface we are looking at - search_color = (55, 55, 55, 255) # color we are searching for. - threshold = (0, 0, 0, 0) # within this distance from search_color. - set_color = (245, 245, 245, 255) # color we set. - inverse_set = 1 # pixels within threshold are changed to 'set_color' - - original_color = (10, 10, 10, 255) - surf.fill(original_color) - # set 2 pixels to the color we are searching for. 
- surf.set_at((0, 0), search_color) - surf.set_at((12, 5), search_color) - - dest_surf.fill(original_color) - # set 2 pixels to the color we are searching for. - dest_surf.set_at((0, 0), search_color) - dest_surf.set_at((12, 5), search_color) - - THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR = 1 - num_threshold_pixels = pygame.transform.threshold( - dest_surf, - surf, - search_color=search_color, - threshold=threshold, - set_color=set_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR, - inverse_set=1, - ) - - self.assertEqual(num_threshold_pixels, 2) - # only two pixels changed to diff_color. - self.assertEqual(dest_surf.get_at((0, 0)), set_color) - self.assertEqual(dest_surf.get_at((12, 5)), set_color) - - # other pixels should be the same as they were before. - # We just check one other pixel, not all of them. - self.assertEqual(dest_surf.get_at((2, 2)), original_color) - - # XXX - def test_threshold_non_src_alpha(self): - - result = pygame.Surface((10, 10)) - s1 = pygame.Surface((10, 10)) - s2 = pygame.Surface((10, 10)) - s3 = pygame.Surface((10, 10)) - s4 = pygame.Surface((10, 10)) - - x = s1.fill((0, 0, 0)) - s1.set_at((0, 0), (32, 20, 0)) - - x = s2.fill((0, 20, 0)) - x = s3.fill((0, 0, 0)) - x = s4.fill((0, 0, 0)) - s2.set_at((0, 0), (33, 21, 0)) - s2.set_at((3, 0), (63, 61, 0)) - s3.set_at((0, 0), (112, 31, 0)) - s4.set_at((0, 0), (11, 31, 0)) - s4.set_at((1, 1), (12, 31, 0)) - - self.assertEqual(s1.get_at((0, 0)), (32, 20, 0, 255)) - self.assertEqual(s2.get_at((0, 0)), (33, 21, 0, 255)) - self.assertEqual((0, 0), (s1.get_flags(), s2.get_flags())) - - similar_color = (255, 255, 255, 255) - diff_color = (222, 0, 0, 255) - threshold_color = (20, 20, 20, 255) - - THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR = 1 - num_threshold_pixels = pygame.transform.threshold( - dest_surface=result, - surface=s1, - search_color=similar_color, - threshold=threshold_color, - set_color=diff_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR, - ) - self.assertEqual(num_threshold_pixels, 0) - - num_threshold_pixels = pygame.transform.threshold( - dest_surface=result, - surface=s1, - search_color=(40, 40, 0), - threshold=threshold_color, - set_color=diff_color, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_COLOR, - ) - self.assertEqual(num_threshold_pixels, 1) - - self.assertEqual(result.get_at((0, 0)), diff_color) - - def test_threshold__uneven_colors(self): - (w, h) = size = (16, 16) - - original_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - dest_surface = pygame.Surface(size, pygame.SRCALPHA, 32) - - original_surface.fill(0) - - threshold_color_template = [5, 5, 5, 5] - threshold_template = [6, 6, 6, 6] - - ################################################################ - - for pos in range(len("rgb")): - threshold_color = threshold_color_template[:] - threshold = threshold_template[:] - - threshold_color[pos] = 45 - threshold[pos] = 50 - - pixels_within_threshold = pygame.transform.threshold( - None, - original_surface, - threshold_color, - threshold, - set_color=None, - set_behavior=0, - ) - - self.assertEqual(w * h, pixels_within_threshold) - - ################################################################ - - def test_threshold_set_behavior2(self): - """raises an error when set_behavior=2 and set_color is not None.""" - from pygame.transform import threshold - - s1 = pygame.Surface((32, 32), SRCALPHA, 32) - s2 = pygame.Surface((32, 32), SRCALPHA, 32) - THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF = 2 - self.assertRaises( - TypeError, - threshold, - dest_surface=s2, - surface=s1, - 
search_color=(30, 30, 30), - threshold=(11, 11, 11), - set_color=(255, 0, 0), - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - ) - - def test_threshold_set_behavior0(self): - """raises an error when set_behavior=1 - and set_color is not None, - and dest_surf is not None. - """ - from pygame.transform import threshold - - s1 = pygame.Surface((32, 32), SRCALPHA, 32) - s2 = pygame.Surface((32, 32), SRCALPHA, 32) - THRESHOLD_BEHAVIOR_COUNT = 0 - - self.assertRaises( - TypeError, - threshold, - dest_surface=None, - surface=s2, - search_color=(30, 30, 30), - threshold=(11, 11, 11), - set_color=(0, 0, 0), - set_behavior=THRESHOLD_BEHAVIOR_COUNT, - ) - - self.assertRaises( - TypeError, - threshold, - dest_surface=s1, - surface=s2, - search_color=(30, 30, 30), - threshold=(11, 11, 11), - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_COUNT, - ) - - threshold( - dest_surface=None, - surface=s2, - search_color=(30, 30, 30), - threshold=(11, 11, 11), - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_COUNT, - ) - - def test_threshold_from_surface(self): - """Set similar pixels in 'dest_surf' to color in the 'surf'.""" - from pygame.transform import threshold - - surf = pygame.Surface((32, 32), SRCALPHA, 32) - dest_surf = pygame.Surface((32, 32), SRCALPHA, 32) - surf_color = (40, 40, 40, 255) - dest_color = (255, 255, 255) - surf.fill(surf_color) - dest_surf.fill(dest_color) - THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF = 2 - - num_threshold_pixels = threshold( - dest_surface=dest_surf, - surface=surf, - search_color=(30, 30, 30), - threshold=(11, 11, 11), - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF, - inverse_set=1, - ) - - self.assertEqual( - num_threshold_pixels, dest_surf.get_height() * dest_surf.get_width() - ) - self.assertEqual(dest_surf.get_at((0, 0)), surf_color) - - def test_threshold__surface(self): - """ """ - from pygame.transform import threshold - - s1 = pygame.Surface((32, 32), SRCALPHA, 32) - s2 = pygame.Surface((32, 32), SRCALPHA, 32) - s3 = pygame.Surface((1, 1), SRCALPHA, 32) - THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF = 2 - - # # only one pixel should not be changed. - # s1.fill((40,40,40)) - # s2.fill((255,255,255)) - # s1.set_at( (0,0), (170, 170, 170) ) - # # set the similar pixels in destination surface to the color - # # in the first surface. - # num_threshold_pixels = threshold( - # dest_surface=s2, - # surface=s1, - # search_color=(30,30,30), - # threshold=(11,11,11), - # set_color=None, - # set_behavior=THRESHOLD_BEHAVIOR_FROM_SEARCH_SURF) - - # #num_threshold_pixels = threshold(s2, s1, (30,30,30)) - # self.assertEqual(num_threshold_pixels, (s1.get_height() * s1.get_width()) -1) - # self.assertEqual(s2.get_at((0,0)), (0,0,0, 255)) - # self.assertEqual(s2.get_at((0,1)), (40, 40, 40, 255)) - # self.assertEqual(s2.get_at((17,1)), (40, 40, 40, 255)) - - # # abs(40 - 255) < 100 - # #(abs(c1[0] - r) < tr) - - # s1.fill((160,160,160)) - # s2.fill((255,255,255)) - # num_threshold_pixels = threshold(s2, s1, (255,255,255), (100,100,100), (0,0,0), True) - - # self.assertEqual(num_threshold_pixels, (s1.get_height() * s1.get_width())) - - # only one pixel should not be changed. 
- s1.fill((40, 40, 40)) - s1.set_at((0, 0), (170, 170, 170)) - THRESHOLD_BEHAVIOR_COUNT = 0 - - num_threshold_pixels = threshold( - dest_surface=None, - surface=s1, - search_color=(30, 30, 30), - threshold=(11, 11, 11), - set_color=None, - set_behavior=THRESHOLD_BEHAVIOR_COUNT, - ) - - # num_threshold_pixels = threshold(s2, s1, (30,30,30)) - self.assertEqual(num_threshold_pixels, (s1.get_height() * s1.get_width()) - 1) - - # test end markers. 0, and 255 - - # the pixels are different by 1. - s1.fill((254, 254, 254)) - s2.fill((255, 255, 255)) - s3.fill((255, 255, 255)) - s1.set_at((0, 0), (170, 170, 170)) - num_threshold_pixels = threshold( - None, s1, (254, 254, 254), (1, 1, 1), None, THRESHOLD_BEHAVIOR_COUNT - ) - self.assertEqual(num_threshold_pixels, (s1.get_height() * s1.get_width()) - 1) - - # compare the two surfaces. Should be all but one matching. - num_threshold_pixels = threshold( - None, s1, None, (1, 1, 1), None, THRESHOLD_BEHAVIOR_COUNT, s2 - ) - self.assertEqual(num_threshold_pixels, (s1.get_height() * s1.get_width()) - 1) - - # within (0,0,0) threshold? Should match no pixels. - num_threshold_pixels = threshold( - None, s1, (253, 253, 253), (0, 0, 0), None, THRESHOLD_BEHAVIOR_COUNT - ) - self.assertEqual(num_threshold_pixels, 0) - - # other surface within (0,0,0) threshold? Should match no pixels. - num_threshold_pixels = threshold( - None, s1, None, (0, 0, 0), None, THRESHOLD_BEHAVIOR_COUNT, s2 - ) - self.assertEqual(num_threshold_pixels, 0) - - def test_threshold__subclassed_surface(self): - """Ensure threshold accepts subclassed surfaces.""" - expected_size = (13, 11) - expected_flags = 0 - expected_depth = 32 - expected_color = (90, 80, 70, 255) - expected_count = 0 - surface = test_utils.SurfaceSubclass( - expected_size, expected_flags, expected_depth - ) - dest_surface = test_utils.SurfaceSubclass( - expected_size, expected_flags, expected_depth - ) - search_surface = test_utils.SurfaceSubclass( - expected_size, expected_flags, expected_depth - ) - surface.fill((10, 10, 10)) - dest_surface.fill((255, 255, 255)) - search_surface.fill((20, 20, 20)) - - count = pygame.transform.threshold( - dest_surface=dest_surface, - surface=surface, - threshold=(1, 1, 1), - set_color=expected_color, - search_color=None, - search_surf=search_surface, - ) - - self.assertIsInstance(dest_surface, pygame.Surface) - self.assertIsInstance(dest_surface, test_utils.SurfaceSubclass) - self.assertEqual(count, expected_count) - self.assertEqual(dest_surface.get_at((0, 0)), expected_color) - self.assertEqual(dest_surface.get_bitsize(), expected_depth) - self.assertEqual(dest_surface.get_size(), expected_size) - self.assertEqual(dest_surface.get_flags(), expected_flags) - - def test_laplacian(self): - """ """ - - SIZE = 32 - s1 = pygame.Surface((SIZE, SIZE)) - s2 = pygame.Surface((SIZE, SIZE)) - s1.fill((10, 10, 70)) - pygame.draw.line(s1, (255, 0, 0), (3, 10), (20, 20)) - - # a line at the last row of the image. - pygame.draw.line(s1, (255, 0, 0), (0, 31), (31, 31)) - - pygame.transform.laplacian(s1, s2) - - # show_image(s1) - # show_image(s2) - - self.assertEqual(s2.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(s2.get_at((3, 10)), (255, 0, 0, 255)) - self.assertEqual(s2.get_at((0, 31)), (255, 0, 0, 255)) - self.assertEqual(s2.get_at((31, 31)), (255, 0, 0, 255)) - - # here we create the return surface. 
- s2 = pygame.transform.laplacian(s1) - - self.assertEqual(s2.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(s2.get_at((3, 10)), (255, 0, 0, 255)) - self.assertEqual(s2.get_at((0, 31)), (255, 0, 0, 255)) - self.assertEqual(s2.get_at((31, 31)), (255, 0, 0, 255)) - - def test_laplacian__24_big_endian(self): - """ """ - pygame.display.init() - try: - surf_1 = pygame.image.load( - example_path(os.path.join("data", "laplacian.png")) - ) - SIZE = 32 - surf_2 = pygame.Surface((SIZE, SIZE), 0, 24) - # s1.fill((10, 10, 70)) - # pygame.draw.line(s1, (255, 0, 0), (3, 10), (20, 20)) - - # a line at the last row of the image. - # pygame.draw.line(s1, (255, 0, 0), (0, 31), (31, 31)) - - # Also validate keyword arguments - pygame.transform.laplacian(surface=surf_1, dest_surface=surf_2) - - # show_image(s1) - # show_image(s2) - - self.assertEqual(surf_2.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(surf_2.get_at((3, 10)), (255, 0, 0, 255)) - self.assertEqual(surf_2.get_at((0, 31)), (255, 0, 0, 255)) - self.assertEqual(surf_2.get_at((31, 31)), (255, 0, 0, 255)) - - # here we create the return surface. - surf_2 = pygame.transform.laplacian(surf_1) - - self.assertEqual(surf_2.get_at((0, 0)), (0, 0, 0, 255)) - self.assertEqual(surf_2.get_at((3, 10)), (255, 0, 0, 255)) - self.assertEqual(surf_2.get_at((0, 31)), (255, 0, 0, 255)) - self.assertEqual(surf_2.get_at((31, 31)), (255, 0, 0, 255)) - finally: - pygame.display.quit() - - def test_average_surfaces(self): - """ """ - - SIZE = 32 - s1 = pygame.Surface((SIZE, SIZE)) - s2 = pygame.Surface((SIZE, SIZE)) - s3 = pygame.Surface((SIZE, SIZE)) - s1.fill((10, 10, 70)) - s2.fill((10, 20, 70)) - s3.fill((10, 130, 10)) - - surfaces = [s1, s2, s3] - surfaces = [s1, s2] - sr = pygame.transform.average_surfaces(surfaces) - - self.assertEqual(sr.get_at((0, 0)), (10, 15, 70, 255)) - - self.assertRaises(TypeError, pygame.transform.average_surfaces, 1) - self.assertRaises(TypeError, pygame.transform.average_surfaces, []) - - self.assertRaises(TypeError, pygame.transform.average_surfaces, [1]) - self.assertRaises(TypeError, pygame.transform.average_surfaces, [s1, 1]) - self.assertRaises(TypeError, pygame.transform.average_surfaces, [1, s1]) - self.assertRaises(TypeError, pygame.transform.average_surfaces, [s1, s2, 1]) - - self.assertRaises( - TypeError, pygame.transform.average_surfaces, (s for s in [s1, s2, s3]) - ) - - def test_average_surfaces__24(self): - - SIZE = 32 - depth = 24 - s1 = pygame.Surface((SIZE, SIZE), 0, depth) - s2 = pygame.Surface((SIZE, SIZE), 0, depth) - s3 = pygame.Surface((SIZE, SIZE), 0, depth) - s1.fill((10, 10, 70, 255)) - s2.fill((10, 20, 70, 255)) - s3.fill((10, 130, 10, 255)) - - surfaces = [s1, s2, s3] - sr = pygame.transform.average_surfaces(surfaces) - self.assertEqual(sr.get_masks(), s1.get_masks()) - self.assertEqual(sr.get_flags(), s1.get_flags()) - self.assertEqual(sr.get_losses(), s1.get_losses()) - - if 0: - print(sr, s1) - print(sr.get_masks(), s1.get_masks()) - print(sr.get_flags(), s1.get_flags()) - print(sr.get_losses(), s1.get_losses()) - print(sr.get_shifts(), s1.get_shifts()) - - self.assertEqual(sr.get_at((0, 0)), (10, 53, 50, 255)) - - def test_average_surfaces__24_big_endian(self): - pygame.display.init() - try: - surf_1 = pygame.image.load(example_path(os.path.join("data", "BGR.png"))) - - surf_2 = surf_1.copy() - - surfaces = [surf_1, surf_2] - self.assertEqual(surf_1.get_at((0, 0)), (255, 0, 0, 255)) - self.assertEqual(surf_2.get_at((0, 0)), (255, 0, 0, 255)) - - surf_av = pygame.transform.average_surfaces(surfaces) 
- self.assertEqual(surf_av.get_masks(), surf_1.get_masks()) - self.assertEqual(surf_av.get_flags(), surf_1.get_flags()) - self.assertEqual(surf_av.get_losses(), surf_1.get_losses()) - - self.assertEqual(surf_av.get_at((0, 0)), (255, 0, 0, 255)) - finally: - pygame.display.quit() - - def test_average_surfaces__subclassed_surfaces(self): - """Ensure average_surfaces accepts subclassed surfaces.""" - expected_size = (23, 17) - expected_flags = 0 - expected_depth = 32 - expected_color = (50, 50, 50, 255) - surfaces = [] - - for color in ((40, 60, 40), (60, 40, 60)): - s = test_utils.SurfaceSubclass( - expected_size, expected_flags, expected_depth - ) - s.fill(color) - surfaces.append(s) - - surface = pygame.transform.average_surfaces(surfaces) - - self.assertIsInstance(surface, pygame.Surface) - self.assertNotIsInstance(surface, test_utils.SurfaceSubclass) - self.assertEqual(surface.get_at((0, 0)), expected_color) - self.assertEqual(surface.get_bitsize(), expected_depth) - self.assertEqual(surface.get_size(), expected_size) - self.assertEqual(surface.get_flags(), expected_flags) - - def test_average_surfaces__subclassed_destination_surface(self): - """Ensure average_surfaces accepts a destination subclassed surface.""" - expected_size = (13, 27) - expected_flags = 0 - expected_depth = 32 - expected_color = (15, 15, 15, 255) - surfaces = [] - - for color in ((10, 10, 20), (20, 20, 10), (30, 30, 30)): - s = test_utils.SurfaceSubclass( - expected_size, expected_flags, expected_depth - ) - s.fill(color) - surfaces.append(s) - expected_dest_surface = surfaces.pop() - - # Also validate keyword arguments - dest_surface = pygame.transform.average_surfaces( - surfaces=surfaces, dest_surface=expected_dest_surface - ) - - self.assertIsInstance(dest_surface, pygame.Surface) - self.assertIsInstance(dest_surface, test_utils.SurfaceSubclass) - self.assertIs(dest_surface, expected_dest_surface) - self.assertEqual(dest_surface.get_at((0, 0)), expected_color) - self.assertEqual(dest_surface.get_bitsize(), expected_depth) - self.assertEqual(dest_surface.get_size(), expected_size) - self.assertEqual(dest_surface.get_flags(), expected_flags) - - def test_average_color(self): - """ """ - for i in (24, 32): - with self.subTest(f"Testing {i}-bit surface"): - s = pygame.Surface((32, 32), 0, i) - s.fill((0, 100, 200)) - s.fill((10, 50, 100), (0, 0, 16, 32)) - - self.assertEqual(pygame.transform.average_color(s), (5, 75, 150, 0)) - - # Also validate keyword arguments - avg_color = pygame.transform.average_color( - surface=s, rect=(16, 0, 16, 32) - ) - self.assertEqual(avg_color, (0, 100, 200, 0)) - - def test_rotate(self): - # setting colors and canvas - blue = (0, 0, 255, 255) - red = (255, 0, 0, 255) - black = (0, 0, 0) - canvas = pygame.Surface((3, 3)) - rotation = 0 - - canvas.set_at((2, 0), blue) - canvas.set_at((0, 2), red) - - self.assertEqual(canvas.get_at((0, 0)), black) - self.assertEqual(canvas.get_at((2, 0)), blue) - self.assertEqual(canvas.get_at((0, 2)), red) - - for i in range(0, 4): - if i % 2 == 0: - self.assertEqual(canvas.get_at((0, 0)), black) - elif i == 1: - self.assertEqual(canvas.get_at((0, 0)), blue) - elif i == 3: - self.assertEqual(canvas.get_at((0, 0)), red) - - rotation += 90 - # Also validate keyword arguments - canvas = pygame.transform.rotate(surface=canvas, angle=90) - - self.assertEqual(canvas.get_at((0, 0)), black) - - def test_rotate_of_0_sized_surface(self): - # This function just tests possible Segmentation Fault - canvas1 = pygame.Surface((0, 1)) - canvas2 = pygame.Surface((1, 0)) 
- pygame.transform.rotate(canvas1, 42) - pygame.transform.rotate(canvas2, 42) - - def test_rotate__lossless_at_90_degrees(self): - w, h = 32, 32 - s = pygame.Surface((w, h), pygame.SRCALPHA) - - gradient = list(test_utils.gradient(w, h)) - - for pt, color in gradient: - s.set_at(pt, color) - - for rotation in (90, -90): - s = pygame.transform.rotate(s, rotation) - - for pt, color in gradient: - self.assertTrue(s.get_at(pt) == color) - - def test_scale2x(self): - - # __doc__ (as of 2008-06-25) for pygame.transform.scale2x: - - # pygame.transform.scale2x(Surface, DestSurface = None): Surface - # specialized image doubler - - w, h = 32, 32 - s = pygame.Surface((w, h), pygame.SRCALPHA, 32) - - # s.set_at((0,0), (20, 20, 20, 255)) - - s1 = pygame.transform.scale2x(s) - # Also validate keyword arguments - s2 = pygame.transform.scale2x(surface=s) - self.assertEqual(s1.get_rect().size, (64, 64)) - self.assertEqual(s2.get_rect().size, (64, 64)) - - def test_scale2xraw(self): - w, h = 32, 32 - s = pygame.Surface((w, h), pygame.SRCALPHA, 32) - s.fill((0, 0, 0)) - pygame.draw.circle(s, (255, 0, 0), (w // 2, h // 2), (w // 3)) - - s2 = pygame.transform.scale(s, (w * 2, h * 2)) - s2_2 = pygame.transform.scale(s2, (w * 4, h * 4)) - s4 = pygame.transform.scale(s, (w * 4, h * 4)) - - self.assertEqual(s2_2.get_rect().size, (128, 128)) - - for pt in test_utils.rect_area_pts(s2_2.get_rect()): - self.assertEqual(s2_2.get_at(pt), s4.get_at(pt)) - - def test_get_smoothscale_backend(self): - filter_type = pygame.transform.get_smoothscale_backend() - self.assertTrue(filter_type in ["GENERIC", "MMX", "SSE"]) - # It would be nice to test if a non-generic type corresponds to an x86 - # processor. But there is no simple test for this. platform.machine() - # returns process version specific information, like 'i686'. - - def test_set_smoothscale_backend(self): - # All machines should allow 'GENERIC'. - original_type = pygame.transform.get_smoothscale_backend() - pygame.transform.set_smoothscale_backend("GENERIC") - filter_type = pygame.transform.get_smoothscale_backend() - self.assertEqual(filter_type, "GENERIC") - # All machines should allow returning to original value. - # Also check that keyword argument works. - pygame.transform.set_smoothscale_backend(backend=original_type) - # Something invalid. - def change(): - pygame.transform.set_smoothscale_backend("mmx") - - self.assertRaises(ValueError, change) - # Invalid argument keyword. - def change(): - pygame.transform.set_smoothscale_backend(t="GENERIC") - - self.assertRaises(TypeError, change) - # Invalid argument type. - def change(): - pygame.transform.set_smoothscale_backend(1) - - self.assertRaises(TypeError, change) - # Unsupported type, if possible. - if original_type != "SSE": - - def change(): - pygame.transform.set_smoothscale_backend("SSE") - - self.assertRaises(ValueError, change) - # Should be back where we started. 
- filter_type = pygame.transform.get_smoothscale_backend() - self.assertEqual(filter_type, original_type) - - def test_chop(self): - original_surface = pygame.Surface((20, 20)) - pygame.draw.rect(original_surface, (255, 0, 0), (0, 0, 10, 10)) - pygame.draw.rect(original_surface, (0, 255, 0), (0, 10, 10, 10)) - pygame.draw.rect(original_surface, (0, 0, 255), (10, 0, 10, 10)) - pygame.draw.rect(original_surface, (255, 255, 0), (10, 10, 10, 10)) - # Test chopping the corner of image - rect = pygame.Rect(0, 0, 5, 15) - test_surface = pygame.transform.chop(original_surface, rect) - # Check the size of chopped image - self.assertEqual(test_surface.get_size(), (15, 5)) - # Check if the colors of the chopped image are correct - for x in range(15): - for y in range(5): - if x < 5: - self.assertEqual(test_surface.get_at((x, y)), (0, 255, 0)) - else: - self.assertEqual(test_surface.get_at((x, y)), (255, 255, 0)) - # Check if the original image stayed the same - self.assertEqual(original_surface.get_size(), (20, 20)) - for x in range(20): - for y in range(20): - if x < 10 and y < 10: - self.assertEqual(original_surface.get_at((x, y)), (255, 0, 0)) - if x < 10 < y: - self.assertEqual(original_surface.get_at((x, y)), (0, 255, 0)) - if x > 10 > y: - self.assertEqual(original_surface.get_at((x, y)), (0, 0, 255)) - if x > 10 and y > 10: - self.assertEqual(original_surface.get_at((x, y)), (255, 255, 0)) - # Test chopping the center of the surface: - rect = pygame.Rect(0, 0, 10, 10) - rect.center = original_surface.get_rect().center - # Also validate keyword arguments - test_surface = pygame.transform.chop(surface=original_surface, rect=rect) - self.assertEqual(test_surface.get_size(), (10, 10)) - for x in range(10): - for y in range(10): - if x < 5 and y < 5: - self.assertEqual(test_surface.get_at((x, y)), (255, 0, 0)) - if x < 5 < y: - self.assertEqual(test_surface.get_at((x, y)), (0, 255, 0)) - if x > 5 > y: - self.assertEqual(test_surface.get_at((x, y)), (0, 0, 255)) - if x > 5 and y > 5: - self.assertEqual(test_surface.get_at((x, y)), (255, 255, 0)) - # Test chopping with the empty rect - rect = pygame.Rect(10, 10, 0, 0) - test_surface = pygame.transform.chop(original_surface, rect) - self.assertEqual(test_surface.get_size(), (20, 20)) - # Test chopping the entire surface - rect = pygame.Rect(0, 0, 20, 20) - test_surface = pygame.transform.chop(original_surface, rect) - self.assertEqual(test_surface.get_size(), (0, 0)) - # Test chopping outside of surface - rect = pygame.Rect(5, 15, 20, 20) - test_surface = pygame.transform.chop(original_surface, rect) - self.assertEqual(test_surface.get_size(), (5, 15)) - rect = pygame.Rect(400, 400, 10, 10) - test_surface = pygame.transform.chop(original_surface, rect) - self.assertEqual(test_surface.get_size(), (20, 20)) - - def test_rotozoom(self): - - # __doc__ (as of 2008-08-02) for pygame.transform.rotozoom: - - # pygame.transform.rotozoom(Surface, angle, scale): return Surface - # filtered scale and rotation - # - # This is a combined scale and rotation transform. The resulting - # Surface will be a filtered 32-bit Surface. The scale argument is a - # floating point value that will be multiplied by the current - # resolution. The angle argument is a floating point value that - # represents the counterclockwise degrees to rotate. A negative - # rotation angle will rotate clockwise. 
- - s = pygame.Surface((10, 0)) - pygame.transform.scale(s, (10, 2)) - s1 = pygame.transform.rotozoom(s, 30, 1) - # Also validate keyword arguments - s2 = pygame.transform.rotozoom(surface=s, angle=30, scale=1) - - self.assertEqual(s1.get_rect(), pygame.Rect(0, 0, 0, 0)) - self.assertEqual(s2.get_rect(), pygame.Rect(0, 0, 0, 0)) - - def test_smoothscale(self): - """Tests the stated boundaries, sizing, and color blending of smoothscale function""" - # __doc__ (as of 2008-08-02) for pygame.transform.smoothscale: - - # pygame.transform.smoothscale(Surface, (width, height), DestSurface = - # None): return Surface - # - # scale a surface to an arbitrary size smoothly - # - # Uses one of two different algorithms for scaling each dimension of - # the input surface as required. For shrinkage, the output pixels are - # area averages of the colors they cover. For expansion, a bilinear - # filter is used. For the amd64 and i686 architectures, optimized MMX - # routines are included and will run much faster than other machine - # types. The size is a 2 number sequence for (width, height). This - # function only works for 24-bit or 32-bit surfaces. An exception - # will be thrown if the input surface bit depth is less than 24. - # - # New in pygame 1.8 - - # check stated exceptions - def smoothscale_low_bpp(): - starting_surface = pygame.Surface((20, 20), depth=12) - smoothscaled_surface = pygame.transform.smoothscale( - starting_surface, (10, 10) - ) - - self.assertRaises(ValueError, smoothscale_low_bpp) - - def smoothscale_high_bpp(): - starting_surface = pygame.Surface((20, 20), depth=48) - smoothscaled_surface = pygame.transform.smoothscale( - starting_surface, (10, 10) - ) - - self.assertRaises(ValueError, smoothscale_high_bpp) - - def smoothscale_invalid_scale(): - starting_surface = pygame.Surface((20, 20), depth=32) - smoothscaled_surface = pygame.transform.smoothscale( - starting_surface, (-1, -1) - ) - - self.assertRaises(ValueError, smoothscale_invalid_scale) - - # Test Color Blending Scaling-Up - two_pixel_surface = pygame.Surface((2, 1), depth=32) - two_pixel_surface.fill(pygame.Color(0, 0, 0), pygame.Rect(0, 0, 1, 1)) - two_pixel_surface.fill(pygame.Color(255, 255, 255), pygame.Rect(1, 0, 1, 1)) - for k in [2 ** x for x in range(5, 8)]: # Enlarge to targets 32, 64...256 - bigger_surface = pygame.transform.smoothscale(two_pixel_surface, (k, 1)) - self.assertEqual( - bigger_surface.get_at((k // 2, 0)), pygame.Color(127, 127, 127) - ) - self.assertEqual(bigger_surface.get_size(), (k, 1)) - # Test Color Blending Scaling-Down - two_five_six_surf = pygame.Surface((256, 1), depth=32) - two_five_six_surf.fill(pygame.Color(0, 0, 0), pygame.Rect(0, 0, 128, 1)) - two_five_six_surf.fill(pygame.Color(255, 255, 255), pygame.Rect(128, 0, 128, 1)) - for k in range(3, 11, 2): # Shrink to targets 3, 5...11 pixels wide - smaller_surface = pygame.transform.smoothscale(two_five_six_surf, (k, 1)) - self.assertEqual( - smaller_surface.get_at(((k // 2), 0)), pygame.Color(127, 127, 127) - ) - self.assertEqual(smaller_surface.get_size(), (k, 1)) - - -class TransformDisplayModuleTest(unittest.TestCase): - def setUp(self): - pygame.display.init() - pygame.display.set_mode((320, 200)) - - def tearDown(self): - pygame.display.quit() - - def test_flip(self): - """honors the set_color key on the returned surface from flip.""" - image_loaded = pygame.image.load(example_path("data/chimp.png")) - - image = pygame.Surface(image_loaded.get_size(), 0, 32) - image.blit(image_loaded, (0, 0)) - - image_converted = 
image_loaded.convert() - - self.assertFalse(image.get_flags() & pygame.SRCALPHA) - self.assertFalse(image_converted.get_flags() & pygame.SRCALPHA) - - surf = pygame.Surface(image.get_size(), 0, 32) - surf2 = pygame.Surface(image.get_size(), 0, 32) - - surf.fill((255, 255, 255)) - surf2.fill((255, 255, 255)) - - colorkey = image.get_at((0, 0)) - image.set_colorkey(colorkey, RLEACCEL) - timage = pygame.transform.flip(image, 1, 0) - - colorkey = image_converted.get_at((0, 0)) - image_converted.set_colorkey(colorkey, RLEACCEL) - # Also validate keyword arguments - timage_converted = pygame.transform.flip( - surface=image_converted, flip_x=1, flip_y=0 - ) - - # blit the flipped surface, and non flipped surface. - surf.blit(timage, (0, 0)) - surf2.blit(image, (0, 0)) - - # the results should be the same. - self.assertEqual(surf.get_at((0, 0)), surf2.get_at((0, 0))) - self.assertEqual(surf2.get_at((0, 0)), (255, 255, 255, 255)) - - # now we test the convert() ed image also works. - surf.fill((255, 255, 255)) - surf2.fill((255, 255, 255)) - surf.blit(timage_converted, (0, 0)) - surf2.blit(image_converted, (0, 0)) - self.assertEqual(surf.get_at((0, 0)), surf2.get_at((0, 0))) - - def test_flip_alpha(self): - """returns a surface with the same properties as the input.""" - image_loaded = pygame.image.load(example_path("data/chimp.png")) - - image_alpha = pygame.Surface(image_loaded.get_size(), pygame.SRCALPHA, 32) - image_alpha.blit(image_loaded, (0, 0)) - - surf = pygame.Surface(image_loaded.get_size(), 0, 32) - surf2 = pygame.Surface(image_loaded.get_size(), 0, 32) - - colorkey = image_alpha.get_at((0, 0)) - image_alpha.set_colorkey(colorkey, RLEACCEL) - timage_alpha = pygame.transform.flip(image_alpha, 1, 0) - - self.assertTrue(image_alpha.get_flags() & pygame.SRCALPHA) - self.assertTrue(timage_alpha.get_flags() & pygame.SRCALPHA) - - # now we test the alpha image works. 
- surf.fill((255, 255, 255)) - surf2.fill((255, 255, 255)) - surf.blit(timage_alpha, (0, 0)) - surf2.blit(image_alpha, (0, 0)) - self.assertEqual(surf.get_at((0, 0)), surf2.get_at((0, 0))) - self.assertEqual(surf2.get_at((0, 0)), (255, 0, 0, 255)) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/version_test.py b/venv/Lib/site-packages/pygame/tests/version_test.py deleted file mode 100644 index ba0bb3d..0000000 --- a/venv/Lib/site-packages/pygame/tests/version_test.py +++ /dev/null @@ -1,48 +0,0 @@ -import os -import unittest - - -pg_header = os.path.join("src_c", "include", "_pygame.h") - - -class VersionTest(unittest.TestCase): - @unittest.skipIf( - not os.path.isfile(pg_header), "Skipping because we cannot find _pygame.h" - ) - def test_pg_version_consistency(self): - from pygame import version - - pgh_major = -1 - pgh_minor = -1 - pgh_patch = -1 - import re - - major_exp_search = re.compile(r"define\s+PG_MAJOR_VERSION\s+([0-9]+)").search - minor_exp_search = re.compile(r"define\s+PG_MINOR_VERSION\s+([0-9]+)").search - patch_exp_search = re.compile(r"define\s+PG_PATCH_VERSION\s+([0-9]+)").search - with open(pg_header) as f: - for line in f: - if pgh_major == -1: - m = major_exp_search(line) - if m: - pgh_major = int(m.group(1)) - if pgh_minor == -1: - m = minor_exp_search(line) - if m: - pgh_minor = int(m.group(1)) - if pgh_patch == -1: - m = patch_exp_search(line) - if m: - pgh_patch = int(m.group(1)) - self.assertEqual(pgh_major, version.vernum[0]) - self.assertEqual(pgh_minor, version.vernum[1]) - self.assertEqual(pgh_patch, version.vernum[2]) - - def test_sdl_version(self): - from pygame import version - - self.assertEqual(len(version.SDL), 3) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/tests/video_test.py b/venv/Lib/site-packages/pygame/tests/video_test.py deleted file mode 100644 index a6a6473..0000000 --- a/venv/Lib/site-packages/pygame/tests/video_test.py +++ /dev/null @@ -1,26 +0,0 @@ -import unittest -import sys -import pygame - -from pygame._sdl2 import video - - -class VideoModuleTest(unittest.TestCase): - default_caption = "pygame window" - - @unittest.skipIf( - not (sys.maxsize > 2 ** 32), - "32 bit SDL 2.0.16 has an issue.", - ) - def test_renderer_set_viewport(self): - """works.""" - window = video.Window(title=self.default_caption, size=(800, 600)) - renderer = video.Renderer(window=window) - renderer.logical_size = (1920, 1080) - rect = pygame.Rect(0, 0, 1920, 1080) - renderer.set_viewport(rect) - self.assertEqual(renderer.get_viewport(), (0, 0, 1920, 1080)) - - -if __name__ == "__main__": - unittest.main() diff --git a/venv/Lib/site-packages/pygame/threads/__init__.py b/venv/Lib/site-packages/pygame/threads/__init__.py deleted file mode 100644 index 89db392..0000000 --- a/venv/Lib/site-packages/pygame/threads/__init__.py +++ /dev/null @@ -1,272 +0,0 @@ -""" -* Experimental * - -Like the map function, but can use a pool of threads. - -Really easy to use threads. eg. tmap(f, alist) - -If you know how to use the map function, you can use threads. -""" - -__author__ = "Rene Dudfield" -__version__ = "0.3.0" -__license__ = "Python license" - -from queue import Queue, Empty -import threading - - -Thread = threading.Thread - -STOP = object() -FINISH = object() - -# DONE_ONE = object() -# DONE_TWO = object() - -# a default worker queue. -_wq = None - -# if we are using threads or not. This is the number of workers. 
-_use_workers = 0 - -# Set this to the maximum for the amount of Cores/CPUs -# Note, that the tests early out. -# So it should only test the best number of workers +2 -MAX_WORKERS_TO_TEST = 64 - - -def init(number_of_workers=0): - """Does a little test to see if threading is worth it. - Sets up a global worker queue if it's worth it. - - Calling init() is not required, but is generally better to do. - """ - global _wq, _use_workers - - if number_of_workers: - _use_workers = number_of_workers - else: - _use_workers = benchmark_workers() - - # if it is best to use zero workers, then use that. - _wq = WorkerQueue(_use_workers) - - -def quit(): - """cleans up everything.""" - global _wq, _use_workers - _wq.stop() - _wq = None - _use_workers = False - - -def benchmark_workers(a_bench_func=None, the_data=None): - """does a little test to see if workers are at all faster. - Returns the number of workers which works best. - Takes a little bit of time to run, so you should only really call - it once. - You can pass in benchmark data, and functions if you want. - a_bench_func - f(data) - the_data - data to work on. - """ - # TODO: try and make this scale better with slower/faster cpus. - # first find some variables so that using 0 workers takes about 1.0 seconds. - # then go from there. - - # note, this will only work with pygame 1.8rc3+ - # replace the doit() and the_data with something that releases the GIL - - import pygame - import pygame.transform - import time - - if not a_bench_func: - - def doit(x): - return pygame.transform.scale(x, (544, 576)) - - else: - doit = a_bench_func - - if not the_data: - thedata = [pygame.Surface((155, 155), 0, 32) for x in range(10)] - else: - thedata = the_data - - best = time.time() + 100000000 - best_number = 0 - # last_best = -1 - - for num_workers in range(0, MAX_WORKERS_TO_TEST): - - wq = WorkerQueue(num_workers) - t1 = time.time() - for _ in range(20): - print(f"active count:{threading.activeCount()}") - tmap(doit, thedata, worker_queue=wq) - t2 = time.time() - - wq.stop() - - total_time = t2 - t1 - print(f"total time num_workers:{num_workers}: time:{total_time}:") - - if total_time < best: - # last_best = best_number - best_number = num_workers - best = total_time - - if num_workers - best_number > 1: - # We tried to add more, but it didn't like it. - # so we stop with testing at this number. - break - - return best_number - - -class WorkerQueue(object): - def __init__(self, num_workers=20): - self.queue = Queue() - self.pool = [] - self._setup_workers(num_workers) - - def _setup_workers(self, num_workers): - """Sets up the worker threads - NOTE: undefined behaviour if you call this again. - """ - self.pool = [] - - for _ in range(num_workers): - self.pool.append(Thread(target=self.threadloop)) - - for a_thread in self.pool: - a_thread.setDaemon(True) - a_thread.start() - - def do(self, f, *args, **kwArgs): - """puts a function on a queue for running later.""" - self.queue.put((f, args, kwArgs)) - - def stop(self): - """Stops the WorkerQueue, waits for all of the threads to finish up.""" - self.queue.put(STOP) - for thread in self.pool: - thread.join() - - def threadloop(self): # , finish=False): - """Loops until all of the tasks are finished.""" - while True: - args = self.queue.get() - if args is STOP: - self.queue.put(STOP) - self.queue.task_done() - break - try: - args[0](*args[1], **args[2]) - finally: - # clean up the queue, raise the exception. 
- self.queue.task_done() - # raise - - def wait(self): - """waits until all tasks are complete.""" - self.queue.join() - - -class FuncResult: - """Used for wrapping up a function call so that the results are stored - inside the instances result attribute. - """ - - def __init__(self, f, callback=None, errback=None): - """f - is the function we that we call - callback(result) - this is called when the function(f) returns - errback(exception) - this is called when the function(f) raises - an exception. - """ - self.f = f - self.exception = None - self.result = None - self.callback = callback - self.errback = errback - - def __call__(self, *args, **kwargs): - # we try to call the function here. If it fails we store the exception. - try: - self.result = self.f(*args, **kwargs) - if self.callback: - self.callback(self.result) - except Exception as e: - self.exception = e - if self.errback: - self.errback(self.exception) - - -def tmap(f, seq_args, num_workers=20, worker_queue=None, wait=True, stop_on_error=True): - """like map, but uses a thread pool to execute. - num_workers - the number of worker threads that will be used. If pool - is passed in, then the num_workers arg is ignored. - worker_queue - you can optionally pass in an existing WorkerQueue. - wait - True means that the results are returned when everything is finished. - False means that we return the [worker_queue, results] right away instead. - results, is returned as a list of FuncResult instances. - stop_on_error - - """ - - if worker_queue: - wq = worker_queue - else: - # see if we have a global queue to work with. - if _wq: - wq = _wq - else: - if num_workers == 0: - return map(f, seq_args) - - wq = WorkerQueue(num_workers) - - # we short cut it here if the number of workers is 0. - # normal map should be faster in this case. - if len(wq.pool) == 0: - return map(f, seq_args) - - # print ("queue size:%s" % wq.queue.qsize()) - - # TODO: divide the data (seq_args) into even chunks and - # then pass each thread a map(f, equal_part(seq_args)) - # That way there should be less locking, and overhead. - - results = [] - for sa in seq_args: - results.append(FuncResult(f)) - wq.do(results[-1], sa) - - # wq.stop() - - if wait: - # print ("wait") - wq.wait() - # print ("after wait") - # print ("queue size:%s" % wq.queue.qsize()) - if wq.queue.qsize(): - raise Exception("buggy threadmap") - # if we created a worker queue, we need to stop it. - if not worker_queue and not _wq: - # print ("stoping") - wq.stop() - if wq.queue.qsize(): - um = wq.queue.get() - if not um is STOP: - raise Exception("buggy threadmap") - - # see if there were any errors. If so raise the first one. This matches map behaviour. - # TODO: the traceback doesn't show up nicely. - # NOTE: TODO: we might want to return the results anyway? This should be an option. 
- if stop_on_error: - error_ones = list(filter(lambda x: x.exception, results)) - if error_ones: - raise error_ones[0].exception - - return map(lambda x: x.result, results) - return [wq, results] diff --git a/venv/Lib/site-packages/pygame/threads/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/pygame/threads/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 958b7a4..0000000 Binary files a/venv/Lib/site-packages/pygame/threads/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/time.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/time.cp39-win_amd64.pyd deleted file mode 100644 index 8a63223..0000000 Binary files a/venv/Lib/site-packages/pygame/time.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/time.pyi b/venv/Lib/site-packages/pygame/time.pyi deleted file mode 100644 index f938442..0000000 --- a/venv/Lib/site-packages/pygame/time.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Union - -from pygame.event import Event - -def get_ticks() -> int: ... -def wait(milliseconds: int) -> int: ... -def delay(milliseconds: int) -> int: ... -def set_timer(event: Union[int, Event], millis: int, loops: int = 0) -> None: ... - -class Clock: - def tick(self, framerate: int = 0) -> int: ... - def tick_busy_loop(self, framerate: int = 0) -> int: ... - def get_time(self) -> int: ... - def get_rawtime(self) -> int: ... - def get_fps(self) -> float: ... diff --git a/venv/Lib/site-packages/pygame/transform.cp39-win_amd64.pyd b/venv/Lib/site-packages/pygame/transform.cp39-win_amd64.pyd deleted file mode 100644 index cb646b3..0000000 Binary files a/venv/Lib/site-packages/pygame/transform.cp39-win_amd64.pyd and /dev/null differ diff --git a/venv/Lib/site-packages/pygame/transform.pyi b/venv/Lib/site-packages/pygame/transform.pyi deleted file mode 100644 index 4fe8e91..0000000 --- a/venv/Lib/site-packages/pygame/transform.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Optional, Sequence, Union - -from pygame.color import Color -from pygame.surface import Surface - -from ._common import _ColorValue, _Coordinate, _RectValue - -def flip(surface: Surface, flip_x: bool, flip_y: bool) -> Surface: ... -def scale( - surface: Surface, - size: _Coordinate, - dest_surface: Optional[Surface] = None, -) -> Surface: ... -def rotate(surface: Surface, angle: float) -> Surface: ... -def rotozoom(surface: Surface, angle: float, scale: float) -> Surface: ... -def scale2x(surface: Surface, dest_surface: Optional[Surface] = None) -> Surface: ... -def smoothscale( - surface: Surface, - size: _Coordinate, - dest_surface: Optional[Surface] = None, -) -> Surface: ... -def get_smoothscale_backend() -> str: ... -def set_smoothscale_backend(backend: str) -> None: ... -def chop(surface: Surface, rect: _RectValue) -> Surface: ... -def laplacian(surface: Surface, dest_surface: Optional[Surface] = None) -> Surface: ... -def average_surfaces( - surfaces: Sequence[Surface], - dest_surface: Optional[Surface] = None, - palette_colors: Union[bool, int] = 1, -) -> Surface: ... -def average_color(surface: Surface, rect: Optional[_RectValue] = None) -> Color: ... -def threshold( - dest_surface: Optional[Surface], - surface: Surface, - search_color: Optional[_ColorValue], - threshold: Optional[_ColorValue] = (0, 0, 0, 0), - set_color: Optional[_ColorValue] = (0, 0, 0, 0), - set_behavior: Optional[int] = 1, - search_surf: Optional[Surface] = None, - inverse_set: Optional[bool] = False, -) -> int: ... 
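The pygame.threads module removed above documents its own usage pattern: tmap(f, alist) behaves like map() but runs the calls on a pool of worker threads, and its benchmark deliberately uses pygame.transform.scale because that call releases the GIL. A minimal sketch of that pattern, assuming the same pygame.threads API is available from an installed pygame rather than this vendored copy; the shrink() helper and the surface list are purely illustrative and lean on the transform.scale signature from the stub deleted just above:

import pygame
from pygame import threads

pygame.init()

def shrink(surface):
    # transform.scale is the same kind of GIL-releasing work the module's own benchmark uses
    return pygame.transform.scale(surface, (16, 16))

surfaces = [pygame.Surface((64, 64)) for _ in range(8)]

threads.init(4)                                  # optional: set up the shared worker queue
thumbs = list(threads.tmap(shrink, surfaces))    # like map(), but executed on the thread pool
threads.quit()                                   # stop the workers again

pygame.quit()

With wait=True (the default) tmap blocks until every FuncResult has run and returns the collected results; passing wait=False instead hands back the worker queue and the FuncResult list for the caller to manage.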
diff --git a/venv/Lib/site-packages/pygame/version.py b/venv/Lib/site-packages/pygame/version.py deleted file mode 100644 index b287dba..0000000 --- a/venv/Lib/site-packages/pygame/version.py +++ /dev/null @@ -1,72 +0,0 @@ -## pygame - Python Game Library -## Copyright (C) 2000-2003 Pete Shinners -## -## This library is free software; you can redistribute it and/or -## modify it under the terms of the GNU Library General Public -## License as published by the Free Software Foundation; either -## version 2 of the License, or (at your option) any later version. -## -## This library is distributed in the hope that it will be useful, -## but WITHOUT ANY WARRANTY; without even the implied warranty of -## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -## Library General Public License for more details. -## -## You should have received a copy of the GNU Library General Public -## License along with this library; if not, write to the Free -## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -## -## Pete Shinners -## pete@shinners.org - -"""Simply the current installed pygame version. The version information is -stored in the regular pygame module as 'pygame.ver'. Keeping the version -information also available in a separate module allows you to test the -pygame version without importing the main pygame module. - -The python version information should always compare greater than any previous -releases. (hmm, until we get to versions > 10) -""" -from pygame.base import get_sdl_version - -############### -# This file is generated with version.py.in -## - -class SoftwareVersion(tuple): - """ - A class for storing data about software versions. - """ - __slots__ = () - fields = "major", "minor", "patch" - - def __new__(cls, major, minor, patch): - return tuple.__new__(cls, (major, minor, patch)) - - def __repr__(self): - fields = (f"{fld}={val}" for fld, val in zip(self.fields, self)) - return f"{str(self.__class__.__name__)}({', '.join(fields)})" - - def __str__(self): - return f"{self.major}.{self.minor}.{self.patch}" - - major = property(lambda self: self[0]) - minor = property(lambda self: self[1]) - patch = property(lambda self: self[2]) - -class PygameVersion(SoftwareVersion): - """ - Pygame Version class. - """ - -class SDLVersion(SoftwareVersion): - """ - SDL Version class. - """ - -_sdl_tuple = get_sdl_version() -SDL = SDLVersion(_sdl_tuple[0], _sdl_tuple[1], _sdl_tuple[2]) -ver = "2.1.2" # pylint: disable=invalid-name -vernum = PygameVersion(2, 1, 2) -rev = "" # pylint: disable=invalid-name - -__all__ = ["SDL", "ver", "vernum", "rev"] diff --git a/venv/Lib/site-packages/pygame/version.pyi b/venv/Lib/site-packages/pygame/version.pyi deleted file mode 100644 index 41f2132..0000000 --- a/venv/Lib/site-packages/pygame/version.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Tuple - -class SoftwareVersion(Tuple[int, int, int]): - def __new__(cls, major: int, minor: int, patch: int) -> PygameVersion: ... - def __repr__(self) -> str: ... - def __str__(self) -> str: ... - major: int - minor: int - patch: int - -class PygameVersion(SoftwareVersion): ... -class SDLVersion(SoftwareVersion): ... 
- -SDL: SDLVersion -ver: str -vernum: PygameVersion -rev: str diff --git a/venv/Lib/site-packages/pygame/zlib1.dll b/venv/Lib/site-packages/pygame/zlib1.dll deleted file mode 100644 index e7493de..0000000 Binary files a/venv/Lib/site-packages/pygame/zlib1.dll and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/INSTALLER b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/LICENSE b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/LICENSE deleted file mode 100644 index 353924b..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Jason R. Coombs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
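Referring back to the pygame/version.py module removed a few hunks above: PygameVersion and SDLVersion subclass tuple, so vernum and SDL support ordinary tuple comparison and indexing in addition to the major/minor/patch properties. A minimal sketch, assuming a normally installed pygame (the literal values below are the ones from the copy deleted here and will differ for other installs):

from pygame.version import SDL, ver, vernum

print(ver)                                       # "2.1.2" in the deleted copy
print(vernum.major, vernum.minor, vernum.patch)  # 2 1 2

if vernum >= (2, 0, 0):                          # plain tuple comparison works
    print("built against SDL", SDL)              # whatever SDL version pygame was linked with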
diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/METADATA b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/METADATA deleted file mode 100644 index 394db51..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/METADATA +++ /dev/null @@ -1,142 +0,0 @@ -Metadata-Version: 2.1 -Name: setuptools -Version: 60.9.3 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://github.com/pypa/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org -License: UNKNOWN -Project-URL: Documentation, https://setuptools.pypa.io/ -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities -Requires-Python: >=3.7 -License-File: LICENSE -Provides-Extra: certs -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' -Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' -Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' -Requires-Dist: pygments-github-lexers (==0.0.5) ; extra == 'docs' -Requires-Dist: sphinx-favicon ; extra == 'docs' -Requires-Dist: sphinx-inline-tabs ; extra == 'docs' -Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs' -Requires-Dist: furo ; extra == 'docs' -Provides-Extra: ssl -Provides-Extra: testing -Requires-Dist: pytest (>=6) ; extra == 'testing' -Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' -Requires-Dist: pytest-flake8 ; extra == 'testing' -Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' -Requires-Dist: pytest-perf ; extra == 'testing' -Requires-Dist: mock ; extra == 'testing' -Requires-Dist: flake8-2020 ; extra == 'testing' -Requires-Dist: virtualenv (>=13.0.0) ; extra == 'testing' -Requires-Dist: wheel ; extra == 'testing' -Requires-Dist: pip (>=19.1) ; extra == 'testing' -Requires-Dist: jaraco.envs (>=2.2) ; extra == 'testing' -Requires-Dist: pytest-xdist ; extra == 'testing' -Requires-Dist: sphinx (>=4.3.2) ; extra == 'testing' -Requires-Dist: jaraco.path (>=3.2.0) ; extra == 'testing' -Requires-Dist: build[virtualenv] ; extra == 'testing' -Requires-Dist: filelock (>=3.4.0) ; extra == 'testing' -Requires-Dist: pip-run (>=8.8) ; extra == 'testing' -Provides-Extra: testing-integration -Requires-Dist: pytest ; extra == 'testing-integration' -Requires-Dist: pytest-xdist ; extra == 'testing-integration' -Requires-Dist: pytest-enabler ; extra == 'testing-integration' -Requires-Dist: virtualenv (>=13.0.0) ; extra == 'testing-integration' -Requires-Dist: tomli ; extra == 'testing-integration' -Requires-Dist: wheel ; extra == 'testing-integration' -Requires-Dist: jaraco.path (>=3.2.0) ; extra == 'testing-integration' -Requires-Dist: jaraco.envs (>=2.2) ; extra == 'testing-integration' -Requires-Dist: build[virtualenv] ; extra == 'testing-integration' -Requires-Dist: filelock (>=3.4.0) ; extra == 'testing-integration' -Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' -Requires-Dist: pytest-cov ; (platform_python_implementation != "PyPy") and extra == 
'testing' -Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' - -.. image:: https://raw.githubusercontent.com/pypa/setuptools/main/docs/images/banner-640x320.svg - :align: center - -| - -.. image:: https://img.shields.io/pypi/v/setuptools.svg - :target: `PyPI link`_ - -.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg - :target: `PyPI link`_ - -.. _PyPI link: https://pypi.org/project/setuptools - -.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg - :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22 - :alt: tests - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :target: https://github.com/psf/black - :alt: Code style: Black - -.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg - :target: https://setuptools.pypa.io - -.. image:: https://img.shields.io/badge/skeleton-2022-informational - :target: https://blog.jaraco.com/skeleton - -.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white - :target: https://codecov.io/gh/pypa/setuptools - -.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat - :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme - -.. image:: https://img.shields.io/discord/803025117553754132 - :target: https://discord.com/channels/803025117553754132/815945031150993468 - :alt: Discord - -See the `Installation Instructions -`_ in the Python Packaging -User's Guide for instructions on installing, upgrading, and uninstalling -Setuptools. - -Questions and comments should be directed to `GitHub Discussions -`_. -Bug reports and especially tested patches may be -submitted directly to the `bug tracker -`_. - - -Code of Conduct -=============== - -Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and fora is expected to follow the -`PSF Code of Conduct `_. - - -For Enterprise -============== - -Available as part of the Tidelift Subscription. - -Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. - -`Learn more `_. - - -Security Contact -================ - -To report a security vulnerability, please use the -`Tidelift security contact `_. -Tidelift will coordinate the fix and disclosure. 
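The deleted METADATA file above is the wheel's core-metadata document: Name, Version, Requires-Python, and Requires-Dist entries whose "extra == '...'" markers define the optional docs/testing dependency groups. A minimal sketch, assuming Python 3.8+ and an installed setuptools distribution, of reading those fields at runtime with importlib.metadata (illustration only, not part of the patch):

# Illustration (not part of the diff): reading core metadata fields like those in METADATA.
from importlib.metadata import metadata, requires, version

meta = metadata("setuptools")                    # email.message.Message-like mapping
print(meta["Name"], version("setuptools"))       # e.g. "setuptools 60.9.3"

# requires() returns the raw Requires-Dist strings, including environment markers
# such as extra == "testing"; it may be None for distributions with no dependencies.
for req in requires("setuptools") or []:
    print(req)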
- - diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/RECORD b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/RECORD deleted file mode 100644 index f48ee7a..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/RECORD +++ /dev/null @@ -1,389 +0,0 @@ -_distutils_hack/__init__.py,sha256=Am1CVQftk_MHAKvfJQVpQMwVx3lrAbKGPhKIUgwawDY,5298 -_distutils_hack/__pycache__/__init__.cpython-39.pyc,, -_distutils_hack/__pycache__/override.cpython-39.pyc,, -_distutils_hack/override.py,sha256=Eu_s-NF6VIZ4Cqd0tbbA5wtWky2IZPNd8et6GLt1mzo,44 -distutils-precedence.pth,sha256=JjjOniUA5XKl4N5_rtZmHrVp0baW_LoHsN0iPaX10iQ,151 -pkg_resources/__init__.py,sha256=NssS5SQg4XHYfyygmvKvSjlzq9hhdwvr66DL0I5-bO8,107935 -pkg_resources/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pkg_resources/_vendor/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/__pycache__/appdirs.cpython-39.pyc,, -pkg_resources/_vendor/__pycache__/pyparsing.cpython-39.pyc,, -pkg_resources/_vendor/__pycache__/zipp.cpython-39.pyc,, -pkg_resources/_vendor/appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701 -pkg_resources/_vendor/importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 -pkg_resources/_vendor/importlib_resources/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/_adapters.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/_common.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/_compat.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/_itertools.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/_legacy.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/abc.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/readers.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/__pycache__/simple.cpython-39.pyc,, -pkg_resources/_vendor/importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 -pkg_resources/_vendor/importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741 -pkg_resources/_vendor/importlib_resources/_compat.py,sha256=nFBCGMvImglrqgYkb9aPgOj68-h6xbw-ca94XOv1-zs,2706 -pkg_resources/_vendor/importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 -pkg_resources/_vendor/importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494 -pkg_resources/_vendor/importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886 -pkg_resources/_vendor/importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566 -pkg_resources/_vendor/importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836 -pkg_resources/_vendor/jaraco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pkg_resources/_vendor/jaraco/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/jaraco/__pycache__/context.cpython-39.pyc,, -pkg_resources/_vendor/jaraco/__pycache__/functools.cpython-39.pyc,, -pkg_resources/_vendor/jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420 -pkg_resources/_vendor/jaraco/functools.py,sha256=eLwPh8FWY7rQ_cj1YxCekUkibTuerwyoJ_41H7Q7oWM,13515 -pkg_resources/_vendor/jaraco/text/__init__.py,sha256=cN55bFcceW4wTHG5ruv5IuEDRarP-4hBYX8zl94_c30,15526 
-pkg_resources/_vendor/jaraco/text/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/more_itertools/__init__.py,sha256=ZQYu_9H6stSG7viUgT32TFqslqcZwq82kWRZooKiI8Y,83 -pkg_resources/_vendor/more_itertools/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/more_itertools/__pycache__/more.cpython-39.pyc,, -pkg_resources/_vendor/more_itertools/__pycache__/recipes.cpython-39.pyc,, -pkg_resources/_vendor/more_itertools/more.py,sha256=oave_26jctLsuF30e1SOWMgW0bEuwS-t08wkaLUwvXc,132569 -pkg_resources/_vendor/more_itertools/recipes.py,sha256=N6aCDwoIPvE-aiqpGU-nbFwqiM3X8MKRcxBM84naW88,18410 -pkg_resources/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 -pkg_resources/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 -pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/markers.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/tags.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/utils.cpython-39.pyc,, -pkg_resources/_vendor/packaging/__pycache__/version.cpython-39.pyc,, -pkg_resources/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 -pkg_resources/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 -pkg_resources/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -pkg_resources/_vendor/packaging/markers.py,sha256=gFSKoBTb0sKDw1v_apJy15lPr0v2mEvuEkfooTtcWx4,8496 -pkg_resources/_vendor/packaging/requirements.py,sha256=uJ4cjwm3_nrfHJLCcGU9mT5aw8SXfw8v1aBUD7OFuVs,4706 -pkg_resources/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 -pkg_resources/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 -pkg_resources/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 -pkg_resources/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 -pkg_resources/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 -pkg_resources/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425 -pkg_resources/extern/__init__.py,sha256=inFoCK9jn_yRFqkbNSOxOYyZD0aB3awch_xtbwIW_-Y,2426 -pkg_resources/extern/__pycache__/__init__.cpython-39.pyc,, -setuptools-60.9.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -setuptools-60.9.3.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050 -setuptools-60.9.3.dist-info/METADATA,sha256=YdoPvPFkCgYd185e5NKDeC7wKoAYULKUuzTS_klv8ws,5979 -setuptools-60.9.3.dist-info/RECORD,, -setuptools-60.9.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -setuptools-60.9.3.dist-info/entry_points.txt,sha256=wGnbIm7sqODfIXBH1z7YfkG4xXIks1fD-hFCtQrVaE4,2635 -setuptools-60.9.3.dist-info/top_level.txt,sha256=d9yL39v_W7qmKDDSH6sT4bE0j_Ls1M3P161OGgdsm4g,41 -setuptools/__init__.py,sha256=84lf54h_UbIkYxMn3ZqKyiaC9R86vnmIZUdTT-wxqeg,7461 
-setuptools/__pycache__/__init__.cpython-39.pyc,, -setuptools/__pycache__/_deprecation_warning.cpython-39.pyc,, -setuptools/__pycache__/_entry_points.cpython-39.pyc,, -setuptools/__pycache__/_imp.cpython-39.pyc,, -setuptools/__pycache__/_importlib.cpython-39.pyc,, -setuptools/__pycache__/_itertools.cpython-39.pyc,, -setuptools/__pycache__/_path.cpython-39.pyc,, -setuptools/__pycache__/_reqs.cpython-39.pyc,, -setuptools/__pycache__/archive_util.cpython-39.pyc,, -setuptools/__pycache__/build_meta.cpython-39.pyc,, -setuptools/__pycache__/config.cpython-39.pyc,, -setuptools/__pycache__/dep_util.cpython-39.pyc,, -setuptools/__pycache__/depends.cpython-39.pyc,, -setuptools/__pycache__/dist.cpython-39.pyc,, -setuptools/__pycache__/errors.cpython-39.pyc,, -setuptools/__pycache__/extension.cpython-39.pyc,, -setuptools/__pycache__/glob.cpython-39.pyc,, -setuptools/__pycache__/installer.cpython-39.pyc,, -setuptools/__pycache__/launch.cpython-39.pyc,, -setuptools/__pycache__/logging.cpython-39.pyc,, -setuptools/__pycache__/monkey.cpython-39.pyc,, -setuptools/__pycache__/msvc.cpython-39.pyc,, -setuptools/__pycache__/namespaces.cpython-39.pyc,, -setuptools/__pycache__/package_index.cpython-39.pyc,, -setuptools/__pycache__/py34compat.cpython-39.pyc,, -setuptools/__pycache__/sandbox.cpython-39.pyc,, -setuptools/__pycache__/unicode_utils.cpython-39.pyc,, -setuptools/__pycache__/version.cpython-39.pyc,, -setuptools/__pycache__/wheel.cpython-39.pyc,, -setuptools/__pycache__/windows_support.cpython-39.pyc,, -setuptools/_deprecation_warning.py,sha256=jU9-dtfv6cKmtQJOXN8nP1mm7gONw5kKEtiPtbwnZyI,218 -setuptools/_distutils/__init__.py,sha256=3YtkfadGoU57VMEQFk2TNyMZVud1kDkakWQLhWg2Fm8,536 -setuptools/_distutils/__pycache__/__init__.cpython-39.pyc,, -setuptools/_distutils/__pycache__/_collections.cpython-39.pyc,, -setuptools/_distutils/__pycache__/_msvccompiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/archive_util.cpython-39.pyc,, -setuptools/_distutils/__pycache__/bcppcompiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/ccompiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/cmd.cpython-39.pyc,, -setuptools/_distutils/__pycache__/config.cpython-39.pyc,, -setuptools/_distutils/__pycache__/core.cpython-39.pyc,, -setuptools/_distutils/__pycache__/cygwinccompiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/debug.cpython-39.pyc,, -setuptools/_distutils/__pycache__/dep_util.cpython-39.pyc,, -setuptools/_distutils/__pycache__/dir_util.cpython-39.pyc,, -setuptools/_distutils/__pycache__/dist.cpython-39.pyc,, -setuptools/_distutils/__pycache__/errors.cpython-39.pyc,, -setuptools/_distutils/__pycache__/extension.cpython-39.pyc,, -setuptools/_distutils/__pycache__/fancy_getopt.cpython-39.pyc,, -setuptools/_distutils/__pycache__/file_util.cpython-39.pyc,, -setuptools/_distutils/__pycache__/filelist.cpython-39.pyc,, -setuptools/_distutils/__pycache__/log.cpython-39.pyc,, -setuptools/_distutils/__pycache__/msvc9compiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/msvccompiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/py35compat.cpython-39.pyc,, -setuptools/_distutils/__pycache__/py38compat.cpython-39.pyc,, -setuptools/_distutils/__pycache__/spawn.cpython-39.pyc,, -setuptools/_distutils/__pycache__/sysconfig.cpython-39.pyc,, -setuptools/_distutils/__pycache__/text_file.cpython-39.pyc,, -setuptools/_distutils/__pycache__/unixccompiler.cpython-39.pyc,, -setuptools/_distutils/__pycache__/util.cpython-39.pyc,, -setuptools/_distutils/__pycache__/version.cpython-39.pyc,, 
-setuptools/_distutils/__pycache__/versionpredicate.cpython-39.pyc,, -setuptools/_distutils/_collections.py,sha256=s7zkSh7QUyJWEYSt5n10ouAZNDYvux8YCHnnY3k0wmQ,1330 -setuptools/_distutils/_msvccompiler.py,sha256=i8vRyUE3jqX5BLzVa3ZeLheyEoKN6KGJDJ44Tlz69ww,20809 -setuptools/_distutils/archive_util.py,sha256=qW-uiGwYexTvK5e-iSel_31Dshx-CqTanNPK6snwf98,8572 -setuptools/_distutils/bcppcompiler.py,sha256=gJqtPboJZl1llfCtjo_SVCE1DdjgK1H2rd0Vngz18QI,14885 -setuptools/_distutils/ccompiler.py,sha256=YbernlpGZZqKnfzZSfJ814fINca8cicZiUlBjyUPyaM,47644 -setuptools/_distutils/cmd.py,sha256=eco6LAGUtobLuPafuhmgKgkwRRL_WY8KJ4YeDCHpcls,18079 -setuptools/_distutils/command/__init__.py,sha256=2TA-rlNDlzeI-csbWHXFjGD8uOYqALMfyWOhT49nC6g,799 -setuptools/_distutils/command/__pycache__/__init__.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/bdist.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/bdist_msi.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/build.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/build_clib.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/build_ext.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/build_py.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/build_scripts.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/check.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/clean.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/config.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/install.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/install_data.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/install_egg_info.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/install_headers.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/install_lib.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/install_scripts.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/py37compat.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/register.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/sdist.cpython-39.pyc,, -setuptools/_distutils/command/__pycache__/upload.cpython-39.pyc,, -setuptools/_distutils/command/bdist.py,sha256=2z4eudRl_n7m3lG9leL0IYqes4bsm8c0fxfZuiafjMg,5562 -setuptools/_distutils/command/bdist_dumb.py,sha256=BTur9jcIppyP7Piavjfsk7YjElqvxeYO2npUyPPOekc,4913 -setuptools/_distutils/command/bdist_msi.py,sha256=9Q1f4Pw4PRsg54fqHVgH8FZb78-yN4D5VlDC2KugX0A,35574 -setuptools/_distutils/command/bdist_rpm.py,sha256=gjOw22GhDSbcq0bdq25cTb-n6HWWm0bShLQad_mkJ4k,21537 -setuptools/_distutils/command/bdist_wininst.py,sha256=iGlaI-VfElHOneeczKHWnSN5a10-7IMcJaXuR1mdS3c,16030 -setuptools/_distutils/command/build.py,sha256=1AF-dxN_NlOEyoydBz19AwpeWYPSYCZvOLJSN_PdatY,5773 -setuptools/_distutils/command/build_clib.py,sha256=bgVTHh28eLQA2Gkw68amApd_j7qQBX4MTI-zTvAK_J4,8022 -setuptools/_distutils/command/build_ext.py,sha256=KgxpopuD6sqep0LsumMH15joWih0VdbnXpYm-ETNjoE,31612 -setuptools/_distutils/command/build_py.py,sha256=hXesMrH_epNj6K8SUtJdipgEis3EdICKeZ8VWe_ndck,16495 -setuptools/_distutils/command/build_scripts.py,sha256=urdn6wPxPMW5dLqpqFkZ8dqaFG1tf9TiAao6U9LCoEI,5963 -setuptools/_distutils/command/check.py,sha256=brOziX0PqvmfGYSUQlSA93m8b7T350uQwrOowwgNxqE,5630 
-setuptools/_distutils/command/clean.py,sha256=2TCt47ru4hZZM0RfVfUYj5bbpicpGLP4Qhw5jBtvp9k,2776 -setuptools/_distutils/command/config.py,sha256=2aTjww3PwjMB8-ZibCe4P7B-qG1hM1gn_rJXYyxRz6c,13117 -setuptools/_distutils/command/install.py,sha256=dsCo4g_FG6SMsX_TIJQ-qaHWbgdjupBBie4-dfm793o,30075 -setuptools/_distutils/command/install_data.py,sha256=YhGOAwh3gJPqF7em5XA0rmpR42z1bLh80ooElzDyUvk,2822 -setuptools/_distutils/command/install_egg_info.py,sha256=WijZ7cHMAkNMMCwrZ--KoqV9M2RtLouU4-qSbiCwv70,2753 -setuptools/_distutils/command/install_headers.py,sha256=XQ6idkbIDfr1ljXCOznuVUMvOFpHBn6cK0Wz9gIM2b4,1298 -setuptools/_distutils/command/install_lib.py,sha256=9AofR-MO9lAtjwwuukCptepOaJEKMZW2VHiyR5hU7HA,8397 -setuptools/_distutils/command/install_scripts.py,sha256=_CLUeQwGJRcY2kik7azPMn5IdtDCrjWdUvZ1khlG6ck,2017 -setuptools/_distutils/command/py37compat.py,sha256=qzRhhvTihqx_PZZt2ZYECxh1X3Oj255VqatzelYFAKw,671 -setuptools/_distutils/command/register.py,sha256=2jaq9968rt2puRVDBx1HbNiXv27uOk8idE_4lPf_3VM,11712 -setuptools/_distutils/command/sdist.py,sha256=qotJjAOzyhJjq2-oDImjNFrOtaSneEFDJTB-sEk1wnU,19005 -setuptools/_distutils/command/upload.py,sha256=BLO1w7eSAqsCjCLXtf_CRVSjwF1WmyOByGVGNdcQ8oY,7597 -setuptools/_distutils/config.py,sha256=dtHgblx9JhfyrKx1-J7Jlxw_f7s8ZbPFQii2UWMTZpY,4827 -setuptools/_distutils/core.py,sha256=0v7Emh9y0AW9o4AEjfVMhDxKzTFWFxUQn46spFSL56g,9282 -setuptools/_distutils/cygwinccompiler.py,sha256=eOMXcoZ_Reto4VQR_lWK-IylR1Lsi_RW0MMwCqhlvtU,14521 -setuptools/_distutils/debug.py,sha256=N6MrTAqK6l9SVk6tWweR108PM8Ol7qNlfyV-nHcLhsY,139 -setuptools/_distutils/dep_util.py,sha256=GuR9Iw_jzZRkyemJ5HX8rB_wRGxkIBcBm1qh54r7zhk,3491 -setuptools/_distutils/dir_util.py,sha256=UwhBOUTcV65GTwce4SPuTXR8Z8q3LYEcmttqcGb0bYo,7778 -setuptools/_distutils/dist.py,sha256=Biuf6ca8uiFfMScRFsYUKtb5neMPtxKxRtXn50_1f3U,50421 -setuptools/_distutils/errors.py,sha256=Yr6tKZGdzBoNi53vBtiq0UJ__X05CmxSdQJqOWaw6SY,3577 -setuptools/_distutils/extension.py,sha256=bTb3Q0CoevGKYv5dX1ls--Ln8tlB0-UEOsi9BwzlZ-s,10515 -setuptools/_distutils/fancy_getopt.py,sha256=OPxp2CxHi1Yp_d1D8JxW4Ueq9fC71tegQFaafh58GGU,17784 -setuptools/_distutils/file_util.py,sha256=0hUqfItN_x2DVihR0MHdA4KCMVCOO8VoByaFp_a6MDg,8148 -setuptools/_distutils/filelist.py,sha256=Z9f5hvepZnpniZ2IFmCnWIjdviWozs8sbARBhWajwoM,13407 -setuptools/_distutils/log.py,sha256=gZ0wCQvSMzrS_6ccOhtvceqigM77oT_GKB_nnooRIXo,1973 -setuptools/_distutils/msvc9compiler.py,sha256=XXs85TZO4quRKOdWUk6ylcD-1f_QAm4ceiHbiW5tH-k,30474 -setuptools/_distutils/msvccompiler.py,sha256=E-Fm6eLWnRr-sSRpHGOq6ezJ7YCnDuM3MV7_i9wiTRg,23531 -setuptools/_distutils/py35compat.py,sha256=-sk1vBIsOgH-AobjIYbK_OEjdJF_54Ul_D1EiE9XM_c,455 -setuptools/_distutils/py38compat.py,sha256=II7ddBxOijC7uNN4z_46HYUjwYTJYMNiLJoGTormZm0,212 -setuptools/_distutils/spawn.py,sha256=rbPiTTUGLIo0QtzS-n04HuzXh15ztDcUDJ94vXfheFY,3474 -setuptools/_distutils/sysconfig.py,sha256=MJ2B3ARCEVi0TqHjTTQVxIfKLsQ1BUF__cfT9nCjTb4,21103 -setuptools/_distutils/text_file.py,sha256=PsuAJeWdKJoLSV_6N6IpB5-0Pa84KzLUucJMFRazw3I,12483 -setuptools/_distutils/unixccompiler.py,sha256=u2Sfs6LRmqQux4nZW08GwDtoFMded6wYnkiaO2TvKC4,14538 -setuptools/_distutils/util.py,sha256=2WMXovrYPE-Uc06ckbV-mHgm9IKsaQ4nUdSGiK-9w_I,18537 -setuptools/_distutils/version.py,sha256=syRvPxuMQxnftpuIKeRE-2ELQ_ZMCwMJ-o8ie-lxdZo,13015 -setuptools/_distutils/versionpredicate.py,sha256=vx4ND3BtMgxFR9iZ4_t3WFa-NdIKxO8vtOd0twBppxc,5277 -setuptools/_entry_points.py,sha256=5rRyEuiC0tdEsoCRJ6NWii5RET134mtDtjoSTFdLCwA,1972 
-setuptools/_imp.py,sha256=HmF91IbitRfsD5z-g4_wmcuH-RahyIONbPgiCOFgtzA,2392 -setuptools/_importlib.py,sha256=VItIWibvlzNI2QEko84WAmLh0b0_iVFd8TtjY0AekjY,891 -setuptools/_itertools.py,sha256=pZAgXNz6tRPUFnHAaKJ90xAgD0gLPemcE1396Zgz73o,675 -setuptools/_path.py,sha256=nMzHs8z0MXh_MsQG3ndRrWsQS1q2u_E_-0QxeVqXfkk,176 -setuptools/_reqs.py,sha256=ApdTOmDFyK7hbHDnAH8VwhtVD5kvnOthyMNTmrUeFXs,501 -setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -setuptools/_vendor/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/__pycache__/ordered_set.cpython-39.pyc,, -setuptools/_vendor/__pycache__/pyparsing.cpython-39.pyc,, -setuptools/_vendor/__pycache__/typing_extensions.cpython-39.pyc,, -setuptools/_vendor/__pycache__/zipp.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__init__.py,sha256=xRXwTtvg4EAYuBotYeGawbjraQD4GFIvKgMClxApCDY,30130 -setuptools/_vendor/importlib_metadata/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_adapters.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_collections.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_functools.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_itertools.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_meta.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/__pycache__/_text.cpython-39.pyc,, -setuptools/_vendor/importlib_metadata/_adapters.py,sha256=B6fCi5-8mLVDFUZj3krI5nAo-mKp1dH_qIavyIyFrJs,1862 -setuptools/_vendor/importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 -setuptools/_vendor/importlib_metadata/_compat.py,sha256=cotBaMUB-2pIRZboQnWp9fEqm6Dwlypndn-EEn0bj5M,1828 -setuptools/_vendor/importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895 -setuptools/_vendor/importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 -setuptools/_vendor/importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO23qEygbVdF9r-6qh-hjE,1154 -setuptools/_vendor/importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 -setuptools/_vendor/importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 -setuptools/_vendor/importlib_resources/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/_adapters.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/_common.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/_compat.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/_itertools.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/_legacy.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/abc.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/readers.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/__pycache__/simple.cpython-39.pyc,, -setuptools/_vendor/importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 -setuptools/_vendor/importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741 -setuptools/_vendor/importlib_resources/_compat.py,sha256=nFBCGMvImglrqgYkb9aPgOj68-h6xbw-ca94XOv1-zs,2706 -setuptools/_vendor/importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 
-setuptools/_vendor/importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494 -setuptools/_vendor/importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886 -setuptools/_vendor/importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566 -setuptools/_vendor/importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836 -setuptools/_vendor/jaraco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -setuptools/_vendor/jaraco/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/jaraco/__pycache__/context.cpython-39.pyc,, -setuptools/_vendor/jaraco/__pycache__/functools.cpython-39.pyc,, -setuptools/_vendor/jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420 -setuptools/_vendor/jaraco/functools.py,sha256=ap1qoXaNABOx897366NTMEd2objrqAoSO1zuxZPjcmM,13512 -setuptools/_vendor/jaraco/text/__init__.py,sha256=KfFGMerrkN_0V0rgtJVx-9dHt3tW7i_uJypjwEcLtC0,15517 -setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82 -setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/more_itertools/__pycache__/more.cpython-39.pyc,, -setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-39.pyc,, -setuptools/_vendor/more_itertools/more.py,sha256=0rB_mibFR51sq33UlAI_bWfaNdsYNnJr1v6S0CaW7QA,117959 -setuptools/_vendor/more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256 -setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130 -setuptools/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 -setuptools/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 -setuptools/_vendor/packaging/__pycache__/__about__.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/__init__.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/_structures.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/markers.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/requirements.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/tags.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/utils.cpython-39.pyc,, -setuptools/_vendor/packaging/__pycache__/version.cpython-39.pyc,, -setuptools/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 -setuptools/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 -setuptools/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -setuptools/_vendor/packaging/markers.py,sha256=lihRgqpZjLM-JW-vxlLPqU3kmVe79g9vypy1kxmTRuQ,8493 -setuptools/_vendor/packaging/requirements.py,sha256=Opd0FjqgdEiWkzBLyo1oLU0Dj01uIFwTAnAJQrr6j2A,4700 -setuptools/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 -setuptools/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 -setuptools/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 -setuptools/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 
-setuptools/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 -setuptools/_vendor/typing_extensions.py,sha256=1uqi_RSlI7gos4eJB_NEV3d5wQwzTUQHd3_jrkbTo8Q,87149 -setuptools/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425 -setuptools/archive_util.py,sha256=ji0lQ5itrhtfagixEsIJiTwWfl-KKD9cLX2nGMk9ggw,7070 -setuptools/build_meta.py,sha256=Rp40H4rz20WB0GUz7sGshN9mPx3M18zo7aW5hwejie0,10902 -setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 -setuptools/cli-arm64.exe,sha256=o9amxowudZ98NvNWh_a2DRY8LhoIRqTAekxABqltiMc,137216 -setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/command/__init__.py,sha256=e-8TJOikUe3St0fw2b2p9u5EDdSxl5zHUBJJKifbcQ8,217 -setuptools/command/__pycache__/__init__.cpython-39.pyc,, -setuptools/command/__pycache__/alias.cpython-39.pyc,, -setuptools/command/__pycache__/bdist_egg.cpython-39.pyc,, -setuptools/command/__pycache__/bdist_rpm.cpython-39.pyc,, -setuptools/command/__pycache__/build_clib.cpython-39.pyc,, -setuptools/command/__pycache__/build_ext.cpython-39.pyc,, -setuptools/command/__pycache__/build_py.cpython-39.pyc,, -setuptools/command/__pycache__/develop.cpython-39.pyc,, -setuptools/command/__pycache__/dist_info.cpython-39.pyc,, -setuptools/command/__pycache__/easy_install.cpython-39.pyc,, -setuptools/command/__pycache__/egg_info.cpython-39.pyc,, -setuptools/command/__pycache__/install.cpython-39.pyc,, -setuptools/command/__pycache__/install_egg_info.cpython-39.pyc,, -setuptools/command/__pycache__/install_lib.cpython-39.pyc,, -setuptools/command/__pycache__/install_scripts.cpython-39.pyc,, -setuptools/command/__pycache__/py36compat.cpython-39.pyc,, -setuptools/command/__pycache__/register.cpython-39.pyc,, -setuptools/command/__pycache__/rotate.cpython-39.pyc,, -setuptools/command/__pycache__/saveopts.cpython-39.pyc,, -setuptools/command/__pycache__/sdist.cpython-39.pyc,, -setuptools/command/__pycache__/setopt.cpython-39.pyc,, -setuptools/command/__pycache__/test.cpython-39.pyc,, -setuptools/command/__pycache__/upload.cpython-39.pyc,, -setuptools/command/__pycache__/upload_docs.cpython-39.pyc,, -setuptools/command/alias.py,sha256=1sLQxZcNh6dDQpDmm4G7UGGTol83nY1NTPmNBbm2siI,2381 -setuptools/command/bdist_egg.py,sha256=QEIu1AkgS02j6ejonJY7kwGp6LNxfMeYZ3sxkd55ftA,16623 -setuptools/command/bdist_rpm.py,sha256=PxrgoHPNaw2Pw2qNjjHDPC-Ay_IaDbCqP3d_5N-cj2A,1182 -setuptools/command/build_clib.py,sha256=fWHSFGkk10VCddBWCszvNhowbG9Z9CZXVjQ2uSInoOs,4415 -setuptools/command/build_ext.py,sha256=SNK042HfB2ezlDQbSVRGFqI1IM5A4AsjU1wpV3fgskE,13212 -setuptools/command/build_py.py,sha256=c90V1nVPEtYkdye-xvo-B48V5RLvSgD8JBMfPtUbtYw,8751 -setuptools/command/develop.py,sha256=5_Ss7ENd1_B_jVMY1tF5UV_y1Xu6jbVzAPG8oKeluGA,7012 -setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960 -setuptools/command/easy_install.py,sha256=JvgOS1I6PjMdBl8XRGlom1y3B9GOiwzsGd5IU7OHW9k,86193 -setuptools/command/egg_info.py,sha256=KePN5k9NfkdmMhi-0cZMVbs-bnDdO--j7-1x-NavVlU,25703 -setuptools/command/install.py,sha256=UynjFBgRyyHrDZRVAmXrXG0vChJAMx-sxnOO3JoAzVo,4906 -setuptools/command/install_egg_info.py,sha256=pgZ64m_-kmtx3QISHN_kRtMiZC_Y8x1Nr1j38jXEbXQ,2226 -setuptools/command/install_lib.py,sha256=Uz42McsyHZAjrB6cw9E7Bz0xsaTbzxnM1PI9CBhiPtE,3875 -setuptools/command/install_scripts.py,sha256=APFFpt_lYUEo-viMtpXr-Hkwycwq8knTxSTNUu_TwHo,2612 -setuptools/command/launcher 
manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 -setuptools/command/py36compat.py,sha256=7yLWzQj179Enx3pJ8V1cDDCzeLMFMd9XJXlK-iZTq5Y,4946 -setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468 -setuptools/command/rotate.py,sha256=SvsQPasezIojPjvMnfkqzh8P0U0tCj0daczF8uc3NQM,2128 -setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 -setuptools/command/sdist.py,sha256=kTpUQpmT9XSE6wR3ClCNCH7eD7eNAeywIISYIRIYOpU,6422 -setuptools/command/setopt.py,sha256=okxhqD1NM1nQlbSVDCNv6P7Y7g680sc2r-tUW7wPH1Y,5086 -setuptools/command/test.py,sha256=j3NeJ3_ADgkCvWHfuvMrZmSFoTQFngp9Lg3XYNkyZlg,8133 -setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462 -setuptools/command/upload_docs.py,sha256=idT7bhLNblM_I9STmIF_psXRIooDN43gq0QIxt2GDpA,7220 -setuptools/config.py,sha256=O-T_28163qkEeaX8bLgqJLuOLYur15cC2_xpA0RENfM,23153 -setuptools/dep_util.py,sha256=BDx1BkzNQntvAB4alypHbW5UVBzjqths000PrUL4Zqc,949 -setuptools/depends.py,sha256=QYQIadr5DwLxPzkErhNt5hmRhvGhWxoXZMRXCm_jcQ0,5499 -setuptools/dist.py,sha256=TdhjsMb1QrRXkmgGiQAc5PSOrDZg0G45A67s4OFxMeU,44087 -setuptools/errors.py,sha256=t4Rm85eXm71Ti0-PO1gAQMRK3V7NN3x1tcbcw0-xGSI,1555 -setuptools/extension.py,sha256=wOWVz6qk-B3qx-O3vNp2gUZ2ItDEoB4MXIDNU_HCdiU,1675 -setuptools/extern/__init__.py,sha256=a3PZqKDn2rmMHybkWEtO0i8eGtGNoAu1HkULZlZRXdg,2503 -setuptools/extern/__pycache__/__init__.cpython-39.pyc,, -setuptools/glob.py,sha256=1oZjbfjAHSXbgdhSuR6YGU8jKob9L8NtEmBYqcPTLYk,4873 -setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 -setuptools/gui-arm64.exe,sha256=TEFnOKDi-mq3ZszxqbCoCXTnM_lhUWjdIqBpr6fVs40,137728 -setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/installer.py,sha256=s6DQfsoICBJxbUqbduhOJtl1oG0S4yegRCg3EAs0i3M,3824 -setuptools/launch.py,sha256=TyPT-Ic1T2EnYvGO26gfNRP4ysBlrhpbRjQxWsiO414,812 -setuptools/logging.py,sha256=npS_AaQd7a-4CVqxC8CKKkwQqLNvSH__pPO94BOB8TY,1148 -setuptools/monkey.py,sha256=0e3HdVKXHL415O7np-AUqhEFXPPuDdJKbI47chQ_DE4,5217 -setuptools/msvc.py,sha256=3LLt938e6OR7wWPzIvCQu7LCWZSIKqoKV6w3r8jV3kY,50561 -setuptools/namespaces.py,sha256=PMqGVPXPYQgjUTvEg9bGccRAkIODrQ6NmsDg_fwErwI,3093 -setuptools/package_index.py,sha256=iajBWGn6Kh6ti4xaiy5fmvG6_SlqLxJ1jV-RK9wnCHs,40055 -setuptools/py34compat.py,sha256=KYOd6ybRxjBW8NJmYD8t_UyyVmysppFXqHpFLdslGXU,245 -setuptools/sandbox.py,sha256=mR83i-mu-ZUU_7TaMgYCeRSyzkqv8loJ_GR9xhS2DDw,14348 -setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218 -setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 -setuptools/unicode_utils.py,sha256=aOOFo4JGwAsiBttGYDsqFS7YqWQeZ2j6DWiCuctR_00,941 -setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144 -setuptools/wheel.py,sha256=4yoNrbyYshwcbLyV5hKksVJNnIXWV2pde_4sUYaxAcY,8274 -setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/WHEEL b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/entry_points.txt 
b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/entry_points.txt deleted file mode 100644 index 18041f6..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/entry_points.txt +++ /dev/null @@ -1,55 +0,0 @@ -[distutils.commands] -alias = setuptools.command.alias:alias -bdist_egg = setuptools.command.bdist_egg:bdist_egg -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -build_clib = setuptools.command.build_clib:build_clib -build_ext = setuptools.command.build_ext:build_ext -build_py = setuptools.command.build_py:build_py -develop = setuptools.command.develop:develop -dist_info = setuptools.command.dist_info:dist_info -easy_install = setuptools.command.easy_install:easy_install -egg_info = setuptools.command.egg_info:egg_info -install = setuptools.command.install:install -install_egg_info = setuptools.command.install_egg_info:install_egg_info -install_lib = setuptools.command.install_lib:install_lib -install_scripts = setuptools.command.install_scripts:install_scripts -rotate = setuptools.command.rotate:rotate -saveopts = setuptools.command.saveopts:saveopts -sdist = setuptools.command.sdist:sdist -setopt = setuptools.command.setopt:setopt -test = setuptools.command.test:test -upload_docs = setuptools.command.upload_docs:upload_docs - -[distutils.setup_keywords] -dependency_links = setuptools.dist:assert_string_list -eager_resources = setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -exclude_package_data = setuptools.dist:check_package_data -extras_require = setuptools.dist:check_extras -include_package_data = setuptools.dist:assert_bool -install_requires = setuptools.dist:check_requirements -namespace_packages = setuptools.dist:check_nsp -package_data = setuptools.dist:check_package_data -packages = setuptools.dist:check_packages -python_requires = setuptools.dist:check_specifier -setup_requires = setuptools.dist:check_requirements -test_loader = setuptools.dist:check_importable -test_runner = setuptools.dist:check_importable -test_suite = setuptools.dist:check_test_suite -tests_require = setuptools.dist:check_requirements -use_2to3 = setuptools.dist:invalid_unless_false -zip_safe = setuptools.dist:assert_bool - -[egg_info.writers] -PKG-INFO = setuptools.command.egg_info:write_pkg_info -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -depends.txt = setuptools.command.egg_info:warn_depends_obsolete -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -top_level.txt = setuptools.command.egg_info:write_toplevel_names - -[setuptools.finalize_distribution_options] -keywords = setuptools.dist:Distribution._finalize_setup_keywords -parent_finalize = setuptools.dist:_Distribution.finalize_options diff --git a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/top_level.txt b/venv/Lib/site-packages/setuptools-60.9.3.dist-info/top_level.txt deleted file mode 100644 index b5ac107..0000000 --- a/venv/Lib/site-packages/setuptools-60.9.3.dist-info/top_level.txt +++ /dev/null @@ -1,3 +0,0 @@ -_distutils_hack -pkg_resources -setuptools diff --git a/venv/Lib/site-packages/setuptools/__init__.py b/venv/Lib/site-packages/setuptools/__init__.py deleted file mode 100644 index 06991b6..0000000 --- a/venv/Lib/site-packages/setuptools/__init__.py +++ /dev/null @@ -1,244 +0,0 @@ -"""Extensions to the 'distutils' for large or complex 
distributions""" - -from fnmatch import fnmatchcase -import functools -import os -import re - -import _distutils_hack.override # noqa: F401 - -import distutils.core -from distutils.errors import DistutilsOptionError -from distutils.util import convert_path - -from ._deprecation_warning import SetuptoolsDeprecationWarning - -import setuptools.version -from setuptools.extension import Extension -from setuptools.dist import Distribution -from setuptools.depends import Require -from . import monkey -from . import logging - - -__all__ = [ - 'setup', - 'Distribution', - 'Command', - 'Extension', - 'Require', - 'SetuptoolsDeprecationWarning', - 'find_packages', - 'find_namespace_packages', -] - -__version__ = setuptools.version.__version__ - -bootstrap_install_from = None - - -class PackageFinder: - """ - Generate a list of all Python packages found within a directory - """ - - @classmethod - def find(cls, where='.', exclude=(), include=('*',)): - """Return a list all Python packages found within directory 'where' - - 'where' is the root directory which will be searched for packages. It - should be supplied as a "cross-platform" (i.e. URL-style) path; it will - be converted to the appropriate local path syntax. - - 'exclude' is a sequence of package names to exclude; '*' can be used - as a wildcard in the names, such that 'foo.*' will exclude all - subpackages of 'foo' (but not 'foo' itself). - - 'include' is a sequence of package names to include. If it's - specified, only the named packages will be included. If it's not - specified, all found packages will be included. 'include' can contain - shell style wildcard patterns just like 'exclude'. - """ - - return list( - cls._find_packages_iter( - convert_path(where), - cls._build_filter('ez_setup', '*__pycache__', *exclude), - cls._build_filter(*include), - ) - ) - - @classmethod - def _find_packages_iter(cls, where, exclude, include): - """ - All the packages found in 'where' that pass the 'include' filter, but - not the 'exclude' filter. - """ - for root, dirs, files in os.walk(where, followlinks=True): - # Copy dirs to iterate over it, then empty dirs. - all_dirs = dirs[:] - dirs[:] = [] - - for dir in all_dirs: - full_path = os.path.join(root, dir) - rel_path = os.path.relpath(full_path, where) - package = rel_path.replace(os.path.sep, '.') - - # Skip directory trees that are not valid packages - if '.' in dir or not cls._looks_like_package(full_path): - continue - - # Should this package be included? - if include(package) and not exclude(package): - yield package - - # Keep searching subdirectories, as there may be more packages - # down there, even if the parent was excluded. - dirs.append(dir) - - @staticmethod - def _looks_like_package(path): - """Does a directory look like a package?""" - return os.path.isfile(os.path.join(path, '__init__.py')) - - @staticmethod - def _build_filter(*patterns): - """ - Given a list of patterns, return a callable that will be true only if - the input matches at least one of the patterns. - """ - return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) - - -class PEP420PackageFinder(PackageFinder): - @staticmethod - def _looks_like_package(path): - return True - - -find_packages = PackageFinder.find -find_namespace_packages = PEP420PackageFinder.find - - -def _install_setup_requires(attrs): - # Note: do not use `setuptools.Distribution` directly, as - # our PEP 517 backend patch `distutils.core.Distribution`. 
- class MinimalDistribution(distutils.core.Distribution): - """ - A minimal version of a distribution for supporting the - fetch_build_eggs interface. - """ - - def __init__(self, attrs): - _incl = 'dependency_links', 'setup_requires' - filtered = {k: attrs[k] for k in set(_incl) & set(attrs)} - super().__init__(filtered) - - def finalize_options(self): - """ - Disable finalize_options to avoid building the working set. - Ref #2158. - """ - - dist = MinimalDistribution(attrs) - - # Honor setup.cfg's options. - dist.parse_config_files(ignore_option_errors=True) - if dist.setup_requires: - dist.fetch_build_eggs(dist.setup_requires) - - -def setup(**attrs): - # Make sure we have any requirements needed to interpret 'attrs'. - logging.configure() - _install_setup_requires(attrs) - return distutils.core.setup(**attrs) - - -setup.__doc__ = distutils.core.setup.__doc__ - - -_Command = monkey.get_unpatched(distutils.core.Command) - - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - """ - Construct the command for dist, updating - vars(self) with any keyword parameters. - """ - super().__init__(dist) - vars(self).update(kw) - - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, str): - raise DistutilsOptionError( - "'%s' must be a %s (got `%s`)" % (option, what, val) - ) - return val - - def ensure_string_list(self, option): - r"""Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, str): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - ok = all(isinstance(v, str) for v in val) - else: - ok = False - if not ok: - raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" % (option, val) - ) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - vars(cmd).update(kw) - return cmd - - -def _find_all_simple(path): - """ - Find all files under 'path' - """ - results = ( - os.path.join(base, file) - for base, dirs, files in os.walk(path, followlinks=True) - for file in files - ) - return filter(os.path.isfile, results) - - -def findall(dir=os.curdir): - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. 
- """ - files = _find_all_simple(dir) - if dir == os.curdir: - make_rel = functools.partial(os.path.relpath, start=dir) - files = map(make_rel, files) - return list(files) - - -class sic(str): - """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)""" - - -# Apply monkey patches -monkey.patch_all() diff --git a/venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 129e035..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-39.pyc deleted file mode 100644 index e863a01..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-39.pyc deleted file mode 100644 index c5a5b67..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_entry_points.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-39.pyc deleted file mode 100644 index fe0d7e7..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_imp.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-39.pyc deleted file mode 100644 index 4d0fbb3..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_importlib.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-39.pyc deleted file mode 100644 index 48da402..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_itertools.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-39.pyc deleted file mode 100644 index 2653305..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_path.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-39.pyc deleted file mode 100644 index 97a22c9..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/_reqs.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-39.pyc deleted file mode 100644 index 3a36ffe..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/archive_util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/build_meta.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/build_meta.cpython-39.pyc deleted file mode 100644 index 57ee7c4..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/build_meta.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/config.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/__pycache__/config.cpython-39.pyc deleted file mode 100644 index 5fe67eb..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/config.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/dep_util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/dep_util.cpython-39.pyc deleted file mode 100644 index bac0d4b..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/dep_util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-39.pyc deleted file mode 100644 index 55af655..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/depends.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-39.pyc deleted file mode 100644 index d70ad21..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/dist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-39.pyc deleted file mode 100644 index c0cd018..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/errors.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-39.pyc deleted file mode 100644 index 1124d75..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/extension.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-39.pyc deleted file mode 100644 index ba065c8..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/glob.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/installer.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/installer.cpython-39.pyc deleted file mode 100644 index cfeac79..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/installer.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/launch.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/launch.cpython-39.pyc deleted file mode 100644 index e832b0c..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/launch.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-39.pyc deleted file mode 100644 index 6dfc920..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/logging.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-39.pyc deleted file mode 100644 index 2a07e55..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/monkey.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-39.pyc deleted file mode 100644 index 2192e1e..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/msvc.cpython-39.pyc and 
/dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-39.pyc deleted file mode 100644 index ded41f3..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/namespaces.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/package_index.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/package_index.cpython-39.pyc deleted file mode 100644 index c80df47..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/package_index.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/py34compat.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/py34compat.cpython-39.pyc deleted file mode 100644 index 8d61458..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/py34compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/sandbox.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/sandbox.cpython-39.pyc deleted file mode 100644 index 54ad0e8..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/sandbox.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-39.pyc deleted file mode 100644 index f5c0c17..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/unicode_utils.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/version.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/version.cpython-39.pyc deleted file mode 100644 index 86ae401..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/version.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/wheel.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/wheel.cpython-39.pyc deleted file mode 100644 index 19b57ca..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/wheel.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/__pycache__/windows_support.cpython-39.pyc b/venv/Lib/site-packages/setuptools/__pycache__/windows_support.cpython-39.pyc deleted file mode 100644 index b6417b0..0000000 Binary files a/venv/Lib/site-packages/setuptools/__pycache__/windows_support.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_deprecation_warning.py b/venv/Lib/site-packages/setuptools/_deprecation_warning.py deleted file mode 100644 index 086b64d..0000000 --- a/venv/Lib/site-packages/setuptools/_deprecation_warning.py +++ /dev/null @@ -1,7 +0,0 @@ -class SetuptoolsDeprecationWarning(Warning): - """ - Base class for warning deprecations in ``setuptools`` - - This class is not derived from ``DeprecationWarning``, and as such is - visible by default. - """ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__init__.py b/venv/Lib/site-packages/setuptools/_distutils/__init__.py deleted file mode 100644 index 8fd493b..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -"""distutils - -The main package for the Python Module Distribution Utilities. Normally -used from a setup script as - - from distutils.core import setup - - setup (...) 
-""" - -import sys -import importlib - -__version__ = sys.version[:sys.version.index(' ')] - - -try: - # Allow Debian and pkgsrc (only) to customize system - # behavior. Ref pypa/distutils#2 and pypa/distutils#16. - # This hook is deprecated and no other environments - # should use it. - importlib.import_module('_distutils_system_mod') -except ImportError: - pass diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 161156a..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-39.pyc deleted file mode 100644 index 079a56c..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-39.pyc deleted file mode 100644 index de6c3ea..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-39.pyc deleted file mode 100644 index 1662aab..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-39.pyc deleted file mode 100644 index 88708f1..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-39.pyc deleted file mode 100644 index a4e9d31..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-39.pyc deleted file mode 100644 index a8ce170..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-39.pyc deleted file mode 100644 index 98dbb2e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/config.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-39.pyc deleted file mode 100644 index c75120f..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/core.cpython-39.pyc and /dev/null differ diff --git 
a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-39.pyc deleted file mode 100644 index f9be736..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-39.pyc deleted file mode 100644 index d26b5e0..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/debug.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-39.pyc deleted file mode 100644 index 26cadd7..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-39.pyc deleted file mode 100644 index 2a77431..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-39.pyc deleted file mode 100644 index f1b2600..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/dist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-39.pyc deleted file mode 100644 index fc11e86..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/errors.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-39.pyc deleted file mode 100644 index e95d14e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/extension.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-39.pyc deleted file mode 100644 index 5a8e8f8..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-39.pyc deleted file mode 100644 index aa26619..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-39.pyc deleted file mode 100644 index b4182b0..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-39.pyc deleted file mode 100644 index 5be1053..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/log.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-39.pyc deleted file mode 100644 index 3860965..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-39.pyc deleted file mode 100644 index 6036e83..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-39.pyc deleted file mode 100644 index f26785f..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-39.pyc deleted file mode 100644 index 247b5a9..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-39.pyc deleted file mode 100644 index 613c925..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-39.pyc deleted file mode 100644 index e1b0cbe..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-39.pyc deleted file mode 100644 index f73fa2d..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-39.pyc deleted file mode 100644 index 8b84e37..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-39.pyc deleted file mode 100644 index c08b49a..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/util.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/version.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/version.cpython-39.pyc deleted file mode 100644 
index 2da932b..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/version.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-39.pyc deleted file mode 100644 index e033151..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/_collections.py b/venv/Lib/site-packages/setuptools/_distutils/_collections.py deleted file mode 100644 index 98fce80..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/_collections.py +++ /dev/null @@ -1,56 +0,0 @@ -import collections -import itertools - - -# from jaraco.collections 3.5.1 -class DictStack(list, collections.abc.Mapping): - """ - A stack of dictionaries that behaves as a view on those dictionaries, - giving preference to the last. - - >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) - >>> stack['a'] - 2 - >>> stack['b'] - 2 - >>> stack['c'] - 2 - >>> len(stack) - 3 - >>> stack.push(dict(a=3)) - >>> stack['a'] - 3 - >>> set(stack.keys()) == set(['a', 'b', 'c']) - True - >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) - True - >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) - True - >>> d = stack.pop() - >>> stack['a'] - 2 - >>> d = stack.pop() - >>> stack['a'] - 1 - >>> stack.get('b', None) - >>> 'c' in stack - True - """ - - def __iter__(self): - dicts = list.__iter__(self) - return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) - - def __getitem__(self, key): - for scope in reversed(tuple(list.__iter__(self))): - if key in scope: - return scope[key] - raise KeyError(key) - - push = list.append - - def __contains__(self, other): - return collections.abc.Mapping.__contains__(self, other) - - def __len__(self): - return len(list(iter(self))) diff --git a/venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py deleted file mode 100644 index f2f801c..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py +++ /dev/null @@ -1,561 +0,0 @@ -"""distutils._msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for Microsoft Visual Studio 2015. - -The module is compatible with VS 2015 and later. You can find legacy support -for older versions in distutils.msvc9compiler and distutils.msvccompiler. 
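The DictStack doctest in the removed _collections.py above describes a view over several dicts in which the most recently pushed mapping wins. As an illustrative aside (not the removed class itself), collections.ChainMap from the standard library gives roughly the same behaviour once you account for its left-to-right search order; a minimal sketch:

from collections import ChainMap

# Rough stand-in for the DictStack doctest above: the most recently
# pushed mapping wins on lookup. ChainMap searches left to right, so the
# "later" dict is listed first.
stack = ChainMap(dict(b=2, a=2), dict(a=1, c=2))
assert stack['a'] == 2 and stack['b'] == 2 and stack['c'] == 2
assert len(stack) == 3                 # unique keys across all maps

stack = stack.new_child(dict(a=3))     # "push"
assert stack['a'] == 3

stack = stack.parents                  # "pop"
assert stack['a'] == 2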
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS 2005 and VS 2008 by Christian Heimes -# ported to VS 2015 by Steve Dower - -import os -import subprocess -import contextlib -import warnings -import unittest.mock -with contextlib.suppress(ImportError): - import winreg - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform - -from itertools import count - -def _find_vc2015(): - try: - key = winreg.OpenKeyEx( - winreg.HKEY_LOCAL_MACHINE, - r"Software\Microsoft\VisualStudio\SxS\VC7", - access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY - ) - except OSError: - log.debug("Visual C++ is not registered") - return None, None - - best_version = 0 - best_dir = None - with key: - for i in count(): - try: - v, vc_dir, vt = winreg.EnumValue(key, i) - except OSError: - break - if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): - try: - version = int(float(v)) - except (ValueError, TypeError): - continue - if version >= 14 and version > best_version: - best_version, best_dir = version, vc_dir - return best_version, best_dir - -def _find_vc2017(): - """Returns "15, path" based on the result of invoking vswhere.exe - If no install is found, returns "None, None" - - The version is returned to avoid unnecessarily changing the function - result. It may be ignored when the path is not None. - - If vswhere.exe is not available, by definition, VS 2017 is not - installed. - """ - root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") - if not root: - return None, None - - try: - path = subprocess.check_output([ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", "installationPath", - "-products", "*", - ], encoding="mbcs", errors="strict").strip() - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - return None, None - - path = os.path.join(path, "VC", "Auxiliary", "Build") - if os.path.isdir(path): - return 15, path - - return None, None - -PLAT_SPEC_TO_RUNTIME = { - 'x86' : 'x86', - 'x86_amd64' : 'x64', - 'x86_arm' : 'arm', - 'x86_arm64' : 'arm64' -} - -def _find_vcvarsall(plat_spec): - # bpo-38597: Removed vcruntime return value - _, best_dir = _find_vc2017() - - if not best_dir: - best_version, best_dir = _find_vc2015() - - if not best_dir: - log.debug("No suitable Visual C++ version found") - return None, None - - vcvarsall = os.path.join(best_dir, "vcvarsall.bat") - if not os.path.isfile(vcvarsall): - log.debug("%s cannot be found", vcvarsall) - return None, None - - return vcvarsall, None - -def _get_vc_env(plat_spec): - if os.getenv("DISTUTILS_USE_SDK"): - return { - key.lower(): value - for key, value in os.environ.items() - } - - vcvarsall, _ = _find_vcvarsall(plat_spec) - if not vcvarsall: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - - try: - out = subprocess.check_output( - 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), - stderr=subprocess.STDOUT, - ).decode('utf-16le', errors='replace') - except subprocess.CalledProcessError as exc: - log.error(exc.output) - raise DistutilsPlatformError("Error executing {}" - .format(exc.cmd)) - - env = { - key.lower(): value - for key, _, value in - 
(line.partition('=') for line in out.splitlines()) - if key and value - } - - return env - -def _find_exe(exe, paths=None): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - if not paths: - paths = os.getenv('path').split(os.pathsep) - for p in paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - return exe - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Always cross-compile from x86 to work with the -# lighter-weight MSVC installs that do not include native 64-bit tools. -PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'x86_amd64', - 'win-arm32' : 'x86_arm', - 'win-arm64' : 'x86_arm64' -} - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__(self, verbose=0, dry_run=0, force=0): - super().__init__(verbose, dry_run, force) - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - if plat_name not in PLAT_TO_VCVARS: - raise DistutilsPlatformError("--plat-name must be one of {}" - .format(tuple(PLAT_TO_VCVARS))) - - # Get the vcvarsall.bat spec for the requested platform. 
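The _get_vc_env helper removed above captures the environment printed by running vcvarsall.bat followed by "set", then lower-cases the keys. A small, platform-independent sketch of just that parsing step follows; the sample output string is invented for illustration only:

# Parse "set"-style output into a lower-cased environment dict, mirroring
# the comprehension used by the removed _get_vc_env (sample data is made up).
sample = "Path=C:\\VC\\bin;C:\\Windows\nINCLUDE=C:\\VC\\include\nNOT_A_PAIR\n"

env = {
    key.lower(): value
    for key, _, value in (line.partition('=') for line in sample.splitlines())
    if key and value
}

assert env['path'].startswith('C:\\VC\\bin')
assert 'include' in env and 'not_a_pair' not in env  # lines without '=' are dropped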
- plat_spec = PLAT_TO_VCVARS[plat_name] - - vc_env = _get_vc_env(plat_spec) - if not vc_env: - raise DistutilsPlatformError("Unable to find a compatible " - "Visual Studio installation.") - - self._paths = vc_env.get('path', '') - paths = self._paths.split(os.pathsep) - self.cc = _find_exe("cl.exe", paths) - self.linker = _find_exe("link.exe", paths) - self.lib = _find_exe("lib.exe", paths) - self.rc = _find_exe("rc.exe", paths) # resource compiler - self.mc = _find_exe("mc.exe", paths) # message compiler - self.mt = _find_exe("mt.exe", paths) # message compiler - - for dir in vc_env.get('include', '').split(os.pathsep): - if dir: - self.add_include_dir(dir.rstrip(os.sep)) - - for dir in vc_env.get('lib', '').split(os.pathsep): - if dir: - self.add_library_dir(dir.rstrip(os.sep)) - - self.preprocess_options = None - # bpo-38597: Always compile with dynamic linking - # Future releases of Python 3.x will include all past - # versions of vcruntime*.dll for compatibility. - self.compile_options = [ - '/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD' - ] - - self.compile_options_debug = [ - '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' - ] - - ldflags = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG' - ] - - ldflags_debug = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' - ] - - self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] - self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] - self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_static = [*ldflags] - self.ldflags_static_debug = [*ldflags_debug] - - self._ldflags = { - (CCompiler.EXECUTABLE, None): self.ldflags_exe, - (CCompiler.EXECUTABLE, False): self.ldflags_exe, - (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, - (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, - (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, - (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, - (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, - (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, - (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, - } - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - ext_map = { - **{ext: self.obj_extension for ext in self.src_extensions}, - **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, - } - - output_dir = output_dir or '' - - def make_out_path(p): - base, ext = os.path.splitext(p) - if strip_dir: - base = os.path.basename(base) - else: - _, base = os.path.splitdrive(base) - if base.startswith((os.path.sep, os.path.altsep)): - base = base[1:] - try: - # XXX: This may produce absurdly long paths. We should check - # the length of the result and trim base until we fit within - # 260 characters. 
- return os.path.join(output_dir, base + ext_map[ext]) - except LookupError: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError("Don't know how to compile {}".format(p)) - - return list(map(make_out_path, source_filenames)) - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - - add_cpp_opts = False - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - add_cpp_opts = True - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) - base, _ = os.path.splitext(os.path.basename (src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc, "/fo" + obj, rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile {} to {}" - .format(src, obj)) - - args = [self.cc] + compile_opts + pp_opts - if add_cpp_opts: - args.append('/EHsc') - args.append(input_opt) - args.append("/Fo" + obj) - args.extend(extra_postargs) - - try: - self.spawn(args) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - if runtime_library_dirs: - self.warn("I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ldflags = self._ldflags[target_desc, debug] - - export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - output_dir = os.path.dirname(os.path.abspath(output_filename)) - self.mkpath(output_dir) - try: - log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def spawn(self, cmd): - env = dict(os.environ, PATH=self._paths) - with self._fallback_spawn(cmd, env) as fallback: - return super().spawn(cmd, env=env) - return fallback.value - - @contextlib.contextmanager - def _fallback_spawn(self, cmd, env): - """ - Discovered in pypa/distutils#15, some tools monkeypatch the compiler, - so the 'env' kwarg causes a TypeError. Detect this condition and - restore the legacy, unsafe behavior. - """ - bag = type('Bag', (), {})() - try: - yield bag - except TypeError as exc: - if "unexpected keyword argument 'env'" not in str(exc): - raise - else: - return - warnings.warn( - "Fallback spawn triggered. Please update distutils monkeypatch.") - with unittest.mock.patch.dict('os.environ', env): - bag.value = super().spawn(cmd) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. 
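The spawn/_fallback_spawn pair removed above works around monkeypatched spawn implementations that reject the env keyword: try the modern call, and if it raises the telltale TypeError, rerun it with os.environ temporarily patched. A compact sketch of the same defensive pattern, using a hypothetical callee name (run) purely for illustration:

import os
import unittest.mock
import warnings

def call_with_env_fallback(run, cmd, env):
    # Try the modern signature first; if the callee rejects the 'env'
    # keyword, fall back to temporarily patching os.environ instead
    # (same idea as the removed MSVCCompiler._fallback_spawn).
    try:
        return run(cmd, env=env)
    except TypeError as exc:
        if "unexpected keyword argument 'env'" not in str(exc):
            raise
    warnings.warn("Fallback triggered; callee does not accept 'env'.")
    with unittest.mock.patch.dict(os.environ, env):
        return run(cmd)

# A legacy callee without an 'env' parameter still sees the values:
legacy = lambda cmd: os.environ.get('DEMO_VAR')
assert call_with_env_fallback(legacy, ['echo'], {'DEMO_VAR': 'x'}) == 'x'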
- - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC") - - def library_option(self, lib): - return self.library_filename(lib) - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.isfile(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/venv/Lib/site-packages/setuptools/_distutils/archive_util.py b/venv/Lib/site-packages/setuptools/_distutils/archive_util.py deleted file mode 100644 index 565a311..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/archive_util.py +++ /dev/null @@ -1,256 +0,0 @@ -"""distutils.archive_util - -Utility functions for creating archive files (tarballs, zip files, -that sort of thing).""" - -import os -from warnings import warn -import sys - -try: - import zipfile -except ImportError: - zipfile = None - - -from distutils.errors import DistutilsExecError -from distutils.spawn import spawn -from distutils.dir_util import mkpath -from distutils import log - -try: - from pwd import getpwnam -except ImportError: - getpwnam = None - -try: - from grp import getgrnam -except ImportError: - getgrnam = None - -def _get_gid(name): - """Returns a gid, given a group name.""" - if getgrnam is None or name is None: - return None - try: - result = getgrnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def _get_uid(name): - """Returns an uid, given a user name.""" - if getpwnam is None or name is None: - return None - try: - result = getpwnam(name) - except KeyError: - result = None - if result is not None: - return result[2] - return None - -def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None): - """Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or - None. ("compress" will be deprecated in Python 3.2) - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_dir' + ".tar", possibly plus - the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). - - Returns the output filename. 
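The make_tarball docstring above explains how the removed helper names the archive and can override owner and group via a tarinfo filter. A short sketch of the same filter idea using the standard tarfile module directly (equivalent in spirit only; it sets the name fields rather than numeric ids, and the paths are placeholders):

import tarfile

def make_gz_tarball(base_name, base_dir, owner=None, group=None):
    # Write base_dir into '<base_name>.tar.gz', optionally forcing the
    # recorded owner/group names, like make_tarball(..., compress='gzip').
    archive_name = base_name + '.tar.gz'

    def set_owner(tarinfo):
        if owner is not None:
            tarinfo.uname = owner
        if group is not None:
            tarinfo.gname = group
        return tarinfo

    with tarfile.open(archive_name, 'w:gz') as tar:
        tar.add(base_dir, filter=set_owner)
    return archive_name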
- """ - tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', - 'compress': ''} - compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', - 'compress': '.Z'} - - # flags for compression program, each element of list will be an argument - if compress is not None and compress not in compress_ext.keys(): - raise ValueError( - "bad value for 'compress': must be None, 'gzip', 'bzip2', " - "'xz' or 'compress'") - - archive_name = base_name + '.tar' - if compress != 'compress': - archive_name += compress_ext.get(compress, '') - - mkpath(os.path.dirname(archive_name), dry_run=dry_run) - - # creating the tarball - import tarfile # late import so Python build itself doesn't break - - log.info('Creating tar archive') - - uid = _get_uid(owner) - gid = _get_gid(group) - - def _set_uid_gid(tarinfo): - if gid is not None: - tarinfo.gid = gid - tarinfo.gname = group - if uid is not None: - tarinfo.uid = uid - tarinfo.uname = owner - return tarinfo - - if not dry_run: - tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) - try: - tar.add(base_dir, filter=_set_uid_gid) - finally: - tar.close() - - # compression using `compress` - if compress == 'compress': - warn("'compress' will be deprecated.", PendingDeprecationWarning) - # the option varies depending on the platform - compressed_name = archive_name + compress_ext[compress] - if sys.platform == 'win32': - cmd = [compress, archive_name, compressed_name] - else: - cmd = [compress, '-f', archive_name] - spawn(cmd, dry_run=dry_run) - return compressed_name - - return archive_name - -def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): - """Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises DistutilsExecError. Returns the name of the output zip - file. - """ - zip_filename = base_name + ".zip" - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - - # If zipfile module is not available, try spawning an external - # 'zip' command. - if zipfile is None: - if verbose: - zipoptions = "-r" - else: - zipoptions = "-rq" - - try: - spawn(["zip", zipoptions, zip_filename, base_dir], - dry_run=dry_run) - except DistutilsExecError: - # XXX really should distinguish between "couldn't find - # external 'zip' command" and "zip failed". 
- raise DistutilsExecError(("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename) - - else: - log.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) - - if not dry_run: - try: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) - except RuntimeError: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_STORED) - - with zip: - if base_dir != os.curdir: - path = os.path.normpath(os.path.join(base_dir, '')) - zip.write(path, path) - log.info("adding '%s'", path) - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in dirnames: - path = os.path.normpath(os.path.join(dirpath, name, '')) - zip.write(path, path) - log.info("adding '%s'", path) - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - if os.path.isfile(path): - zip.write(path, path) - log.info("adding '%s'", path) - - return zip_filename - -ARCHIVE_FORMATS = { - 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), - 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), - 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), - 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), - 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (make_zipfile, [],"ZIP file") - } - -def check_archive_formats(formats): - """Returns the first format from the 'format' list that is unknown. - - If all formats are known, returns None - """ - for format in formats: - if format not in ARCHIVE_FORMATS: - return format - return None - -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None): - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "ztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. 
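The make_archive function above dispatches on the ARCHIVE_FORMATS table; its maintained standard-library counterpart is shutil.make_archive, which takes the same base_name/format/root_dir/base_dir arguments. A usage sketch, where the 'dist/project-1.0' and 'project' names are placeholders:

import shutil

# Build dist/project-1.0.tar.gz containing the 'project' directory,
# archiving relative to the current directory. Names are illustrative only.
archive_path = shutil.make_archive(
    'dist/project-1.0',   # base_name: the format-specific extension is appended
    'gztar',              # also accepts 'zip', 'bztar', 'xztar', 'tar'
    root_dir='.',
    base_dir='project',
)
print(archive_path)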
- """ - save_cwd = os.getcwd() - if root_dir is not None: - log.debug("changing into '%s'", root_dir) - base_name = os.path.abspath(base_name) - if not dry_run: - os.chdir(root_dir) - - if base_dir is None: - base_dir = os.curdir - - kwargs = {'dry_run': dry_run} - - try: - format_info = ARCHIVE_FORMATS[format] - except KeyError: - raise ValueError("unknown archive format '%s'" % format) - - func = format_info[0] - for arg, val in format_info[1]: - kwargs[arg] = val - - if format != 'zip': - kwargs['owner'] = owner - kwargs['group'] = group - - try: - filename = func(base_name, base_dir, **kwargs) - finally: - if root_dir is not None: - log.debug("changing back to '%s'", save_cwd) - os.chdir(save_cwd) - - return filename diff --git a/venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py b/venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py deleted file mode 100644 index 2eb6d2e..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py +++ /dev/null @@ -1,393 +0,0 @@ -"""distutils.bcppcompiler - -Contains BorlandCCompiler, an implementation of the abstract CCompiler class -for the Borland C++ compiler. -""" - -# This implementation by Lyle Johnson, based on the original msvccompiler.py -# module and using the directions originally published by Gordon Williams. - -# XXX looks like there's a LOT of overlap between these two classes: -# someone should sit down and factor out the common code as -# WindowsCCompiler! --GPW - - -import os -from distutils.errors import \ - DistutilsExecError, \ - CompileError, LibError, LinkError, UnknownFileError -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options -from distutils.file_util import write_file -from distutils.dep_util import newer -from distutils import log - -class BCPPCompiler(CCompiler) : - """Concrete class that implements an interface to the Borland C/C++ - compiler, as defined by the CCompiler abstract class. - """ - - compiler_type = 'bcpp' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = _c_extensions + _cpp_extensions - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__ (self, - verbose=0, - dry_run=0, - force=0): - - super().__init__(verbose, dry_run, force) - - # These executables are assumed to all be in the path. - # Borland doesn't seem to use any special registry settings to - # indicate their installation locations. 
- - self.cc = "bcc32.exe" - self.linker = "ilink32.exe" - self.lib = "tlib.exe" - - self.preprocess_options = None - self.compile_options = ['/tWM', '/O2', '/q', '/g0'] - self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] - - self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_static = [] - self.ldflags_exe = ['/Gn', '/q', '/x'] - self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] - - - # -- Worker methods ------------------------------------------------ - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - compile_opts = extra_preargs or [] - compile_opts.append ('-c') - if debug: - compile_opts.extend (self.compile_options_debug) - else: - compile_opts.extend (self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - # XXX why do the normpath here? - src = os.path.normpath(src) - obj = os.path.normpath(obj) - # XXX _setup_compile() did a mkpath() too but before the normpath. - # Is it possible to skip the normpath? - self.mkpath(os.path.dirname(obj)) - - if ext == '.res': - # This is already a binary file -- skip it. - continue # the 'for' loop - if ext == '.rc': - # This needs to be compiled to a .res file -- do it now. - try: - self.spawn (["brcc32", "-fo", obj, src]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue # the 'for' loop - - # The next two are both for the real compiler. - if ext in self._c_extensions: - input_opt = "" - elif ext in self._cpp_extensions: - input_opt = "-P" - else: - # Unknown file type -- no extra options. The compiler - # will probably fail, but let it just in case this is a - # file the compiler recognizes even if we don't. - input_opt = "" - - output_opt = "-o" + obj - - # Compiler command line syntax is: "bcc32 [options] file(s)". - # Note that the source file names must appear at the end of - # the command line. - try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - # compile () - - - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) - - if self._need_link (objects, output_filename): - lib_args = [output_filename, '/u'] + objects - if debug: - pass # XXX what goes here? - try: - self.spawn ([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # create_static_lib () - - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - # XXX this ignores 'build_temp'! 
should follow the lead of - # msvccompiler.py - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) - - if runtime_library_dirs: - log.warn("I don't know what to do with 'runtime_library_dirs': %s", - str(runtime_library_dirs)) - - if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) - - if self._need_link (objects, output_filename): - - # Figure out linker args based on type of target. - if target_desc == CCompiler.EXECUTABLE: - startup_obj = 'c0w32' - if debug: - ld_args = self.ldflags_exe_debug[:] - else: - ld_args = self.ldflags_exe[:] - else: - startup_obj = 'c0d32' - if debug: - ld_args = self.ldflags_shared_debug[:] - else: - ld_args = self.ldflags_shared[:] - - - # Create a temporary exports file for use by the linker - if export_symbols is None: - def_file = '' - else: - head, tail = os.path.split (output_filename) - modname, ext = os.path.splitext (tail) - temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join (temp_dir, '%s.def' % modname) - contents = ['EXPORTS'] - for sym in (export_symbols or []): - contents.append(' %s=_%s' % (sym, sym)) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # Borland C++ has problems with '/' in paths - objects2 = map(os.path.normpath, objects) - # split objects in .obj and .res files - # Borland C++ needs them at different positions in the command line - objects = [startup_obj] - resources = [] - for file in objects2: - (base, ext) = os.path.splitext(os.path.normcase(file)) - if ext == '.res': - resources.append(file) - else: - objects.append(file) - - - for l in library_dirs: - ld_args.append("/L%s" % os.path.normpath(l)) - ld_args.append("/L.") # we sometimes use relative paths - - # list of object files - ld_args.extend(objects) - - # XXX the command-line syntax for Borland C++ is a bit wonky; - # certain filenames are jammed together in one big string, but - # comma-delimited. This doesn't mesh too well with the - # Unix-centric attitude (with a DOS/Windows quoting hack) of - # 'spawn()', so constructing the argument list is a bit - # awkward. Note that doing the obvious thing and jamming all - # the filenames and commas into one argument would be wrong, - # because 'spawn()' would quote any filenames with spaces in - # them. Arghghh!. Apparently it works fine as coded... 
- - # name of dll/exe file - ld_args.extend([',',output_filename]) - # no map file and start libraries - ld_args.append(',,') - - for lib in libraries: - # see if we find it and if there is a bcpp specific lib - # (xxx_bcpp.lib) - libfile = self.find_library_file(library_dirs, lib, debug) - if libfile is None: - ld_args.append(lib) - # probably a BCPP internal library -- don't warn - else: - # full name which prefers bcpp_xxx.lib over xxx.lib - ld_args.append(libfile) - - # some default libraries - ld_args.append ('import32') - ld_args.append ('cw32mt') - - # def file for export symbols - ld_args.extend([',',def_file]) - # add resource files - ld_args.append(',') - ld_args.extend(resources) - - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath (os.path.dirname (output_filename)) - try: - self.spawn ([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # link () - - # -- Miscellaneous methods ----------------------------------------- - - - def find_library_file (self, dirs, lib, debug=0): - # List of effective library names to try, in order of preference: - # xxx_bcpp.lib is better than xxx.lib - # and xxx_d.lib is better than xxx.lib if debug is set - # - # The "_bcpp" suffix is to handle a Python installation for people - # with multiple compilers (primarily Distutils hackers, I suspect - # ;-). The idea is they'd have one static library for each - # compiler they care about, since (almost?) every Windows compiler - # seems to have a different format for static libraries. - if debug: - dlib = (lib + "_d") - try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) - else: - try_names = (lib + "_bcpp", lib) - - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # overwrite the one from CCompiler to support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext == '.res': - # these can go unchanged - obj_names.append (os.path.join (output_dir, base + ext)) - elif ext == '.rc': - # these need to be compiled to .res-files - obj_names.append (os.path.join (output_dir, base + '.res')) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - # object_filenames () - - def preprocess (self, - source, - output_file=None, - macros=None, - include_dirs=None, - extra_preargs=None, - extra_postargs=None): - - (_, macros, include_dirs) = \ - self._fix_compile_args(None, macros, include_dirs) - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = ['cpp32.exe'] + pp_opts - if output_file is not None: - pp_args.append('-o' + output_file) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or the - # source file is newer 
than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - print(msg) - raise CompileError(msg) - - # preprocess() diff --git a/venv/Lib/site-packages/setuptools/_distutils/ccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/ccompiler.py deleted file mode 100644 index 777fc66..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/ccompiler.py +++ /dev/null @@ -1,1123 +0,0 @@ -"""distutils.ccompiler - -Contains CCompiler, an abstract base class that defines the interface -for the Distutils compiler abstraction model.""" - -import sys, os, re -from distutils.errors import * -from distutils.spawn import spawn -from distutils.file_util import move_file -from distutils.dir_util import mkpath -from distutils.dep_util import newer_group -from distutils.util import split_quoted, execute -from distutils import log - -class CCompiler: - """Abstract base class to define the interface that must be implemented - by real compiler classes. Also has some utility methods used by - several compiler classes. - - The basic idea behind a compiler abstraction class is that each - instance can be used for all the compile/link steps in building a - single project. Thus, attributes common to all of those compile and - link steps -- include directories, macros to define, libraries to link - against, etc. -- are attributes of the compiler instance. To allow for - variability in how individual files are treated, most of those - attributes may be varied on a per-compilation or per-link basis. - """ - - # 'compiler_type' is a class attribute that identifies this class. It - # keeps code that wants to know what kind of compiler it's dealing with - # from having to import all possible compiler classes just to do an - # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type' - # should really, really be one of the keys of the 'compiler_class' - # dictionary (see below -- used by the 'new_compiler()' factory - # function) -- authors of new compiler interface classes are - # responsible for updating 'compiler_class'! - compiler_type = None - - # XXX things not handled by this compiler abstraction model: - # * client can't provide additional options for a compiler, - # e.g. warning, optimization, debugging flags. Perhaps this - # should be the domain of concrete compiler abstraction classes - # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base - # class should have methods for the common ones. - # * can't completely override the include or library searchg - # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". - # I'm not sure how widely supported this is even by Unix - # compilers, much less on other platforms. And I'm even less - # sure how useful it is; maybe for cross-compiling, but - # support for that is a ways off. (And anyways, cross - # compilers probably have a dedicated binary with the - # right paths compiled in. I hope.) - # * can't do really freaky things with the library list/library - # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against - # different versions of libfoo.a in different locations. I - # think this is useless without the ability to null out the - # library search path anyways. - - - # Subclasses that rely on the standard filename generation methods - # implemented below should override these; see the comment near - # those methods ('object_filenames()' et. al.) 
for details: - src_extensions = None # list of strings - obj_extension = None # string - static_lib_extension = None - shared_lib_extension = None # string - static_lib_format = None # format string - shared_lib_format = None # prob. same as static_lib_format - exe_extension = None # string - - # Default language settings. language_map is used to detect a source - # file or Extension target language, checking source filenames. - # language_order is used to detect the language precedence, when deciding - # what language to use when mixing source types. For example, if some - # extension has two files with ".c" extension, and one with ".cpp", it - # is still linked as c++. - language_map = {".c" : "c", - ".cc" : "c++", - ".cpp" : "c++", - ".cxx" : "c++", - ".m" : "objc", - } - language_order = ["c++", "objc", "c"] - - def __init__(self, verbose=0, dry_run=0, force=0): - self.dry_run = dry_run - self.force = force - self.verbose = verbose - - # 'output_dir': a common output directory for object, library, - # shared object, and shared library files - self.output_dir = None - - # 'macros': a list of macro definitions (or undefinitions). A - # macro definition is a 2-tuple (name, value), where the value is - # either a string or None (no explicit value). A macro - # undefinition is a 1-tuple (name,). - self.macros = [] - - # 'include_dirs': a list of directories to search for include files - self.include_dirs = [] - - # 'libraries': a list of libraries to include in any link - # (library names, not filenames: eg. "foo" not "libfoo.a") - self.libraries = [] - - # 'library_dirs': a list of directories to search for libraries - self.library_dirs = [] - - # 'runtime_library_dirs': a list of directories to search for - # shared libraries/objects at runtime - self.runtime_library_dirs = [] - - # 'objects': a list of object files (or similar, such as explicitly - # named library files) to include on any link - self.objects = [] - - for key in self.executables.keys(): - self.set_executable(key, self.executables[key]) - - def set_executables(self, **kwargs): - """Define the executables (and options for them) that will be run - to perform the various stages of compilation. The exact set of - executables that may be specified here depends on the compiler - class (via the 'executables' class attribute), but most will have: - compiler the C/C++ compiler - linker_so linker used to create shared objects and libraries - linker_exe linker used to create binary executables - archiver static library creator - - On platforms with a command-line (Unix, DOS/Windows), each of these - is a string that will be split into executable name and (optional) - list of arguments. (Splitting the string is done similarly to how - Unix shells operate: words are delimited by spaces, but quotes and - backslashes can override this. See - 'distutils.util.split_quoted()'.) - """ - - # Note that some CCompiler implementation classes will define class - # attributes 'cpp', 'cc', etc. with hard-coded executable names; - # this is appropriate when a compiler class is for exactly one - # compiler/OS combination (eg. MSVCCompiler). Other compiler - # classes (UnixCCompiler, in particular) are driven by information - # discovered at run-time, since there are many different ways to do - # basically the same things with Unix C compilers. 
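The language_map and language_order attributes above describe how a mixed set of sources is assigned a single link language, with C++ outranking Objective-C, which outranks C. A minimal sketch of that precedence rule, written independently of the removed class:

import os

LANGUAGE_MAP = {'.c': 'c', '.cc': 'c++', '.cpp': 'c++', '.cxx': 'c++', '.m': 'objc'}
LANGUAGE_ORDER = ['c++', 'objc', 'c']   # earlier entries take precedence

def detect_language(sources):
    # Pick the highest-precedence language present among the source files,
    # mirroring the rule documented for CCompiler.language_map above.
    best = None
    for src in sources:
        lang = LANGUAGE_MAP.get(os.path.splitext(src)[1])
        if lang is None:
            continue
        if best is None or LANGUAGE_ORDER.index(lang) < LANGUAGE_ORDER.index(best):
            best = lang
    return best

assert detect_language(['a.c', 'b.c', 'c.cpp']) == 'c++'
assert detect_language(['a.c', 'b.m']) == 'objc'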
- - for key in kwargs: - if key not in self.executables: - raise ValueError("unknown executable '%s' for class %s" % - (key, self.__class__.__name__)) - self.set_executable(key, kwargs[key]) - - def set_executable(self, key, value): - if isinstance(value, str): - setattr(self, key, split_quoted(value)) - else: - setattr(self, key, value) - - def _find_macro(self, name): - i = 0 - for defn in self.macros: - if defn[0] == name: - return i - i += 1 - return None - - def _check_macro_definitions(self, definitions): - """Ensures that every element of 'definitions' is a valid macro - definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do - nothing if all definitions are OK, raise TypeError otherwise. - """ - for defn in definitions: - if not (isinstance(defn, tuple) and - (len(defn) in (1, 2) and - (isinstance (defn[1], str) or defn[1] is None)) and - isinstance (defn[0], str)): - raise TypeError(("invalid macro definition '%s': " % defn) + \ - "must be tuple (string,), (string, string), or " + \ - "(string, None)") - - - # -- Bookkeeping methods ------------------------------------------- - - def define_macro(self, name, value=None): - """Define a preprocessor macro for all compilations driven by this - compiler object. The optional parameter 'value' should be a - string; if it is not supplied, then the macro will be defined - without an explicit value and the exact outcome depends on the - compiler used (XXX true? does ANSI say anything about this?) - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro (name) - if i is not None: - del self.macros[i] - - self.macros.append((name, value)) - - def undefine_macro(self, name): - """Undefine a preprocessor macro for all compilations driven by - this compiler object. If the same macro is defined by - 'define_macro()' and undefined by 'undefine_macro()' the last call - takes precedence (including multiple redefinitions or - undefinitions). If the macro is redefined/undefined on a - per-compilation basis (ie. in the call to 'compile()'), then that - takes precedence. - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro (name) - if i is not None: - del self.macros[i] - - undefn = (name,) - self.macros.append(undefn) - - def add_include_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - header files. The compiler is instructed to search directories in - the order in which they are supplied by successive calls to - 'add_include_dir()'. - """ - self.include_dirs.append(dir) - - def set_include_dirs(self, dirs): - """Set the list of directories that will be searched to 'dirs' (a - list of strings). Overrides any preceding calls to - 'add_include_dir()'; subsequence calls to 'add_include_dir()' add - to the list passed to 'set_include_dirs()'. This does not affect - any list of standard include directories that the compiler may - search by default. - """ - self.include_dirs = dirs[:] - - def add_library(self, libname): - """Add 'libname' to the list of libraries that will be included in - all links driven by this compiler object. Note that 'libname' - should *not* be the name of a file containing a library, but the - name of the library itself: the actual filename will be inferred by - the linker, the compiler, or the compiler class (depending on the - platform). 
- - The linker will be instructed to link against libraries in the - order they were supplied to 'add_library()' and/or - 'set_libraries()'. It is perfectly valid to duplicate library - names; the linker will be instructed to link against libraries as - many times as they are mentioned. - """ - self.libraries.append(libname) - - def set_libraries(self, libnames): - """Set the list of libraries to be included in all links driven by - this compiler object to 'libnames' (a list of strings). This does - not affect any standard system libraries that the linker may - include by default. - """ - self.libraries = libnames[:] - - def add_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - libraries specified to 'add_library()' and 'set_libraries()'. The - linker will be instructed to search for libraries in the order they - are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. - """ - self.library_dirs.append(dir) - - def set_library_dirs(self, dirs): - """Set the list of library search directories to 'dirs' (a list of - strings). This does not affect any standard library search path - that the linker may search by default. - """ - self.library_dirs = dirs[:] - - def add_runtime_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - shared libraries at runtime. - """ - self.runtime_library_dirs.append(dir) - - def set_runtime_library_dirs(self, dirs): - """Set the list of directories to search for shared libraries at - runtime to 'dirs' (a list of strings). This does not affect any - standard search path that the runtime linker may search by - default. - """ - self.runtime_library_dirs = dirs[:] - - def add_link_object(self, object): - """Add 'object' to the list of object files (or analogues, such as - explicitly named library files or the output of "resource - compilers") to be included in every link driven by this compiler - object. - """ - self.objects.append(object) - - def set_link_objects(self, objects): - """Set the list of object files (or analogues) to be included in - every link to 'objects'. This does not affect any standard object - files that the linker may include by default (such as system - libraries). 
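# Illustrative sketch of the link-time bookkeeping above: libraries are bare
# names (the platform prefix/extension is added later), duplicates are kept
# on purpose, and search directories are collected separately.  The paths
# are made-up examples.
from distutils.ccompiler import new_compiler

cc = new_compiler(compiler='unix')
cc.add_library('m')
cc.add_library('foo')
cc.add_library('m')                      # duplicates are deliberately allowed
cc.add_library_dir('/opt/mylibs/lib')
cc.add_runtime_library_dir('/opt/mylibs/lib')
print(cc.libraries)                      # ['m', 'foo', 'm']
print(cc.library_dirs)                   # ['/opt/mylibs/lib']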
- """ - self.objects = objects[:] - - - # -- Private utility methods -------------------------------------- - # (here for the convenience of subclasses) - - # Helper method to prep compiler in subclass compile() methods - - def _setup_compile(self, outdir, macros, incdirs, sources, depends, - extra): - """Process arguments and decide which source files to compile.""" - if outdir is None: - outdir = self.output_dir - elif not isinstance(outdir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if incdirs is None: - incdirs = self.include_dirs - elif isinstance(incdirs, (list, tuple)): - incdirs = list(incdirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - if extra is None: - extra = [] - - # Get the list of expected output (object) files - objects = self.object_filenames(sources, strip_dir=0, - output_dir=outdir) - assert len(objects) == len(sources) - - pp_opts = gen_preprocess_options(macros, incdirs) - - build = {} - for i in range(len(sources)): - src = sources[i] - obj = objects[i] - ext = os.path.splitext(src)[1] - self.mkpath(os.path.dirname(obj)) - build[obj] = (src, ext) - - return macros, objects, extra, pp_opts, build - - def _get_cc_args(self, pp_opts, debug, before): - # works for unixccompiler, cygwinccompiler - cc_args = pp_opts + ['-c'] - if debug: - cc_args[:0] = ['-g'] - if before: - cc_args[:0] = before - return cc_args - - def _fix_compile_args(self, output_dir, macros, include_dirs): - """Typecheck and fix-up some of the arguments to the 'compile()' - method, and return fixed-up values. Specifically: if 'output_dir' - is None, replaces it with 'self.output_dir'; ensures that 'macros' - is a list, and augments it with 'self.macros'; ensures that - 'include_dirs' is a list, and augments it with 'self.include_dirs'. - Guarantees that the returned values are of the correct type, - i.e. for 'output_dir' either string or None, and for 'macros' and - 'include_dirs' either list or None. - """ - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if include_dirs is None: - include_dirs = self.include_dirs - elif isinstance(include_dirs, (list, tuple)): - include_dirs = list(include_dirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - return output_dir, macros, include_dirs - - def _prep_compile(self, sources, output_dir, depends=None): - """Decide which source files must be recompiled. - - Determine the list of object files corresponding to 'sources', - and figure out which ones really need to be recompiled. - Return a list of all object files and a dictionary telling - which source files can be skipped. - """ - # Get the list of expected output (object) files - objects = self.object_filenames(sources, output_dir=output_dir) - assert len(objects) == len(sources) - - # Return an empty dict for the "which source files can be skipped" - # return value to preserve API compatibility. 
- return objects, {} - - def _fix_object_args(self, objects, output_dir): - """Typecheck and fix up some arguments supplied to various methods. - Specifically: ensure that 'objects' is a list; if output_dir is - None, replace with self.output_dir. Return fixed versions of - 'objects' and 'output_dir'. - """ - if not isinstance(objects, (list, tuple)): - raise TypeError("'objects' must be a list or tuple of strings") - objects = list(objects) - - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - return (objects, output_dir) - - def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): - """Typecheck and fix up some of the arguments supplied to the - 'link_*' methods. Specifically: ensure that all arguments are - lists, and augment them with their permanent versions - (eg. 'self.libraries' augments 'libraries'). Return a tuple with - fixed versions of all arguments. - """ - if libraries is None: - libraries = self.libraries - elif isinstance(libraries, (list, tuple)): - libraries = list (libraries) + (self.libraries or []) - else: - raise TypeError( - "'libraries' (if supplied) must be a list of strings") - - if library_dirs is None: - library_dirs = self.library_dirs - elif isinstance(library_dirs, (list, tuple)): - library_dirs = list (library_dirs) + (self.library_dirs or []) - else: - raise TypeError( - "'library_dirs' (if supplied) must be a list of strings") - - if runtime_library_dirs is None: - runtime_library_dirs = self.runtime_library_dirs - elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list(runtime_library_dirs) + - (self.runtime_library_dirs or [])) - else: - raise TypeError("'runtime_library_dirs' (if supplied) " - "must be a list of strings") - - return (libraries, library_dirs, runtime_library_dirs) - - def _need_link(self, objects, output_file): - """Return true if we need to relink the files listed in 'objects' - to recreate 'output_file'. - """ - if self.force: - return True - else: - if self.dry_run: - newer = newer_group (objects, output_file, missing='newer') - else: - newer = newer_group (objects, output_file) - return newer - - def detect_language(self, sources): - """Detect the language of a given file, or list of files. Uses - language_map, and language_order to do the job. - """ - if not isinstance(sources, list): - sources = [sources] - lang = None - index = len(self.language_order) - for source in sources: - base, ext = os.path.splitext(source) - extlang = self.language_map.get(ext) - try: - extindex = self.language_order.index(extlang) - if extindex < index: - lang = extlang - index = extindex - except ValueError: - pass - return lang - - - # -- Worker methods ------------------------------------------------ - # (must be implemented by subclasses) - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - """Preprocess a single C/C++ source file, named in 'source'. - Output will be written to file named 'output_file', or stdout if - 'output_file' not supplied. 'macros' is a list of macro - definitions as for 'compile()', which will augment the macros set - with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a - list of directory names that will be added to the default list. - - Raises PreprocessError on failure. 
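# Illustrative sketch of detect_language() above: with mixed C and C++
# sources, language_order gives C++ precedence, so the link step is driven
# as C++.
from distutils.ccompiler import new_compiler

cc = new_compiler(compiler='unix')
print(cc.detect_language('util.c'))                 # 'c'
print(cc.detect_language(['util.c', 'core.cpp']))   # 'c++'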
- """ - pass - - def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, depends=None): - """Compile one or more source files. - - 'sources' must be a list of filenames, most likely C/C++ - files, but in reality anything that can be handled by a - particular compiler and compiler class (eg. MSVCCompiler can - handle resource files in 'sources'). Return a list of object - filenames, one per source filename in 'sources'. Depending on - the implementation, not all source files will necessarily be - compiled, but all corresponding object filenames will be - returned. - - If 'output_dir' is given, object files will be put under it, while - retaining their original path component. That is, "foo/bar.c" - normally compiles to "foo/bar.o" (for a Unix implementation); if - 'output_dir' is "build", then it would compile to - "build/foo/bar.o". - - 'macros', if given, must be a list of macro definitions. A macro - definition is either a (name, value) 2-tuple or a (name,) 1-tuple. - The former defines a macro; if the value is None, the macro is - defined without an explicit value. The 1-tuple case undefines a - macro. Later definitions/redefinitions/ undefinitions take - precedence. - - 'include_dirs', if given, must be a list of strings, the - directories to add to the default include file search path for this - compilation only. - - 'debug' is a boolean; if true, the compiler will be instructed to - output debug symbols in (or alongside) the object file(s). - - 'extra_preargs' and 'extra_postargs' are implementation- dependent. - On platforms that have the notion of a command-line (e.g. Unix, - DOS/Windows), they are most likely lists of strings: extra - command-line arguments to prepend/append to the compiler command - line. On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch - for those occasions when the abstract compiler framework doesn't - cut the mustard. - - 'depends', if given, is a list of filenames that all targets - depend on. If a source file is older than any file in - depends, then the source file will be recompiled. This - supports dependency tracking, but only at a coarse - granularity. - - Raises CompileError on failure. - """ - # A concrete compiler class can either override this method - # entirely or implement _compile(). - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) - - # Return *all* object filenames, not just the ones we just built. - return objects - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compile 'src' to product 'obj'.""" - # A concrete compiler class that does not override compile() - # should implement _compile(). - pass - - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=0, target_lang=None): - """Link a bunch of stuff together to create a static library file. - The "bunch of stuff" consists of the list of object files supplied - as 'objects', the extra object files supplied to - 'add_link_object()' and/or 'set_link_objects()', the libraries - supplied to 'add_library()' and/or 'set_libraries()', and the - libraries supplied as 'libraries' (if any). 
- - 'output_libname' should be a library name, not a filename; the - filename will be inferred from the library name. 'output_dir' is - the directory where the library file will be put. - - 'debug' is a boolean; if true, debugging information will be - included in the library (note that on most platforms, it is the - compile step where this matters: the 'debug' flag is included here - just for consistency). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LibError on failure. - """ - pass - - - # values for target_desc parameter in link() - SHARED_OBJECT = "shared_object" - SHARED_LIBRARY = "shared_library" - EXECUTABLE = "executable" - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - """Link a bunch of stuff together to create an executable or - shared library file. - - The "bunch of stuff" consists of the list of object files supplied - as 'objects'. 'output_filename' should be a filename. If - 'output_dir' is supplied, 'output_filename' is relative to it - (i.e. 'output_filename' can provide directory components if - needed). - - 'libraries' is a list of libraries to link against. These are - library names, not filenames, since they're translated into - filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" - on Unix and "foo.lib" on DOS/Windows). However, they can include a - directory component, which means the linker will look in that - specific directory rather than searching all the normal locations. - - 'library_dirs', if supplied, should be a list of directories to - search for libraries that were specified as bare library names - (ie. no directory component). These are on top of the system - default and those supplied to 'add_library_dir()' and/or - 'set_library_dirs()'. 'runtime_library_dirs' is a list of - directories that will be embedded into the shared library and used - to search for other shared libraries that *it* depends on at - run-time. (This may only be relevant on Unix.) - - 'export_symbols' is a list of symbols that the shared library will - export. (This appears to be relevant only on Windows.) - - 'debug' is as for 'compile()' and 'create_static_lib()', with the - slight distinction that it actually matters on most platforms (as - opposed to 'create_static_lib()', which includes a 'debug' flag - mostly for form's sake). - - 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except - of course that they supply command-line arguments for the - particular linker being used). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LinkError on failure. - """ - raise NotImplementedError - - - # Old 'link_*()' methods, rewritten to use the new 'link()' method. 
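# Sketch of create_static_lib() as described above: compile one object, then
# archive it into libanswer.a.  Assumes 'cc' and 'ar' on PATH; the library
# name and paths are made-up examples.
import os
import tempfile
from distutils.ccompiler import new_compiler

tmp = tempfile.mkdtemp()
src = os.path.join(tmp, 'answer.c')
with open(src, 'w') as f:
    f.write('int answer(void) { return 42; }\n')

cc = new_compiler(compiler='unix')
objs = cc.compile([src], output_dir=tmp)
cc.create_static_lib(objs, 'answer', output_dir=tmp)   # -> <tmp>/libanswer.a
print(sorted(name for name in os.listdir(tmp) if name.startswith('lib')))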
- - def link_shared_lib(self, - objects, - output_libname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_LIBRARY, objects, - self.library_filename(output_libname, lib_type='shared'), - output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_shared_object(self, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_OBJECT, objects, - output_filename, output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_executable(self, - objects, - output_progname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - target_lang=None): - self.link(CCompiler.EXECUTABLE, objects, - self.executable_filename(output_progname), output_dir, - libraries, library_dirs, runtime_library_dirs, None, - debug, extra_preargs, extra_postargs, None, target_lang) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function; there is - # no appropriate default implementation so subclasses should - # implement all of these. - - def library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for libraries. - """ - raise NotImplementedError - - def runtime_library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for runtime libraries. - """ - raise NotImplementedError - - def library_option(self, lib): - """Return the compiler option to add 'lib' to the list of libraries - linked into the shared library or executable. - """ - raise NotImplementedError - - def has_function(self, funcname, includes=None, include_dirs=None, - libraries=None, library_dirs=None): - """Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. - """ - # this can't be included at module scope because it tries to - # import math which might not be available at that point - maybe - # the necessary logic should just be inlined? 
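# Illustrative sketch of the per-compiler option hooks listed above
# (library_dir_option() and friends): the base class leaves them
# unimplemented, so UnixCCompiler is used here, and the module-level
# gen_lib_options() helper defined near the end of this file stitches the
# pieces together.  The directory is a made-up example.
from distutils.ccompiler import new_compiler, gen_lib_options

cc = new_compiler(compiler='unix')
print(cc.library_dir_option('/opt/mylibs/lib'))      # '-L/opt/mylibs/lib'
print(cc.library_option('m'))                        # '-lm'
print(gen_lib_options(cc, ['/opt/mylibs/lib'], [], ['m']))
# ['-L/opt/mylibs/lib', '-lm']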
- import tempfile - if includes is None: - includes = [] - if include_dirs is None: - include_dirs = [] - if libraries is None: - libraries = [] - if library_dirs is None: - library_dirs = [] - fd, fname = tempfile.mkstemp(".c", funcname, text=True) - f = os.fdopen(fd, "w") - try: - for incl in includes: - f.write("""#include "%s"\n""" % incl) - f.write("""\ -int main (int argc, char **argv) { - %s(); - return 0; -} -""" % funcname) - finally: - f.close() - try: - objects = self.compile([fname], include_dirs=include_dirs) - except CompileError: - return False - finally: - os.remove(fname) - - try: - self.link_executable(objects, "a.out", - libraries=libraries, - library_dirs=library_dirs) - except (LinkError, TypeError): - return False - else: - os.remove(os.path.join(self.output_dir or '', "a.out")) - finally: - for fn in objects: - os.remove(fn) - return True - - def find_library_file (self, dirs, lib, debug=0): - """Search the specified list of directories for a static or shared - library file 'lib' and return the full path to that file. If - 'debug' true, look for a debugging version (if that makes sense on - the current platform). Return None if 'lib' wasn't found in any of - the specified directories. - """ - raise NotImplementedError - - # -- Filename generation methods ----------------------------------- - - # The default implementation of the filename generating methods are - # prejudiced towards the Unix/DOS/Windows view of the world: - # * object files are named by replacing the source file extension - # (eg. .c/.cpp -> .o/.obj) - # * library files (shared or static) are named by plugging the - # library name and extension into a format string, eg. - # "lib%s.%s" % (lib_name, ".a") for Unix static libraries - # * executables are named by appending an extension (possibly - # empty) to the program name: eg. progname + ".exe" for - # Windows - # - # To reduce redundant code, these methods expect to find - # several attributes in the current object (presumably defined - # as class attributes): - # * src_extensions - - # list of C/C++ source file extensions, eg. ['.c', '.cpp'] - # * obj_extension - - # object file extension, eg. '.o' or '.obj' - # * static_lib_extension - - # extension for static library files, eg. '.a' or '.lib' - # * shared_lib_extension - - # extension for shared library/object files, eg. '.so', '.dll' - # * static_lib_format - - # format string for generating static library filenames, - # eg. 'lib%s.%s' or '%s.%s' - # * shared_lib_format - # format string for generating shared library filenames - # (probably same as static_lib_format, since the extension - # is one of the intended parameters to the format string) - # * exe_extension - - # extension for executable files, eg. 
'' or '.exe' - - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - raise UnknownFileError( - "unknown file type '%s' (from '%s')" % (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - def shared_object_filename(self, basename, strip_dir=0, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + self.shared_lib_extension) - - def executable_filename(self, basename, strip_dir=0, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + (self.exe_extension or '')) - - def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=0, output_dir=''): - assert output_dir is not None - if lib_type not in ("static", "shared", "dylib", "xcode_stub"): - raise ValueError( - "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") - fmt = getattr(self, lib_type + "_lib_format") - ext = getattr(self, lib_type + "_lib_extension") - - dir, base = os.path.split(libname) - filename = fmt % (base, ext) - if strip_dir: - dir = '' - - return os.path.join(output_dir, dir, filename) - - - # -- Utility methods ----------------------------------------------- - - def announce(self, msg, level=1): - log.debug(msg) - - def debug_print(self, msg): - from distutils.debug import DEBUG - if DEBUG: - print(msg) - - def warn(self, msg): - sys.stderr.write("warning: %s\n" % msg) - - def execute(self, func, args, msg=None, level=1): - execute(func, args, msg, self.dry_run) - - def spawn(self, cmd, **kwargs): - spawn(cmd, dry_run=self.dry_run, **kwargs) - - def move_file(self, src, dst): - return move_file(src, dst, dry_run=self.dry_run) - - def mkpath (self, name, mode=0o777): - mkpath(name, mode, dry_run=self.dry_run) - - -# Map a sys.platform/os.name ('posix', 'nt') to the default compiler -# type for that platform. Keys are interpreted as re match -# patterns. Order is important; platform mappings are preferred over -# OS names. -_default_compilers = ( - - # Platform string mappings - - # on a cygwin built python we can use gcc like an ordinary UNIXish - # compiler - ('cygwin.*', 'unix'), - - # OS name mappings - ('posix', 'unix'), - ('nt', 'msvc'), - - ) - -def get_default_compiler(osname=None, platform=None): - """Determine the default compiler to use for the given platform. - - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. - - The default values are os.name and sys.platform in case the - parameters are not given. - """ - if osname is None: - osname = os.name - if platform is None: - platform = sys.platform - for pattern, compiler in _default_compilers: - if re.match(pattern, platform) is not None or \ - re.match(pattern, osname) is not None: - return compiler - # Default to Unix compiler - return 'unix' - -# Map compiler types to (module_name, class_name) pairs -- ie. where to -# find the code that implements an interface to this compiler. 
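# Illustrative sketch of the filename-generation helpers above and of
# get_default_compiler().  Results are for the Unix compiler class with
# POSIX-style paths; other classes plug in their own extensions and format
# strings.
from distutils.ccompiler import new_compiler, get_default_compiler

print(get_default_compiler('posix', 'linux'))           # 'unix'
cc = new_compiler(compiler='unix')
print(cc.object_filenames(['src/foo.c'], output_dir='build'))
# ['build/src/foo.o']
print(cc.library_filename('foo', lib_type='shared'))    # 'libfoo.so'
print(cc.executable_filename('app'))                    # 'app'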
(The module -# is assumed to be in the 'distutils' package.) -compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', - "standard UNIX-style compiler"), - 'msvc': ('_msvccompiler', 'MSVCCompiler', - "Microsoft Visual C++"), - 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', - "Cygwin port of GNU C Compiler for Win32"), - 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', - "Mingw32 port of GNU C Compiler for Win32"), - 'bcpp': ('bcppcompiler', 'BCPPCompiler', - "Borland C++ Compiler"), - } - -def show_compilers(): - """Print list of available compilers (used by the "--help-compiler" - options to "build", "build_ext", "build_clib"). - """ - # XXX this "knows" that the compiler option it's describing is - # "--compiler", which just happens to be the case for the three - # commands that use it. - from distutils.fancy_getopt import FancyGetopt - compilers = [] - for compiler in compiler_class.keys(): - compilers.append(("compiler="+compiler, None, - compiler_class[compiler][2])) - compilers.sort() - pretty_printer = FancyGetopt(compilers) - pretty_printer.print_help("List of available compilers:") - - -def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): - """Generate an instance of some CCompiler subclass for the supplied - platform/compiler combination. 'plat' defaults to 'os.name' - (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler - for that platform. Currently only 'posix' and 'nt' are supported, and - the default compilers are "traditional Unix interface" (UnixCCompiler - class) and Visual C++ (MSVCCompiler class). Note that it's perfectly - possible to ask for a Unix compiler object under Windows, and a - Microsoft compiler object under Unix -- if you supply a value for - 'compiler', 'plat' is ignored. - """ - if plat is None: - plat = os.name - - try: - if compiler is None: - compiler = get_default_compiler(plat) - - (module_name, class_name, long_description) = compiler_class[compiler] - except KeyError: - msg = "don't know how to compile C/C++ code on platform '%s'" % plat - if compiler is not None: - msg = msg + " with '%s' compiler" % compiler - raise DistutilsPlatformError(msg) - - try: - module_name = "distutils." + module_name - __import__ (module_name) - module = sys.modules[module_name] - klass = vars(module)[class_name] - except ImportError: - raise DistutilsModuleError( - "can't compile C/C++ code: unable to load module '%s'" % \ - module_name) - except KeyError: - raise DistutilsModuleError( - "can't compile C/C++ code: unable to find class '%s' " - "in module '%s'" % (class_name, module_name)) - - # XXX The None is necessary to preserve backwards compatibility - # with classes that expect verbose to be the first positional - # argument. - return klass(None, dry_run, force) - - -def gen_preprocess_options(macros, include_dirs): - """Generate C pre-processor options (-D, -U, -I) as used by at least - two types of compilers: the typical Unix compiler and Visual C++. - 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) - means undefine (-U) macro 'name', and (name,value) means define (-D) - macro 'name' to 'value'. 'include_dirs' is just a list of directory - names to be added to the header file search path (-I). Returns a list - of command-line options suitable for either Unix compilers or Visual - C++. - """ - # XXX it would be nice (mainly aesthetic, and so we don't generate - # stupid-looking command lines) to go over 'macros' and eliminate - # redundant definitions/undefinitions (ie. 
ensure that only the - # latest mention of a particular macro winds up on the command - # line). I don't think it's essential, though, since most (all?) - # Unix C compilers only pay attention to the latest -D or -U - # mention of a macro on their command line. Similar situation for - # 'include_dirs'. I'm punting on both for now. Anyways, weeding out - # redundancies like this should probably be the province of - # CCompiler, since the data structures used are inherited from it - # and therefore common to all CCompiler classes. - pp_opts = [] - for macro in macros: - if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): - raise TypeError( - "bad macro definition '%s': " - "each element of 'macros' list must be a 1- or 2-tuple" - % macro) - - if len(macro) == 1: # undefine this macro - pp_opts.append("-U%s" % macro[0]) - elif len(macro) == 2: - if macro[1] is None: # define with no explicit value - pp_opts.append("-D%s" % macro[0]) - else: - # XXX *don't* need to be clever about quoting the - # macro value here, because we're going to avoid the - # shell at all costs when we spawn the command! - pp_opts.append("-D%s=%s" % macro) - - for dir in include_dirs: - pp_opts.append("-I%s" % dir) - return pp_opts - - -def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): - """Generate linker options for searching library directories and - linking with specific libraries. 'libraries' and 'library_dirs' are, - respectively, lists of library names (not filenames!) and search - directories. Returns a list of command-line options suitable for use - with some compiler (depending on the two format strings passed in). - """ - lib_opts = [] - - for dir in library_dirs: - lib_opts.append(compiler.library_dir_option(dir)) - - for dir in runtime_library_dirs: - opt = compiler.runtime_library_dir_option(dir) - if isinstance(opt, list): - lib_opts = lib_opts + opt - else: - lib_opts.append(opt) - - # XXX it's important that we *not* remove redundant library mentions! - # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to - # resolve all symbols. I just hope we never have to say "-lfoo obj.o - # -lbar" to get things to work -- that's certainly a possibility, but a - # pretty nasty way to arrange your C code. - - for lib in libraries: - (lib_dir, lib_name) = os.path.split(lib) - if lib_dir: - lib_file = compiler.find_library_file([lib_dir], lib_name) - if lib_file: - lib_opts.append(lib_file) - else: - compiler.warn("no library file corresponding to " - "'%s' found (skipping)" % lib) - else: - lib_opts.append(compiler.library_option (lib)) - return lib_opts diff --git a/venv/Lib/site-packages/setuptools/_distutils/cmd.py b/venv/Lib/site-packages/setuptools/_distutils/cmd.py deleted file mode 100644 index dba3191..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/cmd.py +++ /dev/null @@ -1,403 +0,0 @@ -"""distutils.cmd - -Provides the Command class, the base class for the command classes -in the distutils.command package. -""" - -import sys, os, re -from distutils.errors import DistutilsOptionError -from distutils import util, dir_util, file_util, archive_util, dep_util -from distutils import log - -class Command: - """Abstract base class for defining command classes, the "worker bees" - of the Distutils. A useful analogy for command classes is to think of - them as subroutines with local variables called "options". 
The options - are "declared" in 'initialize_options()' and "defined" (given their - final values, aka "finalized") in 'finalize_options()', both of which - must be defined by every command class. The distinction between the - two is necessary because option values might come from the outside - world (command line, config file, ...), and any options dependent on - other options must be computed *after* these outside influences have - been processed -- hence 'finalize_options()'. The "body" of the - subroutine, where it does all its work based on the values of its - options, is the 'run()' method, which must also be implemented by every - command class. - """ - - # 'sub_commands' formalizes the notion of a "family" of commands, - # eg. "install" as the parent with sub-commands "install_lib", - # "install_headers", etc. The parent of a family of commands - # defines 'sub_commands' as a class attribute; it's a list of - # (command_name : string, predicate : unbound_method | string | None) - # tuples, where 'predicate' is a method of the parent command that - # determines whether the corresponding command is applicable in the - # current situation. (Eg. we "install_headers" is only applicable if - # we have any C header files to install.) If 'predicate' is None, - # that command is always applicable. - # - # 'sub_commands' is usually defined at the *end* of a class, because - # predicates can be unbound methods, so they must already have been - # defined. The canonical example is the "install" command. - sub_commands = [] - - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, dist): - """Create and initialize a new Command object. Most importantly, - invokes the 'initialize_options()' method, which is the real - initializer and depends on the actual command being - instantiated. - """ - # late import because of mutual dependence between these classes - from distutils.dist import Distribution - - if not isinstance(dist, Distribution): - raise TypeError("dist must be a Distribution instance") - if self.__class__ is Command: - raise RuntimeError("Command is an abstract class") - - self.distribution = dist - self.initialize_options() - - # Per-command versions of the global flags, so that the user can - # customize Distutils' behaviour command-by-command and let some - # commands fall back on the Distribution's behaviour. None means - # "not defined, check self.distribution's copy", while 0 or 1 mean - # false and true (duh). Note that this means figuring out the real - # value of each flag is a touch complicated -- hence "self._dry_run" - # will be handled by __getattr__, below. - # XXX This needs to be fixed. - self._dry_run = None - - # verbose is largely ignored, but needs to be set for - # backwards compatibility (I think)? - self.verbose = dist.verbose - - # Some commands define a 'self.force' option to ignore file - # timestamps, but methods defined *here* assume that - # 'self.force' exists for all commands. So define it here - # just to be safe. - self.force = None - - # The 'help' flag is just used for command-line parsing, so - # none of that complicated bureaucracy is needed. - self.help = 0 - - # 'finalized' records whether or not 'finalize_options()' has been - # called. 'finalize_options()' itself should not pay attention to - # this flag: it is the business of 'ensure_finalized()', which - # always calls 'finalize_options()', to respect/update it. - self.finalized = 0 - - # XXX A more explicit way to customize dry_run would be better. 
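# Minimal sketch of the Command life-cycle described above: options get
# default values in initialize_options(), their final values in
# finalize_options(), and the actual work happens in run().  The command
# name and its option are made-up examples.
from distutils.cmd import Command
from distutils.dist import Distribution

class print_greeting(Command):
    description = "print a configurable greeting"
    user_options = [('name=', None, "who to greet")]

    def initialize_options(self):
        self.name = None                 # "declared" with a default

    def finalize_options(self):
        if self.name is None:            # "finalized" once everything is known
            self.name = 'world'

    def run(self):
        print('hello %s' % self.name)

cmd = print_greeting(Distribution())
cmd.ensure_finalized()                   # runs finalize_options() exactly once
cmd.run()                                # hello world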
- def __getattr__(self, attr): - if attr == 'dry_run': - myval = getattr(self, "_" + attr) - if myval is None: - return getattr(self.distribution, attr) - else: - return myval - else: - raise AttributeError(attr) - - def ensure_finalized(self): - if not self.finalized: - self.finalize_options() - self.finalized = 1 - - # Subclasses must define: - # initialize_options() - # provide default values for all options; may be customized by - # setup script, by options from config file(s), or by command-line - # options - # finalize_options() - # decide on the final values for all options; this is called - # after all possible intervention from the outside world - # (command-line, option file, etc.) has been processed - # run() - # run the command: do whatever it is we're here to do, - # controlled by the command's various option values - - def initialize_options(self): - """Set default values for all the options that this command - supports. Note that these defaults may be overridden by other - commands, by the setup script, by config files, or by the - command-line. Thus, this is not the place to code dependencies - between options; generally, 'initialize_options()' implementations - are just a bunch of "self.foo = None" assignments. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - def finalize_options(self): - """Set final values for all the options that this command supports. - This is always called as late as possible, ie. after any option - assignments from the command-line or from other commands have been - done. Thus, this is the place to code option dependencies: if - 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as - long as 'foo' still has the same value it was assigned in - 'initialize_options()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - - def dump_options(self, header=None, indent=""): - from distutils.fancy_getopt import longopt_xlate - if header is None: - header = "command options for '%s':" % self.get_command_name() - self.announce(indent + header, level=log.INFO) - indent = indent + " " - for (option, _, _) in self.user_options: - option = option.translate(longopt_xlate) - if option[-1] == "=": - option = option[:-1] - value = getattr(self, option) - self.announce(indent + "%s = %s" % (option, value), - level=log.INFO) - - def run(self): - """A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - - def announce(self, msg, level=1): - """If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ - log.log(level, msg) - - def debug_print(self, msg): - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. 
- """ - from distutils.debug import DEBUG - if DEBUG: - print(msg) - sys.stdout.flush() - - - # -- Option validation methods ------------------------------------- - # (these are very handy in writing the 'finalize_options()' method) - # - # NB. the general philosophy here is to ensure that a particular option - # value meets certain type and value constraints. If not, we try to - # force it into conformance (eg. if we expect a list but have a string, - # split the string on comma and/or whitespace). If we can't force the - # option into conformance, raise DistutilsOptionError. Thus, command - # classes need do nothing more than (eg.) - # self.ensure_string_list('foo') - # and they can be guaranteed that thereafter, self.foo will be - # a list of strings. - - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, str): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" - % (option, what, val)) - return val - - def ensure_string(self, option, default=None): - """Ensure that 'option' is a string; if not defined, set it to - 'default'. - """ - self._ensure_stringlike(option, "string", default) - - def ensure_string_list(self, option): - r"""Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, str): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - ok = all(isinstance(v, str) for v in val) - else: - ok = False - if not ok: - raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" - % (option, val)) - - def _ensure_tested_string(self, option, tester, what, error_fmt, - default=None): - val = self._ensure_stringlike(option, what, default) - if val is not None and not tester(val): - raise DistutilsOptionError(("error in '%s' option: " + error_fmt) - % (option, val)) - - def ensure_filename(self, option): - """Ensure that 'option' is the name of an existing file.""" - self._ensure_tested_string(option, os.path.isfile, - "filename", - "'%s' does not exist or is not a file") - - def ensure_dirname(self, option): - self._ensure_tested_string(option, os.path.isdir, - "directory name", - "'%s' does not exist or is not a directory") - - - # -- Convenience methods for commands ------------------------------ - - def get_command_name(self): - if hasattr(self, 'command_name'): - return self.command_name - else: - return self.__class__.__name__ - - def set_undefined_options(self, src_cmd, *option_pairs): - """Set the values of any "undefined" options from corresponding - option values in some other command object. "Undefined" here means - "is None", which is the convention used to indicate that an option - has not been changed between 'initialize_options()' and - 'finalize_options()'. Usually called from 'finalize_options()' for - options that depend on some other command rather than another - option of the same command. 'src_cmd' is the other command from - which option values will be taken (a command object will be created - for it if necessary); the remaining arguments are - '(src_option,dst_option)' tuples which mean "take the value of - 'src_option' in the 'src_cmd' command object, and copy it to - 'dst_option' in the current command object". 
- """ - # Option_pairs: list of (src_option, dst_option) tuples - src_cmd_obj = self.distribution.get_command_obj(src_cmd) - src_cmd_obj.ensure_finalized() - for (src_option, dst_option) in option_pairs: - if getattr(self, dst_option) is None: - setattr(self, dst_option, getattr(src_cmd_obj, src_option)) - - def get_finalized_command(self, command, create=1): - """Wrapper around Distribution's 'get_command_obj()' method: find - (create if necessary and 'create' is true) the command object for - 'command', call its 'ensure_finalized()' method, and return the - finalized command object. - """ - cmd_obj = self.distribution.get_command_obj(command, create) - cmd_obj.ensure_finalized() - return cmd_obj - - # XXX rename to 'get_reinitialized_command()'? (should do the - # same in dist.py, if so) - def reinitialize_command(self, command, reinit_subcommands=0): - return self.distribution.reinitialize_command(command, - reinit_subcommands) - - def run_command(self, command): - """Run some other command: uses the 'run_command()' method of - Distribution, which creates and finalizes the command object if - necessary and then invokes its 'run()' method. - """ - self.distribution.run_command(command) - - def get_sub_commands(self): - """Determine the sub-commands that are relevant in the current - distribution (ie., that need to be run). This is based on the - 'sub_commands' class attribute: each tuple in that list may include - a method that we call to determine if the subcommand needs to be - run for the current distribution. Return a list of command names. - """ - commands = [] - for (cmd_name, method) in self.sub_commands: - if method is None or method(self): - commands.append(cmd_name) - return commands - - - # -- External world manipulation ----------------------------------- - - def warn(self, msg): - log.warn("warning: %s: %s\n", self.get_command_name(), msg) - - def execute(self, func, args, msg=None, level=1): - util.execute(func, args, msg, dry_run=self.dry_run) - - def mkpath(self, name, mode=0o777): - dir_util.mkpath(name, mode, dry_run=self.dry_run) - - def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, - link=None, level=1): - """Copy a file respecting verbose, dry-run and force flags. (The - former two default to whatever is in the Distribution object, and - the latter defaults to false for commands that don't define it.)""" - return file_util.copy_file(infile, outfile, preserve_mode, - preserve_times, not self.force, link, - dry_run=self.dry_run) - - def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, level=1): - """Copy an entire directory tree respecting verbose, dry-run, - and force flags. 
- """ - return dir_util.copy_tree(infile, outfile, preserve_mode, - preserve_times, preserve_symlinks, - not self.force, dry_run=self.dry_run) - - def move_file (self, src, dst, level=1): - """Move a file respecting dry-run flag.""" - return file_util.move_file(src, dst, dry_run=self.dry_run) - - def spawn(self, cmd, search_path=1, level=1): - """Spawn an external command respecting dry-run flag.""" - from distutils.spawn import spawn - spawn(cmd, search_path, dry_run=self.dry_run) - - def make_archive(self, base_name, format, root_dir=None, base_dir=None, - owner=None, group=None): - return archive_util.make_archive(base_name, format, root_dir, base_dir, - dry_run=self.dry_run, - owner=owner, group=group) - - def make_file(self, infiles, outfile, func, args, - exec_msg=None, skip_msg=None, level=1): - """Special case of 'execute()' for operations that process one or - more input files and generate one output file. Works just like - 'execute()', except the operation is skipped and a different - message printed if 'outfile' already exists and is newer than all - files listed in 'infiles'. If the command defined 'self.force', - and it is true, then the command is unconditionally run -- does no - timestamp checks. - """ - if skip_msg is None: - skip_msg = "skipping %s (inputs unchanged)" % outfile - - # Allow 'infiles' to be a single string - if isinstance(infiles, str): - infiles = (infiles,) - elif not isinstance(infiles, (list, tuple)): - raise TypeError( - "'infiles' must be a string, or a list or tuple of strings") - - if exec_msg is None: - exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) - - # If 'outfile' must be regenerated (either because it doesn't - # exist, is out-of-date, or the 'force' flag is true) then - # perform the action that presumably regenerates it - if self.force or dep_util.newer_group(infiles, outfile): - self.execute(func, args, exec_msg, level) - # Otherwise, print the "skip" message - else: - log.debug(skip_msg) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__init__.py b/venv/Lib/site-packages/setuptools/_distutils/command/__init__.py deleted file mode 100644 index 481eea9..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -"""distutils.command - -Package containing implementation of all the standard Distutils -commands.""" - -__all__ = ['build', - 'build_py', - 'build_ext', - 'build_clib', - 'build_scripts', - 'clean', - 'install', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - 'sdist', - 'register', - 'bdist', - 'bdist_dumb', - 'bdist_rpm', - 'bdist_wininst', - 'check', - 'upload', - # These two are reserved for future use: - #'bdist_sdux', - #'bdist_pkgtool', - # Note: - # bdist_packager is not included because it only provides - # an abstract base class - ] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 6b65472..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-39.pyc deleted file mode 100644 index 70f48aa..0000000 Binary files 
a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-39.pyc deleted file mode 100644 index 533cea4..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-39.pyc deleted file mode 100644 index 3f892e2..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-39.pyc deleted file mode 100644 index b5b313b..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-39.pyc deleted file mode 100644 index d17bfd8..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-39.pyc deleted file mode 100644 index 59650a3..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-39.pyc deleted file mode 100644 index dd4f9bc..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-39.pyc deleted file mode 100644 index 8e0e039..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-39.pyc deleted file mode 100644 index 3f12fe3..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-39.pyc deleted file mode 100644 index b65c624..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-39.pyc deleted file mode 100644 index 3758e60..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-39.pyc deleted file mode 100644 index 09c1b9d..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-39.pyc deleted file mode 100644 index e4f58bb..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-39.pyc deleted file mode 100644 index 1f8e96e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-39.pyc deleted file mode 100644 index 8d6ee28..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-39.pyc deleted file mode 100644 index 93115c2..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-39.pyc deleted file mode 100644 index 6da9a5b..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-39.pyc deleted file mode 100644 index 202725b..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-39.pyc deleted file mode 100644 index 196d8e0..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-39.pyc deleted file mode 100644 index 89360c4..0000000 Binary files 
a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-39.pyc deleted file mode 100644 index fb6cf3c..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-39.pyc deleted file mode 100644 index 3869956..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-39.pyc deleted file mode 100644 index beb0734..0000000 Binary files a/venv/Lib/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist.py deleted file mode 100644 index 014871d..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/bdist.py +++ /dev/null @@ -1,143 +0,0 @@ -"""distutils.command.bdist - -Implements the Distutils 'bdist' command (create a built [binary] -distribution).""" - -import os -from distutils.core import Command -from distutils.errors import * -from distutils.util import get_platform - - -def show_formats(): - """Print list of available formats (arguments to "--format" option). - """ - from distutils.fancy_getopt import FancyGetopt - formats = [] - for format in bdist.format_commands: - formats.append(("formats=" + format, None, - bdist.format_command[format][1])) - pretty_printer = FancyGetopt(formats) - pretty_printer.print_help("List of available distribution formats:") - - -class bdist(Command): - - description = "create a built (binary) distribution" - - user_options = [('bdist-base=', 'b', - "temporary directory for creating built distributions"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('formats=', None, - "formats for distribution (comma-separated list)"), - ('dist-dir=', 'd', - "directory to put final built distributions in " - "[default: dist]"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['skip-build'] - - help_options = [ - ('help-formats', None, - "lists available distribution formats", show_formats), - ] - - # The following commands do not take a format option from bdist - no_format_option = ('bdist_rpm',) - - # This won't do in reality: will need to distinguish RPM-ish Linux, - # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. - default_format = {'posix': 'gztar', - 'nt': 'zip'} - - # Establish the preferred order (for the --help-formats option). - format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', - 'wininst', 'zip', 'msi'] - - # And the real information. 
- format_command = {'rpm': ('bdist_rpm', "RPM distribution"), - 'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'xztar': ('bdist_dumb', "xz'ed tar file"), - 'ztar': ('bdist_dumb', "compressed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'wininst': ('bdist_wininst', - "Windows executable installer"), - 'zip': ('bdist_dumb', "ZIP file"), - 'msi': ('bdist_msi', "Microsoft Installer") - } - - - def initialize_options(self): - self.bdist_base = None - self.plat_name = None - self.formats = None - self.dist_dir = None - self.skip_build = 0 - self.group = None - self.owner = None - - def finalize_options(self): - # have to finalize 'plat_name' before 'bdist_base' - if self.plat_name is None: - if self.skip_build: - self.plat_name = get_platform() - else: - self.plat_name = self.get_finalized_command('build').plat_name - - # 'bdist_base' -- parent of per-built-distribution-format - # temporary directories (eg. we'll probably have - # "build/bdist./dumb", "build/bdist./rpm", etc.) - if self.bdist_base is None: - build_base = self.get_finalized_command('build').build_base - self.bdist_base = os.path.join(build_base, - 'bdist.' + self.plat_name) - - self.ensure_string_list('formats') - if self.formats is None: - try: - self.formats = [self.default_format[os.name]] - except KeyError: - raise DistutilsPlatformError( - "don't know how to create built distributions " - "on platform %s" % os.name) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # Figure out which sub-commands we need to run. - commands = [] - for format in self.formats: - try: - commands.append(self.format_command[format][0]) - except KeyError: - raise DistutilsOptionError("invalid format '%s'" % format) - - # Reinitialize and run each command. - for i in range(len(self.formats)): - cmd_name = commands[i] - sub_cmd = self.reinitialize_command(cmd_name) - if cmd_name not in self.no_format_option: - sub_cmd.format = self.formats[i] - - # passing the owner and group names for tar archiving - if cmd_name == 'bdist_dumb': - sub_cmd.owner = self.owner - sub_cmd.group = self.group - - # If we're going to need to run this command again, tell it to - # keep its temporary files around so subsequent runs go faster. 
- if cmd_name in commands[i+1:]: - sub_cmd.keep_temp = 1 - self.run_command(cmd_name) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py deleted file mode 100644 index f0d6b5b..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py +++ /dev/null @@ -1,123 +0,0 @@ -"""distutils.command.bdist_dumb - -Implements the Distutils 'bdist_dumb' command (create a "dumb" built -distribution -- i.e., just an archive to be unpacked under $prefix or -$exec_prefix).""" - -import os -from distutils.core import Command -from distutils.util import get_platform -from distutils.dir_util import remove_tree, ensure_relative -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_dumb(Command): - - description = "create a \"dumb\" built distribution" - - user_options = [('bdist-dir=', 'd', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('format=', 'f', - "archive format to create (tar, gztar, bztar, xztar, " - "ztar, zip)"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths " - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['keep-temp', 'skip-build', 'relative'] - - default_format = { 'posix': 'gztar', - 'nt': 'zip' } - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.format = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = None - self.relative = 0 - self.owner = None - self.group = None - - def finalize_options(self): - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'dumb') - - if self.format is None: - try: - self.format = self.default_format[os.name] - except KeyError: - raise DistutilsPlatformError( - "don't know how to create dumb built distributions " - "on platform %s" % os.name) - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ('skip_build', 'skip_build')) - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - - log.info("installing to %s", self.bdist_dir) - self.run_command('install') - - # And make an archive relative to the root of the - # pseudo-installation tree. 
- archive_basename = "%s.%s" % (self.distribution.get_fullname(), - self.plat_name) - - pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) - if not self.relative: - archive_root = self.bdist_dir - else: - if (self.distribution.has_ext_modules() and - (install.install_base != install.install_platbase)): - raise DistutilsPlatformError( - "can't make a dumb built distribution where " - "base and platbase are different (%s, %s)" - % (repr(install.install_base), - repr(install.install_platbase))) - else: - archive_root = os.path.join(self.bdist_dir, - ensure_relative(install.install_base)) - - # Make the archive - filename = self.make_archive(pseudoinstall_root, - self.format, root_dir=archive_root, - owner=self.owner, group=self.group) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_dumb', pyversion, - filename)) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py deleted file mode 100644 index 1525953..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py +++ /dev/null @@ -1,749 +0,0 @@ -# Copyright (C) 2005, 2006 Martin von Löwis -# Licensed to PSF under a Contributor Agreement. -# The bdist_wininst command proper -# based on bdist_wininst -""" -Implements the bdist_msi command. -""" - -import os -import sys -import warnings -from distutils.core import Command -from distutils.dir_util import remove_tree -from distutils.sysconfig import get_python_version -from distutils.version import StrictVersion -from distutils.errors import DistutilsOptionError -from distutils.util import get_platform -from distutils import log -import msilib -from msilib import schema, sequence, text -from msilib import Directory, Feature, Dialog, add_data - -class PyDialog(Dialog): - """Dialog class with a fixed layout: controls at the top, then a ruler, - then a list of buttons: back, next, cancel. Optionally a bitmap at the - left.""" - def __init__(self, *args, **kw): - """Dialog(database, name, x, y, w, h, attributes, title, first, - default, cancel, bitmap=true)""" - super().__init__(*args) - ruler = self.h - 36 - bmwidth = 152*ruler/328 - #if kw.get("bitmap", True): - # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin") - self.line("BottomLine", 0, ruler, self.w, 0) - - def title(self, title): - "Set the title text of the dialog at the top." - # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix, - # text, in VerdanaBold10 - self.text("Title", 15, 10, 320, 60, 0x30003, - r"{\VerdanaBold10}%s" % title) - - def back(self, title, next, name = "Back", active = 1): - """Add a back button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next) - - def cancel(self, title, next, name = "Cancel", active = 1): - """Add a cancel button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. 
- - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next) - - def next(self, title, next, name = "Next", active = 1): - """Add a Next button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next) - - def xbutton(self, name, title, next, xpos): - """Add a button with a given title, the tab-next button, - its name in the Control table, giving its x position; the - y-position is aligned with the other buttons. - - Return the button, so that events can be associated""" - return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next) - -class bdist_msi(Command): - - description = "create a Microsoft Installer (.msi) binary distribution" - - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized) " - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after " - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. 
This script need not be in the " - "distribution"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4', - '2.5', '2.6', '2.7', '2.8', '2.9', - '3.0', '3.1', '3.2', '3.3', '3.4', - '3.5', '3.6', '3.7', '3.8', '3.9'] - other_version = 'X' - - def __init__(self, *args, **kw): - super().__init__(*args, **kw) - warnings.warn("bdist_msi command is deprecated since Python 3.9, " - "use bdist_wheel (wheel packages) instead", - DeprecationWarning, 2) - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.no_target_compile = 0 - self.no_target_optimize = 0 - self.target_version = None - self.dist_dir = None - self.skip_build = None - self.install_script = None - self.pre_install_script = None - self.versions = None - - def finalize_options(self): - self.set_undefined_options('bdist', ('skip_build', 'skip_build')) - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'msi') - - short_version = get_python_version() - if (not self.target_version) and self.distribution.has_ext_modules(): - self.target_version = short_version - - if self.target_version: - self.versions = [self.target_version] - if not self.skip_build and self.distribution.has_ext_modules()\ - and self.target_version != short_version: - raise DistutilsOptionError( - "target version can only be %s, or the '--skip-build'" - " option must be specified" % (short_version,)) - else: - self.versions = list(self.all_versions) - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ) - - if self.pre_install_script: - raise DistutilsOptionError( - "the pre-install-script feature is not yet implemented") - - if self.install_script: - for script in self.distribution.scripts: - if self.install_script == os.path.basename(script): - break - else: - raise DistutilsOptionError( - "install_script '%s' not found in scripts" - % self.install_script) - self.install_script_key = None - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.prefix = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - - install_lib = self.reinitialize_command('install_lib') - # we do not want to include pyc or pyo files - install_lib.compile = 0 - install_lib.optimize = 0 - - if self.distribution.has_ext_modules(): - # If we are building an installer for a Python version other - # than the one we are currently running, then we need to ensure - # our build_lib reflects the other Python version rather than ours. - # Note that for target_version!=sys.version, we must have skipped the - # build step, so there is no issue with enforcing the build of this - # version. 
- target_version = self.target_version - if not target_version: - assert self.skip_build, "Should have already checked this" - target_version = '%d.%d' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, target_version) - build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) - - log.info("installing to %s", self.bdist_dir) - install.ensure_finalized() - - # avoid warning of 'install_lib' about installing - # into a directory not in sys.path - sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) - - install.run() - - del sys.path[0] - - self.mkpath(self.dist_dir) - fullname = self.distribution.get_fullname() - installer_name = self.get_installer_filename(fullname) - installer_name = os.path.abspath(installer_name) - if os.path.exists(installer_name): os.unlink(installer_name) - - metadata = self.distribution.metadata - author = metadata.author - if not author: - author = metadata.maintainer - if not author: - author = "UNKNOWN" - version = metadata.get_version() - # ProductVersion must be strictly numeric - # XXX need to deal with prerelease versions - sversion = "%d.%d.%d" % StrictVersion(version).version - # Prefix ProductName with Python x.y, so that - # it sorts together with the other Python packages - # in Add-Remove-Programs (APR) - fullname = self.distribution.get_fullname() - if self.target_version: - product_name = "Python %s %s" % (self.target_version, fullname) - else: - product_name = "Python %s" % (fullname) - self.db = msilib.init_database(installer_name, schema, - product_name, msilib.gen_uuid(), - sversion, author) - msilib.add_tables(self.db, sequence) - props = [('DistVersion', version)] - email = metadata.author_email or metadata.maintainer_email - if email: - props.append(("ARPCONTACT", email)) - if metadata.url: - props.append(("ARPURLINFOABOUT", metadata.url)) - if props: - add_data(self.db, 'Property', props) - - self.add_find_python() - self.add_files() - self.add_scripts() - self.add_ui() - self.db.Commit() - - if hasattr(self.distribution, 'dist_files'): - tup = 'bdist_msi', self.target_version or 'any', fullname - self.distribution.dist_files.append(tup) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - def add_files(self): - db = self.db - cab = msilib.CAB("distfiles") - rootdir = os.path.abspath(self.bdist_dir) - - root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir") - f = Feature(db, "Python", "Python", "Everything", - 0, 1, directory="TARGETDIR") - - items = [(f, root, '')] - for version in self.versions + [self.other_version]: - target = "TARGETDIR" + version - name = default = "Python" + version - desc = "Everything" - if version is self.other_version: - title = "Python from another location" - level = 2 - else: - title = "Python %s from registry" % version - level = 1 - f = Feature(db, name, title, desc, 1, level, directory=target) - dir = Directory(db, cab, root, rootdir, target, default) - items.append((f, dir, version)) - db.Commit() - - seen = {} - for feature, dir, version in items: - todo = [dir] - while todo: - dir = todo.pop() - for file in os.listdir(dir.absolute): - afile = os.path.join(dir.absolute, file) - if os.path.isdir(afile): - short = "%s|%s" % (dir.make_short(file), file) - default = file + version - newdir = Directory(db, cab, dir, file, default, short) - todo.append(newdir) - else: - if not dir.component: - dir.start_component(dir.logical, feature, 0) - if afile not in seen: - key = seen[afile] = 
dir.add_file(file) - if file==self.install_script: - if self.install_script_key: - raise DistutilsOptionError( - "Multiple files with name %s" % file) - self.install_script_key = '[#%s]' % key - else: - key = seen[afile] - add_data(self.db, "DuplicateFile", - [(key + version, dir.component, key, None, dir.logical)]) - db.Commit() - cab.commit(db) - - def add_find_python(self): - """Adds code to the installer to compute the location of Python. - - Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the - registry for each version of Python. - - Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, - else from PYTHON.MACHINE.X.Y. - - Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe""" - - start = 402 - for ver in self.versions: - install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver - machine_reg = "python.machine." + ver - user_reg = "python.user." + ver - machine_prop = "PYTHON.MACHINE." + ver - user_prop = "PYTHON.USER." + ver - machine_action = "PythonFromMachine" + ver - user_action = "PythonFromUser" + ver - exe_action = "PythonExe" + ver - target_dir_prop = "TARGETDIR" + ver - exe_prop = "PYTHON" + ver - if msilib.Win64: - # type: msidbLocatorTypeRawValue + msidbLocatorType64bit - Type = 2+16 - else: - Type = 2 - add_data(self.db, "RegLocator", - [(machine_reg, 2, install_path, None, Type), - (user_reg, 1, install_path, None, Type)]) - add_data(self.db, "AppSearch", - [(machine_prop, machine_reg), - (user_prop, user_reg)]) - add_data(self.db, "CustomAction", - [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"), - (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"), - (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"), - ]) - add_data(self.db, "InstallExecuteSequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "InstallUISequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "Condition", - [("Python" + ver, 0, "NOT TARGETDIR" + ver)]) - start += 4 - assert start < 500 - - def add_scripts(self): - if self.install_script: - start = 6800 - for ver in self.versions + [self.other_version]: - install_action = "install_script." + ver - exe_prop = "PYTHON" + ver - add_data(self.db, "CustomAction", - [(install_action, 50, exe_prop, self.install_script_key)]) - add_data(self.db, "InstallExecuteSequence", - [(install_action, "&Python%s=3" % ver, start)]) - start += 1 - # XXX pre-install scripts are currently refused in finalize_options() - # but if this feature is completed, it will also need to add - # entries for each version as the above code does - if self.pre_install_script: - scriptfn = os.path.join(self.bdist_dir, "preinstall.bat") - with open(scriptfn, "w") as f: - # The batch file will be executed with [PYTHON], so that %1 - # is the path to the Python interpreter; %0 will be the path - # of the batch file. 
- # rem =""" - # %1 %0 - # exit - # """ - # - f.write('rem ="""\n%1 %0\nexit\n"""\n') - with open(self.pre_install_script) as fin: - f.write(fin.read()) - add_data(self.db, "Binary", - [("PreInstall", msilib.Binary(scriptfn)) - ]) - add_data(self.db, "CustomAction", - [("PreInstall", 2, "PreInstall", None) - ]) - add_data(self.db, "InstallExecuteSequence", - [("PreInstall", "NOT Installed", 450)]) - - - def add_ui(self): - db = self.db - x = y = 50 - w = 370 - h = 300 - title = "[ProductName] Setup" - - # see "Dialog Style Bits" - modal = 3 # visible | modal - modeless = 1 # visible - track_disk_space = 32 - - # UI customization properties - add_data(db, "Property", - # See "DefaultUIFont Property" - [("DefaultUIFont", "DlgFont8"), - # See "ErrorDialog Style Bit" - ("ErrorDialog", "ErrorDlg"), - ("Progress1", "Install"), # modified in maintenance type dlg - ("Progress2", "installs"), - ("MaintenanceForm_Action", "Repair"), - # possible values: ALL, JUSTME - ("WhichUsers", "ALL") - ]) - - # Fonts, see "TextStyle Table" - add_data(db, "TextStyle", - [("DlgFont8", "Tahoma", 9, None, 0), - ("DlgFontBold8", "Tahoma", 8, None, 1), #bold - ("VerdanaBold10", "Verdana", 10, None, 1), - ("VerdanaRed9", "Verdana", 9, 255, 0), - ]) - - # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table" - # Numbers indicate sequence; see sequence.py for how these action integrate - add_data(db, "InstallUISequence", - [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140), - ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141), - # In the user interface, assume all-users installation if privileged. - ("SelectFeaturesDlg", "Not Installed", 1230), - # XXX no support for resume installations yet - #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), - ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250), - ("ProgressDlg", None, 1280)]) - - add_data(db, 'ActionText', text.ActionText) - add_data(db, 'UIText', text.UIText) - ##################################################################### - # Standard dialogs: FatalError, UserExit, ExitDialog - fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - fatal.title("[ProductName] Installer ended prematurely") - fatal.back("< Back", "Finish", active = 0) - fatal.cancel("Cancel", "Back", active = 0) - fatal.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.") - fatal.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c=fatal.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Exit") - - user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - user_exit.title("[ProductName] Installer was interrupted") - user_exit.back("< Back", "Finish", active = 0) - user_exit.cancel("Cancel", "Back", active = 0) - user_exit.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup was interrupted. Your system has not been modified. 
" - "To install this program at a later time, please run the installation again.") - user_exit.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c = user_exit.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Exit") - - exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - exit_dialog.title("Completing the [ProductName] Installer") - exit_dialog.back("< Back", "Finish", active = 0) - exit_dialog.cancel("Cancel", "Back", active = 0) - exit_dialog.text("Description", 15, 235, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c = exit_dialog.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Return") - - ##################################################################### - # Required dialog: FilesInUse, ErrorDlg - inuse = PyDialog(db, "FilesInUse", - x, y, w, h, - 19, # KeepModeless|Modal|Visible - title, - "Retry", "Retry", "Retry", bitmap=False) - inuse.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Files in Use") - inuse.text("Description", 20, 23, 280, 20, 0x30003, - "Some files that need to be updated are currently in use.") - inuse.text("Text", 20, 55, 330, 50, 3, - "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.") - inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess", - None, None, None) - c=inuse.back("Exit", "Ignore", name="Exit") - c.event("EndDialog", "Exit") - c=inuse.next("Ignore", "Retry", name="Ignore") - c.event("EndDialog", "Ignore") - c=inuse.cancel("Retry", "Exit", name="Retry") - c.event("EndDialog","Retry") - - # See "Error Dialog". See "ICE20" for the required names of the controls. 
- error = Dialog(db, "ErrorDlg", - 50, 10, 330, 101, - 65543, # Error|Minimize|Modal|Visible - title, - "ErrorText", None, None) - error.text("ErrorText", 50,9,280,48,3, "") - #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None) - error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo") - error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes") - error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort") - error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel") - error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore") - error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk") - error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry") - - ##################################################################### - # Global "Query Cancel" dialog - cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title, - "No", "No", "No") - cancel.text("Text", 48, 15, 194, 30, 3, - "Are you sure you want to cancel [ProductName] installation?") - #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None, - # "py.ico", None, None) - c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No") - c.event("EndDialog", "Exit") - - c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes") - c.event("EndDialog", "Return") - - ##################################################################### - # Global "Wait for costing" dialog - costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title, - "Return", "Return", "Return") - costing.text("Text", 48, 15, 194, 30, 3, - "Please wait while the installer finishes determining your disk space requirements.") - c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None) - c.event("EndDialog", "Exit") - - ##################################################################### - # Preparation dialog: no user input except cancellation - prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel") - prep.text("Description", 15, 70, 320, 40, 0x30003, - "Please wait while the Installer prepares to guide you through the installation.") - prep.title("Welcome to the [ProductName] Installer") - c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...") - c.mapping("ActionText", "Text") - c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None) - c.mapping("ActionData", "Text") - prep.back("Back", None, active=0) - prep.next("Next", None, active=0) - c=prep.cancel("Cancel", None) - c.event("SpawnDialog", "CancelDlg") - - ##################################################################### - # Feature (Python directory) selection - seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") - seldlg.title("Select Python Installations") - - seldlg.text("Hint", 15, 30, 300, 20, 3, - "Select the Python locations where %s should be installed." 
- % self.distribution.get_fullname()) - - seldlg.back("< Back", None, active=0) - c = seldlg.next("Next >", "Cancel") - order = 1 - c.event("[TARGETDIR]", "[SourceDir]", ordering=order) - for version in self.versions + [self.other_version]: - order += 1 - c.event("[TARGETDIR]", "[TARGETDIR%s]" % version, - "FEATURE_SELECTED AND &Python%s=3" % version, - ordering=order) - c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1) - c.event("EndDialog", "Return", ordering=order + 2) - c = seldlg.cancel("Cancel", "Features") - c.event("SpawnDialog", "CancelDlg") - - c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3, - "FEATURE", None, "PathEdit", None) - c.event("[FEATURE_SELECTED]", "1") - ver = self.other_version - install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver - dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver - - c = seldlg.text("Other", 15, 200, 300, 15, 3, - "Provide an alternate Python location") - c.condition("Enable", install_other_cond) - c.condition("Show", install_other_cond) - c.condition("Disable", dont_install_other_cond) - c.condition("Hide", dont_install_other_cond) - - c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1, - "TARGETDIR" + ver, None, "Next", None) - c.condition("Enable", install_other_cond) - c.condition("Show", install_other_cond) - c.condition("Disable", dont_install_other_cond) - c.condition("Hide", dont_install_other_cond) - - ##################################################################### - # Disk cost - cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title, - "OK", "OK", "OK", bitmap=False) - cost.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Disk Space Requirements") - cost.text("Description", 20, 20, 280, 20, 0x30003, - "The disk space required for the installation of the selected features.") - cost.text("Text", 20, 53, 330, 60, 3, - "The highlighted volumes (if any) do not have enough disk space " - "available for the currently selected features. You can either " - "remove some files from the highlighted volumes, or choose to " - "install less features onto local drive(s), or select different " - "destination drive(s).") - cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223, - None, "{120}{70}{70}{70}{70}", None, None) - cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return") - - ##################################################################### - # WhichUsers Dialog. Only available on NT, and for privileged users. - # This must be run before FindRelatedProducts, because that will - # take into account whether the previous installation was per-user - # or per-machine. We currently don't support going back to this - # dialog after "Next" was selected; to support this, we would need to - # find how to reset the ALLUSERS property, and how to re-run - # FindRelatedProducts. - # On Windows9x, the ALLUSERS property is ignored on the command line - # and in the Property table, but installer fails according to the documentation - # if a dialog attempts to set ALLUSERS. 
- whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title, - "AdminInstall", "Next", "Cancel") - whichusers.title("Select whether to install [ProductName] for all users of this computer.") - # A radio group with two options: allusers, justme - g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3, - "WhichUsers", "", "Next") - g.add("ALL", 0, 5, 150, 20, "Install for all users") - g.add("JUSTME", 0, 25, 150, 20, "Install just for me") - - whichusers.back("Back", None, active=0) - - c = whichusers.next("Next >", "Cancel") - c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1) - c.event("EndDialog", "Return", ordering = 2) - - c = whichusers.cancel("Cancel", "AdminInstall") - c.event("SpawnDialog", "CancelDlg") - - ##################################################################### - # Installation Progress dialog (modeless) - progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel", bitmap=False) - progress.text("Title", 20, 15, 200, 15, 0x30003, - r"{\DlgFontBold8}[Progress1] [ProductName]") - progress.text("Text", 35, 65, 300, 30, 3, - "Please wait while the Installer [Progress2] [ProductName]. " - "This may take several minutes.") - progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:") - - c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...") - c.mapping("ActionText", "Text") - - #c=progress.text("ActionData", 35, 140, 300, 20, 3, None) - #c.mapping("ActionData", "Text") - - c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537, - None, "Progress done", None, None) - c.mapping("SetProgress", "Progress") - - progress.back("< Back", "Next", active=False) - progress.next("Next >", "Cancel", active=False) - progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg") - - ################################################################### - # Maintenance type: repair/uninstall - maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") - maint.title("Welcome to the [ProductName] Setup Wizard") - maint.text("BodyText", 15, 63, 330, 42, 3, - "Select whether you want to repair or remove [ProductName].") - g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3, - "MaintenanceForm_Action", "", "Next") - #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]") - g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]") - g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]") - - maint.back("< Back", None, active=False) - c=maint.next("Finish", "Cancel") - # Change installation: Change progress dialog to "Change", then ask - # for feature selection - #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1) - #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2) - - # Reinstall: Change progress dialog to "Repair", then invoke reinstall - # Also set list of reinstalled features to "ALL" - c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5) - c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6) - c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7) - c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8) - - # Uninstall: Change progress to "Remove", then invoke uninstall - # Also set list of removed features to "ALL" - c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11) - c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12) - c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13) - c.event("Remove", 
"ALL", 'MaintenanceForm_Action="Remove"', 14) - - # Close dialog when maintenance action scheduled - c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20) - #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21) - - maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg") - - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name, - self.target_version) - else: - base_name = "%s.%s.msi" % (fullname, self.plat_name) - installer_name = os.path.join(self.dist_dir, base_name) - return installer_name diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py deleted file mode 100644 index 550cbfa..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py +++ /dev/null @@ -1,579 +0,0 @@ -"""distutils.command.bdist_rpm - -Implements the Distutils 'bdist_rpm' command (create RPM source and binary -distributions).""" - -import subprocess, sys, os -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.file_util import write_file -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_rpm(Command): - - description = "create an RPM distribution" - - user_options = [ - ('bdist-base=', None, - "base directory for creating built distributions"), - ('rpm-base=', None, - "base directory for creating RPMs (defaults to \"rpm\" under " - "--bdist-base; must be specified for RPM 2)"), - ('dist-dir=', 'd', - "directory to put final RPM files in " - "(and .spec files if --spec-only)"), - ('python=', None, - "path to Python interpreter to hard-code in the .spec file " - "(default: \"python\")"), - ('fix-python', None, - "hard-code the exact path to the current Python interpreter in " - "the .spec file"), - ('spec-only', None, - "only regenerate spec file"), - ('source-only', None, - "only generate source RPM"), - ('binary-only', None, - "only generate binary RPM"), - ('use-bzip2', None, - "use bzip2 instead of gzip to create source distribution"), - - # More meta-data: too RPM-specific to put in the setup script, - # but needs to go in the .spec file -- so we make these options - # to "bdist_rpm". The idea is that packagers would put this - # info in setup.cfg, although they are of course free to - # supply it on the command line. - ('distribution-name=', None, - "name of the (Linux) distribution to which this " - "RPM applies (*not* the name of the module distribution!)"), - ('group=', None, - "package classification [default: \"Development/Libraries\"]"), - ('release=', None, - "RPM release number"), - ('serial=', None, - "RPM serial number"), - ('vendor=', None, - "RPM \"vendor\" (eg. \"Joe Blow \") " - "[default: maintainer or author from setup script]"), - ('packager=', None, - "RPM packager (eg. 
\"Jane Doe \") " - "[default: vendor]"), - ('doc-files=', None, - "list of documentation files (space or comma-separated)"), - ('changelog=', None, - "RPM changelog"), - ('icon=', None, - "name of icon file"), - ('provides=', None, - "capabilities provided by this package"), - ('requires=', None, - "capabilities required by this package"), - ('conflicts=', None, - "capabilities which conflict with this package"), - ('build-requires=', None, - "capabilities required to build this package"), - ('obsoletes=', None, - "capabilities made obsolete by this package"), - ('no-autoreq', None, - "do not automatically calculate dependencies"), - - # Actions to take when building RPM - ('keep-temp', 'k', - "don't clean up RPM build directory"), - ('no-keep-temp', None, - "clean up RPM build directory [default]"), - ('use-rpm-opt-flags', None, - "compile with RPM_OPT_FLAGS when building from source RPM"), - ('no-rpm-opt-flags', None, - "do not pass any RPM CFLAGS to compiler"), - ('rpm3-mode', None, - "RPM 3 compatibility mode (default)"), - ('rpm2-mode', None, - "RPM 2 compatibility mode"), - - # Add the hooks necessary for specifying custom scripts - ('prep-script=', None, - "Specify a script for the PREP phase of RPM building"), - ('build-script=', None, - "Specify a script for the BUILD phase of RPM building"), - - ('pre-install=', None, - "Specify a script for the pre-INSTALL phase of RPM building"), - ('install-script=', None, - "Specify a script for the INSTALL phase of RPM building"), - ('post-install=', None, - "Specify a script for the post-INSTALL phase of RPM building"), - - ('pre-uninstall=', None, - "Specify a script for the pre-UNINSTALL phase of RPM building"), - ('post-uninstall=', None, - "Specify a script for the post-UNINSTALL phase of RPM building"), - - ('clean-script=', None, - "Specify a script for the CLEAN phase of RPM building"), - - ('verify-script=', None, - "Specify a script for the VERIFY phase of the RPM build"), - - # Allow a packager to explicitly force an architecture - ('force-arch=', None, - "Force an architecture onto the RPM build process"), - - ('quiet', 'q', - "Run the INSTALL phase of RPM building in quiet mode"), - ] - - boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', - 'no-autoreq', 'quiet'] - - negative_opt = {'no-keep-temp': 'keep-temp', - 'no-rpm-opt-flags': 'use-rpm-opt-flags', - 'rpm2-mode': 'rpm3-mode'} - - - def initialize_options(self): - self.bdist_base = None - self.rpm_base = None - self.dist_dir = None - self.python = None - self.fix_python = None - self.spec_only = None - self.binary_only = None - self.source_only = None - self.use_bzip2 = None - - self.distribution_name = None - self.group = None - self.release = None - self.serial = None - self.vendor = None - self.packager = None - self.doc_files = None - self.changelog = None - self.icon = None - - self.prep_script = None - self.build_script = None - self.install_script = None - self.clean_script = None - self.verify_script = None - self.pre_install = None - self.post_install = None - self.pre_uninstall = None - self.post_uninstall = None - self.prep = None - self.provides = None - self.requires = None - self.conflicts = None - self.build_requires = None - self.obsoletes = None - - self.keep_temp = 0 - self.use_rpm_opt_flags = 1 - self.rpm3_mode = 1 - self.no_autoreq = 0 - - self.force_arch = None - self.quiet = 0 - - def finalize_options(self): - self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) - if self.rpm_base is None: - if not self.rpm3_mode: - raise 
DistutilsOptionError( - "you must specify --rpm-base in RPM 2 mode") - self.rpm_base = os.path.join(self.bdist_base, "rpm") - - if self.python is None: - if self.fix_python: - self.python = sys.executable - else: - self.python = "python3" - elif self.fix_python: - raise DistutilsOptionError( - "--python and --fix-python are mutually exclusive options") - - if os.name != 'posix': - raise DistutilsPlatformError("don't know how to create RPM " - "distributions on platform %s" % os.name) - if self.binary_only and self.source_only: - raise DistutilsOptionError( - "cannot supply both '--source-only' and '--binary-only'") - - # don't pass CFLAGS to pure python distributions - if not self.distribution.has_ext_modules(): - self.use_rpm_opt_flags = 0 - - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - self.finalize_package_data() - - def finalize_package_data(self): - self.ensure_string('group', "Development/Libraries") - self.ensure_string('vendor', - "%s <%s>" % (self.distribution.get_contact(), - self.distribution.get_contact_email())) - self.ensure_string('packager') - self.ensure_string_list('doc_files') - if isinstance(self.doc_files, list): - for readme in ('README', 'README.txt'): - if os.path.exists(readme) and readme not in self.doc_files: - self.doc_files.append(readme) - - self.ensure_string('release', "1") - self.ensure_string('serial') # should it be an int? - - self.ensure_string('distribution_name') - - self.ensure_string('changelog') - # Format changelog correctly - self.changelog = self._format_changelog(self.changelog) - - self.ensure_filename('icon') - - self.ensure_filename('prep_script') - self.ensure_filename('build_script') - self.ensure_filename('install_script') - self.ensure_filename('clean_script') - self.ensure_filename('verify_script') - self.ensure_filename('pre_install') - self.ensure_filename('post_install') - self.ensure_filename('pre_uninstall') - self.ensure_filename('post_uninstall') - - # XXX don't forget we punted on summaries and descriptions -- they - # should be handled here eventually! - - # Now *this* is some meta-data that belongs in the setup script... - self.ensure_string_list('provides') - self.ensure_string_list('requires') - self.ensure_string_list('conflicts') - self.ensure_string_list('build_requires') - self.ensure_string_list('obsoletes') - - self.ensure_string('force_arch') - - def run(self): - if DEBUG: - print("before _get_package_data():") - print("vendor =", self.vendor) - print("packager =", self.packager) - print("doc_files =", self.doc_files) - print("changelog =", self.changelog) - - # make directories - if self.spec_only: - spec_dir = self.dist_dir - self.mkpath(spec_dir) - else: - rpm_dir = {} - for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'): - rpm_dir[d] = os.path.join(self.rpm_base, d) - self.mkpath(rpm_dir[d]) - spec_dir = rpm_dir['SPECS'] - - # Spec file goes into 'dist_dir' if '--spec-only specified', - # build/rpm. otherwise. - spec_path = os.path.join(spec_dir, - "%s.spec" % self.distribution.get_name()) - self.execute(write_file, - (spec_path, - self._make_spec_file()), - "writing '%s'" % spec_path) - - if self.spec_only: # stop if requested - return - - # Make a source distribution and copy to SOURCES directory with - # optional icon. 
- saved_dist_files = self.distribution.dist_files[:] - sdist = self.reinitialize_command('sdist') - if self.use_bzip2: - sdist.formats = ['bztar'] - else: - sdist.formats = ['gztar'] - self.run_command('sdist') - self.distribution.dist_files = saved_dist_files - - source = sdist.get_archive_files()[0] - source_dir = rpm_dir['SOURCES'] - self.copy_file(source, source_dir) - - if self.icon: - if os.path.exists(self.icon): - self.copy_file(self.icon, source_dir) - else: - raise DistutilsFileError( - "icon file '%s' does not exist" % self.icon) - - # build package - log.info("building RPMs") - rpm_cmd = ['rpmbuild'] - - if self.source_only: # what kind of RPMs? - rpm_cmd.append('-bs') - elif self.binary_only: - rpm_cmd.append('-bb') - else: - rpm_cmd.append('-ba') - rpm_cmd.extend(['--define', '__python %s' % self.python]) - if self.rpm3_mode: - rpm_cmd.extend(['--define', - '_topdir %s' % os.path.abspath(self.rpm_base)]) - if not self.keep_temp: - rpm_cmd.append('--clean') - - if self.quiet: - rpm_cmd.append('--quiet') - - rpm_cmd.append(spec_path) - # Determine the binary rpm names that should be built out of this spec - # file - # Note that some of these may not be really built (if the file - # list is empty) - nvr_string = "%{name}-%{version}-%{release}" - src_rpm = nvr_string + ".src.rpm" - non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" - q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( - src_rpm, non_src_rpm, spec_path) - - out = os.popen(q_cmd) - try: - binary_rpms = [] - source_rpm = None - while True: - line = out.readline() - if not line: - break - l = line.strip().split() - assert(len(l) == 2) - binary_rpms.append(l[1]) - # The source rpm is named after the first entry in the spec file - if source_rpm is None: - source_rpm = l[0] - - status = out.close() - if status: - raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd)) - - finally: - out.close() - - self.spawn(rpm_cmd) - - if not self.dry_run: - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - - if not self.binary_only: - srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) - assert(os.path.exists(srpm)) - self.move_file(srpm, self.dist_dir) - filename = os.path.join(self.dist_dir, source_rpm) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) - - if not self.source_only: - for rpm in binary_rpms: - rpm = os.path.join(rpm_dir['RPMS'], rpm) - if os.path.exists(rpm): - self.move_file(rpm, self.dist_dir) - filename = os.path.join(self.dist_dir, - os.path.basename(rpm)) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) - - def _dist_path(self, path): - return os.path.join(self.dist_dir, os.path.basename(path)) - - def _make_spec_file(self): - """Generate the text of an RPM spec file and return it as a - list of strings (one per line). 
- """ - # definitions and headers - spec_file = [ - '%define name ' + self.distribution.get_name(), - '%define version ' + self.distribution.get_version().replace('-','_'), - '%define unmangled_version ' + self.distribution.get_version(), - '%define release ' + self.release.replace('-','_'), - '', - 'Summary: ' + self.distribution.get_description(), - ] - - # Workaround for #14443 which affects some RPM based systems such as - # RHEL6 (and probably derivatives) - vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}') - # Generate a potential replacement value for __os_install_post (whilst - # normalizing the whitespace to simplify the test for whether the - # invocation of brp-python-bytecompile passes in __python): - vendor_hook = '\n'.join([' %s \\' % line.strip() - for line in vendor_hook.splitlines()]) - problem = "brp-python-bytecompile \\\n" - fixed = "brp-python-bytecompile %{__python} \\\n" - fixed_hook = vendor_hook.replace(problem, fixed) - if fixed_hook != vendor_hook: - spec_file.append('# Workaround for http://bugs.python.org/issue14443') - spec_file.append('%define __os_install_post ' + fixed_hook + '\n') - - # put locale summaries into spec file - # XXX not supported for now (hard to put a dictionary - # in a config file -- arg!) - #for locale in self.summaries.keys(): - # spec_file.append('Summary(%s): %s' % (locale, - # self.summaries[locale])) - - spec_file.extend([ - 'Name: %{name}', - 'Version: %{version}', - 'Release: %{release}',]) - - # XXX yuck! this filename is available from the "sdist" command, - # but only after it has run: and we create the spec file before - # running "sdist", in case of --spec-only. - if self.use_bzip2: - spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2') - else: - spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') - - spec_file.extend([ - 'License: ' + self.distribution.get_license(), - 'Group: ' + self.group, - 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', - 'Prefix: %{_prefix}', ]) - - if not self.force_arch: - # noarch if no extension modules - if not self.distribution.has_ext_modules(): - spec_file.append('BuildArch: noarch') - else: - spec_file.append( 'BuildArch: %s' % self.force_arch ) - - for field in ('Vendor', - 'Packager', - 'Provides', - 'Requires', - 'Conflicts', - 'Obsoletes', - ): - val = getattr(self, field.lower()) - if isinstance(val, list): - spec_file.append('%s: %s' % (field, ' '.join(val))) - elif val is not None: - spec_file.append('%s: %s' % (field, val)) - - - if self.distribution.get_url() != 'UNKNOWN': - spec_file.append('Url: ' + self.distribution.get_url()) - - if self.distribution_name: - spec_file.append('Distribution: ' + self.distribution_name) - - if self.build_requires: - spec_file.append('BuildRequires: ' + - ' '.join(self.build_requires)) - - if self.icon: - spec_file.append('Icon: ' + os.path.basename(self.icon)) - - if self.no_autoreq: - spec_file.append('AutoReq: 0') - - spec_file.extend([ - '', - '%description', - self.distribution.get_long_description() - ]) - - # put locale descriptions into spec file - # XXX again, suppressed because config file syntax doesn't - # easily support this ;-( - #for locale in self.descriptions.keys(): - # spec_file.extend([ - # '', - # '%description -l ' + locale, - # self.descriptions[locale], - # ]) - - # rpm scripts - # figure out default build script - def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) - def_build = "%s build" % def_setup_call - if self.use_rpm_opt_flags: - def_build 
= 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build - - # insert contents of files - - # XXX this is kind of misleading: user-supplied options are files - # that we open and interpolate into the spec file, but the defaults - # are just text that we drop in as-is. Hmmm. - - install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' - '--record=INSTALLED_FILES') % def_setup_call - - script_options = [ - ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), - ('build', 'build_script', def_build), - ('install', 'install_script', install_cmd), - ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"), - ('verifyscript', 'verify_script', None), - ('pre', 'pre_install', None), - ('post', 'post_install', None), - ('preun', 'pre_uninstall', None), - ('postun', 'post_uninstall', None), - ] - - for (rpm_opt, attr, default) in script_options: - # Insert contents of file referred to, if no file is referred to - # use 'default' as contents of script - val = getattr(self, attr) - if val or default: - spec_file.extend([ - '', - '%' + rpm_opt,]) - if val: - with open(val) as f: - spec_file.extend(f.read().split('\n')) - else: - spec_file.append(default) - - - # files section - spec_file.extend([ - '', - '%files -f INSTALLED_FILES', - '%defattr(-,root,root)', - ]) - - if self.doc_files: - spec_file.append('%doc ' + ' '.join(self.doc_files)) - - if self.changelog: - spec_file.extend([ - '', - '%changelog',]) - spec_file.extend(self.changelog) - - return spec_file - - def _format_changelog(self, changelog): - """Format the changelog correctly and convert it to a list of strings - """ - if not changelog: - return changelog - new_changelog = [] - for line in changelog.strip().split('\n'): - line = line.strip() - if line[0] == '*': - new_changelog.extend(['', line]) - elif line[0] == '-': - new_changelog.append(line) - else: - new_changelog.append(' ' + line) - - # strip trailing newline inserted by first changelog entry - if not new_changelog[0]: - del new_changelog[0] - - return new_changelog diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py b/venv/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py deleted file mode 100644 index 0e9ddaa..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py +++ /dev/null @@ -1,377 +0,0 @@ -"""distutils.command.bdist_wininst - -Implements the Distutils 'bdist_wininst' command: create a windows installer -exe-program.""" - -import os -import sys -import warnings -from distutils.core import Command -from distutils.util import get_platform -from distutils.dir_util import remove_tree -from distutils.errors import * -from distutils.sysconfig import get_python_version -from distutils import log - -class bdist_wininst(Command): - - description = "create an executable installer for MS Windows" - - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized) " - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('bitmap=', 'b', - "bitmap to use for the installer 
instead of python-powered logo"), - ('title=', 't', - "title to display on the installer background instead of default"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after " - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. This script need not be in the " - "distribution"), - ('user-access-control=', None, - "specify Vista's UAC handling - 'none'/default=no " - "handling, 'auto'=use UAC if target Python installed for " - "all users, 'force'=always use UAC"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows - _unsupported = (sys.platform != "win32") - - def __init__(self, *args, **kw): - super().__init__(*args, **kw) - warnings.warn("bdist_wininst command is deprecated since Python 3.8, " - "use bdist_wheel (wheel packages) instead", - DeprecationWarning, 2) - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.no_target_compile = 0 - self.no_target_optimize = 0 - self.target_version = None - self.dist_dir = None - self.bitmap = None - self.title = None - self.skip_build = None - self.install_script = None - self.pre_install_script = None - self.user_access_control = None - - - def finalize_options(self): - self.set_undefined_options('bdist', ('skip_build', 'skip_build')) - - if self.bdist_dir is None: - if self.skip_build and self.plat_name: - # If build is skipped and plat_name is overridden, bdist will - # not see the correct 'plat_name' - so set that up manually. 
- bdist = self.distribution.get_command_obj('bdist') - bdist.plat_name = self.plat_name - # next the command will be initialized using that name - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'wininst') - - if not self.target_version: - self.target_version = "" - - if not self.skip_build and self.distribution.has_ext_modules(): - short_version = get_python_version() - if self.target_version and self.target_version != short_version: - raise DistutilsOptionError( - "target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,)) - self.target_version = short_version - - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ) - - if self.install_script: - for script in self.distribution.scripts: - if self.install_script == os.path.basename(script): - break - else: - raise DistutilsOptionError( - "install_script '%s' not found in scripts" - % self.install_script) - - def run(self): - if (sys.platform != "win32" and - (self.distribution.has_ext_modules() or - self.distribution.has_c_libraries())): - raise DistutilsPlatformError \ - ("distribution contains extensions and/or C libraries; " - "must be compiled on a Windows 32 platform") - - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=1) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = 0 - install.plat_name = self.plat_name - - install_lib = self.reinitialize_command('install_lib') - # we do not want to include pyc or pyo files - install_lib.compile = 0 - install_lib.optimize = 0 - - if self.distribution.has_ext_modules(): - # If we are building an installer for a Python version other - # than the one we are currently running, then we need to ensure - # our build_lib reflects the other Python version rather than ours. - # Note that for target_version!=sys.version, we must have skipped the - # build step, so there is no issue with enforcing the build of this - # version. - target_version = self.target_version - if not target_version: - assert self.skip_build, "Should have already checked this" - target_version = '%d.%d' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, target_version) - build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) - - # Use a custom scheme for the zip-file, because we have to decide - # at installation time which scheme to use. - for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'): - value = key.upper() - if key == 'headers': - value = value + '/Include/$dist_name' - setattr(install, - 'install_' + key, - value) - - log.info("installing to %s", self.bdist_dir) - install.ensure_finalized() - - # avoid warning of 'install_lib' about installing - # into a directory not in sys.path - sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) - - install.run() - - del sys.path[0] - - # And make an archive relative to the root of the - # pseudo-installation tree. 
- from tempfile import mktemp - archive_basename = mktemp() - fullname = self.distribution.get_fullname() - arcname = self.make_archive(archive_basename, "zip", - root_dir=self.bdist_dir) - # create an exe containing the zip-file - self.create_exe(arcname, fullname, self.bitmap) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_wininst', pyversion, - self.get_installer_filename(fullname))) - # remove the zip-file again - log.debug("removing temporary file '%s'", arcname) - os.remove(arcname) - - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - def get_inidata(self): - # Return data describing the installation. - lines = [] - metadata = self.distribution.metadata - - # Write the [metadata] section. - lines.append("[metadata]") - - # 'info' will be displayed in the installer's dialog box, - # describing the items to be installed. - info = (metadata.long_description or '') + '\n' - - # Escape newline characters - def escape(s): - return s.replace("\n", "\\n") - - for name in ["author", "author_email", "description", "maintainer", - "maintainer_email", "name", "url", "version"]: - data = getattr(metadata, name, "") - if data: - info = info + ("\n %s: %s" % \ - (name.capitalize(), escape(data))) - lines.append("%s=%s" % (name, escape(data))) - - # The [setup] section contains entries controlling - # the installer runtime. - lines.append("\n[Setup]") - if self.install_script: - lines.append("install_script=%s" % self.install_script) - lines.append("info=%s" % escape(info)) - lines.append("target_compile=%d" % (not self.no_target_compile)) - lines.append("target_optimize=%d" % (not self.no_target_optimize)) - if self.target_version: - lines.append("target_version=%s" % self.target_version) - if self.user_access_control: - lines.append("user_access_control=%s" % self.user_access_control) - - title = self.title or self.distribution.get_fullname() - lines.append("title=%s" % escape(title)) - import time - import distutils - build_info = "Built %s with distutils-%s" % \ - (time.ctime(time.time()), distutils.__version__) - lines.append("build_info=%s" % build_info) - return "\n".join(lines) - - def create_exe(self, arcname, fullname, bitmap=None): - import struct - - self.mkpath(self.dist_dir) - - cfgdata = self.get_inidata() - - installer_name = self.get_installer_filename(fullname) - self.announce("creating %s" % installer_name) - - if bitmap: - with open(bitmap, "rb") as f: - bitmapdata = f.read() - bitmaplen = len(bitmapdata) - else: - bitmaplen = 0 - - with open(installer_name, "wb") as file: - file.write(self.get_exe_bytes()) - if bitmap: - file.write(bitmapdata) - - # Convert cfgdata from unicode to ascii, mbcs encoded - if isinstance(cfgdata, str): - cfgdata = cfgdata.encode("mbcs") - - # Append the pre-install script - cfgdata = cfgdata + b"\0" - if self.pre_install_script: - # We need to normalize newlines, so we open in text mode and - # convert back to bytes. "latin-1" simply avoids any possible - # failures. - with open(self.pre_install_script, "r", - encoding="latin-1") as script: - script_data = script.read().encode("latin-1") - cfgdata = cfgdata + script_data + b"\n\0" - else: - # empty pre-install script - cfgdata = cfgdata + b"\0" - file.write(cfgdata) - - # The 'magic number' 0x1234567B is used to make sure that the - # binary layout of 'cfgdata' is what the wininst.exe binary - # expects. 
If the layout changes, increment that number, make - # the corresponding changes to the wininst.exe sources, and - # recompile them. - header = struct.pack("' under the base build directory. We only use one of - # them for a given distribution, though -- - if self.build_purelib is None: - self.build_purelib = os.path.join(self.build_base, 'lib') - if self.build_platlib is None: - self.build_platlib = os.path.join(self.build_base, - 'lib' + plat_specifier) - - # 'build_lib' is the actual directory that we will use for this - # particular module distribution -- if user didn't supply it, pick - # one of 'build_purelib' or 'build_platlib'. - if self.build_lib is None: - if self.distribution.has_ext_modules(): - self.build_lib = self.build_platlib - else: - self.build_lib = self.build_purelib - - # 'build_temp' -- temporary directory for compiler turds, - # "build/temp." - if self.build_temp is None: - self.build_temp = os.path.join(self.build_base, - 'temp' + plat_specifier) - if self.build_scripts is None: - self.build_scripts = os.path.join(self.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) - - if self.executable is None and sys.executable: - self.executable = os.path.normpath(sys.executable) - - if isinstance(self.parallel, str): - try: - self.parallel = int(self.parallel) - except ValueError: - raise DistutilsOptionError("parallel should be an integer") - - def run(self): - # Run all relevant sub-commands. This will be some subset of: - # - build_py - pure Python modules - # - build_clib - standalone C libraries - # - build_ext - Python extensions - # - build_scripts - (Python) scripts - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - - # -- Predicates for the sub-command list --------------------------- - - def has_pure_modules(self): - return self.distribution.has_pure_modules() - - def has_c_libraries(self): - return self.distribution.has_c_libraries() - - def has_ext_modules(self): - return self.distribution.has_ext_modules() - - def has_scripts(self): - return self.distribution.has_scripts() - - - sub_commands = [('build_py', has_pure_modules), - ('build_clib', has_c_libraries), - ('build_ext', has_ext_modules), - ('build_scripts', has_scripts), - ] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py deleted file mode 100644 index 3e20ef2..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/build_clib.py +++ /dev/null @@ -1,209 +0,0 @@ -"""distutils.command.build_clib - -Implements the Distutils 'build_clib' command, to build a C/C++ library -that is included in the module distribution and needed by an extension -module.""" - - -# XXX this module has *lots* of code ripped-off quite transparently from -# build_ext.py -- not surprisingly really, as the work required to build -# a static library from a collection of C source files is not really all -# that different from what's required to build a shared object file from -# a collection of C source files. Nevertheless, I haven't done the -# necessary refactoring to account for the overlap in code between the -# two modules, mainly because a number of subtle details changed in the -# cut 'n paste. Sigh. 
- -import os -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler -from distutils import log - -def show_compilers(): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build_clib(Command): - - description = "build C/C++ libraries used by Python extensions" - - user_options = [ - ('build-clib=', 'b', - "directory to build C/C++ libraries to"), - ('build-temp=', 't', - "directory to put temporary build by-products"), - ('debug', 'g', - "compile with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_clib = None - self.build_temp = None - - # List of libraries to build - self.libraries = None - - # Compilation options for all libraries - self.include_dirs = None - self.define = None - self.undef = None - self.debug = None - self.force = 0 - self.compiler = None - - - def finalize_options(self): - # This might be confusing: both build-clib and build-temp default - # to build-temp as defined by the "build" command. This is because - # I think that C libraries are really just temporary build - # by-products, at least from the point of view of building Python - # extensions -- but I want to keep my options open. - self.set_undefined_options('build', - ('build_temp', 'build_clib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force')) - - self.libraries = self.distribution.libraries - if self.libraries: - self.check_library_list(self.libraries) - - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # XXX same as for build_ext -- what about 'self.define' and - # 'self.undef' ? - - - def run(self): - if not self.libraries: - return - - # Yech -- this is cut 'n pasted from build_ext.py! - from distutils.ccompiler import new_compiler - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - - if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - - self.build_libraries(self.libraries) - - - def check_library_list(self, libraries): - """Ensure that the list of libraries is valid. - - `library` is presumably provided as a command option 'libraries'. - This method checks that it is a list of 2-tuples, where the tuples - are (library_name, build_info_dict). - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. 
- """ - if not isinstance(libraries, list): - raise DistutilsSetupError( - "'libraries' option must be a list of tuples") - - for lib in libraries: - if not isinstance(lib, tuple) and len(lib) != 2: - raise DistutilsSetupError( - "each element of 'libraries' must a 2-tuple") - - name, build_info = lib - - if not isinstance(name, str): - raise DistutilsSetupError( - "first element of each tuple in 'libraries' " - "must be a string (the library name)") - - if '/' in name or (os.sep != '/' and os.sep in name): - raise DistutilsSetupError("bad library name '%s': " - "may not contain directory separators" % lib[0]) - - if not isinstance(build_info, dict): - raise DistutilsSetupError( - "second element of each tuple in 'libraries' " - "must be a dictionary (build info)") - - - def get_library_names(self): - # Assume the library list is valid -- 'check_library_list()' is - # called from 'finalize_options()', so it should be! - if not self.libraries: - return None - - lib_names = [] - for (lib_name, build_info) in self.libraries: - lib_names.append(lib_name) - return lib_names - - - def get_source_files(self): - self.check_library_list(self.libraries) - filenames = [] - for (lib_name, build_info) in self.libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - - filenames.extend(sources) - return filenames - - - def build_libraries(self, libraries): - for (lib_name, build_info) in libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - sources = list(sources) - - log.info("building '%s' library", lib_name) - - # First, compile the source code to object files in the library - # directory. (This should probably change to putting object - # files in a temporary build directory.) - macros = build_info.get('macros') - include_dirs = build_info.get('include_dirs') - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - debug=self.debug) - - # Now "link" the object files together into a static library. - # (On Unix at least, this isn't really linking -- it just - # builds an archive. Whatever.) - self.compiler.create_static_lib(objects, lib_name, - output_dir=self.build_clib, - debug=self.debug) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py deleted file mode 100644 index 181671b..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/build_ext.py +++ /dev/null @@ -1,755 +0,0 @@ -"""distutils.command.build_ext - -Implements the Distutils 'build_ext' command, for building extension -modules (currently limited to C extensions, should accommodate C++ -extensions ASAP).""" - -import contextlib -import os -import re -import sys -from distutils.core import Command -from distutils.errors import * -from distutils.sysconfig import customize_compiler, get_python_version -from distutils.sysconfig import get_config_h_filename -from distutils.dep_util import newer_group -from distutils.extension import Extension -from distutils.util import get_platform -from distutils import log -from . 
import py37compat - -from site import USER_BASE - -# An extension name is just a dot-separated list of Python NAMEs (ie. -# the same as a fully-qualified module name). -extension_name_re = re.compile \ - (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') - - -def show_compilers (): - from distutils.ccompiler import show_compilers - show_compilers() - - -class build_ext(Command): - - description = "build C/C++ extensions (compile/link to build directory)" - - # XXX thoughts on how to deal with complex command-line options like - # these, i.e. how to make it so fancy_getopt can suck them off the - # command line and make it look like setup.py defined the appropriate - # lists of tuples of what-have-you. - # - each command needs a callback to process its command-line options - # - Command.__init__() needs access to its share of the whole - # command line (must ultimately come from - # Distribution.parse_command_line()) - # - it then calls the current command class' option-parsing - # callback to deal with weird options like -D, which have to - # parse the option text and churn out some custom data - # structure - # - that data structure (in this case, a list of 2-tuples) - # will then be present in the command object by the time - # we get to finalize_options() (i.e. the constructor - # takes care of both command-line and client options - # in between initialize_options() and finalize_options()) - - sep_by = " (separated by '%s')" % os.pathsep - user_options = [ - ('build-lib=', 'b', - "directory for compiled extension modules"), - ('build-temp=', 't', - "directory for temporary files (build by-products)"), - ('plat-name=', 'p', - "platform name to cross-compile for, if supported " - "(default: %s)" % get_platform()), - ('inplace', 'i', - "ignore build-lib and put compiled extensions into the source " + - "directory alongside your pure Python modules"), - ('include-dirs=', 'I', - "list of directories to search for header files" + sep_by), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries" + sep_by), - ('rpath=', 'R', - "directories to search for shared C libraries at runtime"), - ('link-objects=', 'O', - "extra explicit link objects to include in the link"), - ('debug', 'g', - "compile/link with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('swig-cpp', None, - "make SWIG create C++ files (default is C)"), - ('swig-opts=', None, - "list of SWIG command line options"), - ('swig=', None, - "path to the SWIG executable"), - ('user', None, - "add user include, library and rpath") - ] - - boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.extensions = None - self.build_lib = None - self.plat_name = None - self.build_temp = None - self.inplace = 0 - self.package = None - - self.include_dirs = None - self.define = None - self.undef = None - self.libraries = None - self.library_dirs = None - self.rpath = None - self.link_objects = None - self.debug = None - self.force = None - self.compiler = None - self.swig = None - self.swig_cpp = None - self.swig_opts = None - self.user = None 
- self.parallel = None - - def finalize_options(self): - from distutils import sysconfig - - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force'), - ('parallel', 'parallel'), - ('plat_name', 'plat_name'), - ) - - if self.package is None: - self.package = self.distribution.ext_package - - self.extensions = self.distribution.ext_modules - - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. - py_include = sysconfig.get_python_inc() - plat_py_include = sysconfig.get_python_inc(plat_specific=1) - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # If in a virtualenv, add its include directory - # Issue 16116 - if sys.exec_prefix != sys.base_exec_prefix: - self.include_dirs.append(os.path.join(sys.exec_prefix, 'include')) - - # Put the Python "system" include dir at the end, so that - # any local include dirs take precedence. - self.include_dirs.extend(py_include.split(os.path.pathsep)) - if plat_py_include != py_include: - self.include_dirs.extend( - plat_py_include.split(os.path.pathsep)) - - self.ensure_string_list('libraries') - self.ensure_string_list('link_objects') - - # Life is easier if we're not forever checking for None, so - # simplify these options to empty lists if unset - if self.libraries is None: - self.libraries = [] - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - if self.rpath is None: - self.rpath = [] - elif isinstance(self.rpath, str): - self.rpath = self.rpath.split(os.pathsep) - - # for extensions under windows use different directories - # for Release and Debug builds. - # also Python's library directory must be appended to library_dirs - if os.name == 'nt': - # the 'libs' directory is for binary installs - we assume that - # must be the *native* platform. But we don't really support - # cross-compiling via a binary install anyway, so we let it go. 
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) - if sys.base_exec_prefix != sys.prefix: # Issue 16116 - self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) - if self.debug: - self.build_temp = os.path.join(self.build_temp, "Debug") - else: - self.build_temp = os.path.join(self.build_temp, "Release") - - # Append the source distribution include and library directories, - # this allows distutils on windows to work in the source tree - self.include_dirs.append(os.path.dirname(get_config_h_filename())) - self.library_dirs.append(sys.base_exec_prefix) - - # Use the .lib files for the correct architecture - if self.plat_name == 'win32': - suffix = 'win32' - else: - # win-amd64 - suffix = self.plat_name[4:] - new_lib = os.path.join(sys.exec_prefix, 'PCbuild') - if suffix: - new_lib = os.path.join(new_lib, suffix) - self.library_dirs.append(new_lib) - - # For extensions under Cygwin, Python's library directory must be - # appended to library_dirs - if sys.platform[:6] == 'cygwin': - if not sysconfig.python_build: - # building third party extensions - self.library_dirs.append(os.path.join(sys.prefix, "lib", - "python" + get_python_version(), - "config")) - else: - # building python standard extensions - self.library_dirs.append('.') - - # For building extensions with a shared Python library, - # Python's library directory must be appended to library_dirs - # See Issues: #1600860, #4366 - if (sysconfig.get_config_var('Py_ENABLE_SHARED')): - if not sysconfig.python_build: - # building third party extensions - self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) - else: - # building python standard extensions - self.library_dirs.append('.') - - # The argument parsing will result in self.define being a string, but - # it has to be a list of 2-tuples. All the preprocessor symbols - # specified by the 'define' option will be set to '1'. Multiple - # symbols can be separated with commas. - - if self.define: - defines = self.define.split(',') - self.define = [(symbol, '1') for symbol in defines] - - # The option for macros to undefine is also a string from the - # option parsing, but has to be a list. Multiple symbols can also - # be separated with commas here. - if self.undef: - self.undef = self.undef.split(',') - - if self.swig_opts is None: - self.swig_opts = [] - else: - self.swig_opts = self.swig_opts.split(' ') - - # Finally add the user include and library directories if requested - if self.user: - user_include = os.path.join(USER_BASE, "include") - user_lib = os.path.join(USER_BASE, "lib") - if os.path.isdir(user_include): - self.include_dirs.append(user_include) - if os.path.isdir(user_lib): - self.library_dirs.append(user_lib) - self.rpath.append(user_lib) - - if isinstance(self.parallel, str): - try: - self.parallel = int(self.parallel) - except ValueError: - raise DistutilsOptionError("parallel should be an integer") - - def run(self): - from distutils.ccompiler import new_compiler - - # 'self.extensions', as supplied by setup.py, is a list of - # Extension instances. See the documentation for Extension (in - # distutils.extension) for details. - # - # For backwards compatibility with Distutils 0.8.2 and earlier, we - # also allow the 'extensions' list to be a list of tuples: - # (ext_name, build_info) - # where build_info is a dictionary containing everything that - # Extension instances do except the name, with a few things being - # differently named. We convert these 2-tuples to Extension - # instances as needed. 
- - if not self.extensions: - return - - # If we were asked to build any C/C++ libraries, make sure that the - # directory where we put them is in the library search path for - # linking extensions. - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.libraries.extend(build_clib.get_library_names() or []) - self.library_dirs.append(build_clib.build_clib) - - # Setup the CCompiler object that we'll use to do all the - # compiling and linking - self.compiler = new_compiler(compiler=self.compiler, - verbose=self.verbose, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - # If we are cross-compiling, init the compiler now (if we are not - # cross-compiling, init would not hurt, but people may rely on - # late initialization of compiler even if they shouldn't...) - if os.name == 'nt' and self.plat_name != get_platform(): - self.compiler.initialize(self.plat_name) - - # And make sure that any compile/link-related options (which might - # come from the command-line or from the setup script) are set in - # that CCompiler object -- that way, they automatically apply to - # all compiling and linking done here. - if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - if self.libraries is not None: - self.compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - self.compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - self.compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - self.compiler.set_link_objects(self.link_objects) - - # Now actually compile and link everything. - self.build_extensions() - - def check_extensions_list(self, extensions): - """Ensure that the list of extensions (presumably provided as a - command option 'extensions') is valid, i.e. it is a list of - Extension objects. We also support the old-style list of 2-tuples, - where the tuples are (ext_name, build_info), which are converted to - Extension instances here. - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - if not isinstance(extensions, list): - raise DistutilsSetupError( - "'ext_modules' option must be a list of Extension instances") - - for i, ext in enumerate(extensions): - if isinstance(ext, Extension): - continue # OK! (assume type-checking done - # by Extension constructor) - - if not isinstance(ext, tuple) or len(ext) != 2: - raise DistutilsSetupError( - "each element of 'ext_modules' option must be an " - "Extension instance or 2-tuple") - - ext_name, build_info = ext - - log.warn("old-style (ext_name, build_info) tuple found in " - "ext_modules for extension '%s' " - "-- please convert to Extension instance", ext_name) - - if not (isinstance(ext_name, str) and - extension_name_re.match(ext_name)): - raise DistutilsSetupError( - "first element of each tuple in 'ext_modules' " - "must be the extension name (a string)") - - if not isinstance(build_info, dict): - raise DistutilsSetupError( - "second element of each tuple in 'ext_modules' " - "must be a dictionary (build info)") - - # OK, the (ext_name, build_info) dict is type-safe: convert it - # to an Extension instance. 
- ext = Extension(ext_name, build_info['sources']) - - # Easy stuff: one-to-one mapping from dict elements to - # instance attributes. - for key in ('include_dirs', 'library_dirs', 'libraries', - 'extra_objects', 'extra_compile_args', - 'extra_link_args'): - val = build_info.get(key) - if val is not None: - setattr(ext, key, val) - - # Medium-easy stuff: same syntax/semantics, different names. - ext.runtime_library_dirs = build_info.get('rpath') - if 'def_file' in build_info: - log.warn("'def_file' element of build info dict " - "no longer supported") - - # Non-trivial stuff: 'macros' split into 'define_macros' - # and 'undef_macros'. - macros = build_info.get('macros') - if macros: - ext.define_macros = [] - ext.undef_macros = [] - for macro in macros: - if not (isinstance(macro, tuple) and len(macro) in (1, 2)): - raise DistutilsSetupError( - "'macros' element of build info dict " - "must be 1- or 2-tuple") - if len(macro) == 1: - ext.undef_macros.append(macro[0]) - elif len(macro) == 2: - ext.define_macros.append(macro) - - extensions[i] = ext - - def get_source_files(self): - self.check_extensions_list(self.extensions) - filenames = [] - - # Wouldn't it be neat if we knew the names of header files too... - for ext in self.extensions: - filenames.extend(ext.sources) - return filenames - - def get_outputs(self): - # Sanity check the 'extensions' list -- can't assume this is being - # done in the same run as a 'build_extensions()' call (in fact, we - # can probably assume that it *isn't*!). - self.check_extensions_list(self.extensions) - - # And build the list of output (built) filenames. Note that this - # ignores the 'inplace' flag, and assumes everything goes in the - # "build" tree. - outputs = [] - for ext in self.extensions: - outputs.append(self.get_ext_fullpath(ext.name)) - return outputs - - def build_extensions(self): - # First, sanity-check the 'extensions' list - self.check_extensions_list(self.extensions) - if self.parallel: - self._build_extensions_parallel() - else: - self._build_extensions_serial() - - def _build_extensions_parallel(self): - workers = self.parallel - if self.parallel is True: - workers = os.cpu_count() # may return None - try: - from concurrent.futures import ThreadPoolExecutor - except ImportError: - workers = None - - if workers is None: - self._build_extensions_serial() - return - - with ThreadPoolExecutor(max_workers=workers) as executor: - futures = [executor.submit(self.build_extension, ext) - for ext in self.extensions] - for ext, fut in zip(self.extensions, futures): - with self._filter_build_errors(ext): - fut.result() - - def _build_extensions_serial(self): - for ext in self.extensions: - with self._filter_build_errors(ext): - self.build_extension(ext) - - @contextlib.contextmanager - def _filter_build_errors(self, ext): - try: - yield - except (CCompilerError, DistutilsError, CompileError) as e: - if not ext.optional: - raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) - - def build_extension(self, ext): - sources = ext.sources - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'ext_modules' option (extension '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % ext.name) - # sort to make the resulting .so file build reproducible - sources = sorted(sources) - - ext_path = self.get_ext_fullpath(ext.name) - depends = sources + ext.depends - if not (self.force or newer_group(depends, ext_path, 'newer')): - log.debug("skipping '%s' extension 
(up-to-date)", ext.name) - return - else: - log.info("building '%s' extension", ext.name) - - # First, scan the sources for SWIG definition files (.i), run - # SWIG on 'em to create .c files, and modify the sources list - # accordingly. - sources = self.swig_sources(sources, ext) - - # Next, compile the source code to object files. - - # XXX not honouring 'define_macros' or 'undef_macros' -- the - # CCompiler API needs to change to accommodate this, and I - # want to do one thing at a time! - - # Two possible sources for extra compiler arguments: - # - 'extra_compile_args' in Extension object - # - CFLAGS environment variable (not particularly - # elegant, but people seem to expect it and I - # guess it's useful) - # The environment variable should take precedence, and - # any sensible compiler will give precedence to later - # command line args. Hence we combine them in order: - extra_args = ext.extra_compile_args or [] - - macros = ext.define_macros[:] - for undef in ext.undef_macros: - macros.append((undef,)) - - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=ext.include_dirs, - debug=self.debug, - extra_postargs=extra_args, - depends=ext.depends) - - # XXX outdated variable, kept here in case third-part code - # needs it. - self._built_objects = objects[:] - - # Now link the object files together into a "shared object" -- - # of course, first we have to figure out all the other things - # that go into the mix. - if ext.extra_objects: - objects.extend(ext.extra_objects) - extra_args = ext.extra_link_args or [] - - # Detect target language, if not provided - language = ext.language or self.compiler.detect_language(sources) - - self.compiler.link_shared_object( - objects, ext_path, - libraries=self.get_libraries(ext), - library_dirs=ext.library_dirs, - runtime_library_dirs=ext.runtime_library_dirs, - extra_postargs=extra_args, - export_symbols=self.get_export_symbols(ext), - debug=self.debug, - build_temp=self.build_temp, - target_lang=language) - - def swig_sources(self, sources, extension): - """Walk the list of source files in 'sources', looking for SWIG - interface (.i) files. Run SWIG on all that are found, and - return a modified 'sources' list with SWIG source files replaced - by the generated C (or C++) files. - """ - new_sources = [] - swig_sources = [] - swig_targets = {} - - # XXX this drops generated C/C++ files into the source tree, which - # is fine for developers who want to distribute the generated - # source -- but there should be an option to put SWIG output in - # the temp dir. 
- - if self.swig_cpp: - log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") - - if self.swig_cpp or ('-c++' in self.swig_opts) or \ - ('-c++' in extension.swig_opts): - target_ext = '.cpp' - else: - target_ext = '.c' - - for source in sources: - (base, ext) = os.path.splitext(source) - if ext == ".i": # SWIG interface file - new_sources.append(base + '_wrap' + target_ext) - swig_sources.append(source) - swig_targets[source] = new_sources[-1] - else: - new_sources.append(source) - - if not swig_sources: - return new_sources - - swig = self.swig or self.find_swig() - swig_cmd = [swig, "-python"] - swig_cmd.extend(self.swig_opts) - if self.swig_cpp: - swig_cmd.append("-c++") - - # Do not override commandline arguments - if not self.swig_opts: - for o in extension.swig_opts: - swig_cmd.append(o) - - for source in swig_sources: - target = swig_targets[source] - log.info("swigging %s to %s", source, target) - self.spawn(swig_cmd + ["-o", target, source]) - - return new_sources - - def find_swig(self): - """Return the name of the SWIG executable. On Unix, this is - just "swig" -- it should be in the PATH. Tries a bit harder on - Windows. - """ - if os.name == "posix": - return "swig" - elif os.name == "nt": - # Look for SWIG in its standard installation directory on - # Windows (or so I presume!). If we find it there, great; - # if not, act like Unix and assume it's in the PATH. - for vers in ("1.3", "1.2", "1.1"): - fn = os.path.join("c:\\swig%s" % vers, "swig.exe") - if os.path.isfile(fn): - return fn - else: - return "swig.exe" - else: - raise DistutilsPlatformError( - "I don't know how to find (much less run) SWIG " - "on platform '%s'" % os.name) - - # -- Name generators ----------------------------------------------- - # (extension names, filenames, whatever) - def get_ext_fullpath(self, ext_name): - """Returns the path of the filename for a given extension. - - The file is located in `build_lib` or directly in the package - (inplace option). - """ - fullname = self.get_ext_fullname(ext_name) - modpath = fullname.split('.') - filename = self.get_ext_filename(modpath[-1]) - - if not self.inplace: - # no further work needed - # returning : - # build_dir/package/path/filename - filename = os.path.join(*modpath[:-1]+[filename]) - return os.path.join(self.build_lib, filename) - - # the inplace option requires to find the package directory - # using the build_py command for that - package = '.'.join(modpath[0:-1]) - build_py = self.get_finalized_command('build_py') - package_dir = os.path.abspath(build_py.get_package_dir(package)) - - # returning - # package_dir/filename - return os.path.join(package_dir, filename) - - def get_ext_fullname(self, ext_name): - """Returns the fullname of a given extension name. - - Adds the `package.` prefix""" - if self.package is None: - return ext_name - else: - return self.package + '.' + ext_name - - def get_ext_filename(self, ext_name): - r"""Convert the name of an extension (eg. "foo.bar") into the name - of the file from which it will be loaded (eg. "foo/bar.so", or - "foo\bar.pyd"). - """ - from distutils.sysconfig import get_config_var - ext_path = ext_name.split('.') - ext_suffix = get_config_var('EXT_SUFFIX') - return os.path.join(*ext_path) + ext_suffix - - def get_export_symbols(self, ext): - """Return the list of symbols that a shared extension has to - export. This either uses 'ext.export_symbols' or, if it's not - provided, "PyInit_" + module_name. 
Only relevant on Windows, where - the .pyd file (DLL) must export the module "PyInit_" function. - """ - name = ext.name.split('.')[-1] - try: - # Unicode module name support as defined in PEP-489 - # https://www.python.org/dev/peps/pep-0489/#export-hook-name - name.encode('ascii') - except UnicodeEncodeError: - suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii') - else: - suffix = "_" + name - - initfunc_name = "PyInit" + suffix - if initfunc_name not in ext.export_symbols: - ext.export_symbols.append(initfunc_name) - return ext.export_symbols - - def get_libraries(self, ext): - """Return the list of libraries to link against when building a - shared extension. On most platforms, this is just 'ext.libraries'; - on Windows, we add the Python library (eg. python20.dll). - """ - # The python library is always needed on Windows. For MSVC, this - # is redundant, since the library is mentioned in a pragma in - # pyconfig.h that MSVC groks. The other Windows compilers all seem - # to need it mentioned explicitly, though, so that's what we do. - # Append '_d' to the python import library on debug builds. - if sys.platform == "win32": - from distutils._msvccompiler import MSVCCompiler - if not isinstance(self.compiler, MSVCCompiler): - template = "python%d%d" - if self.debug: - template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - else: - # On Android only the main executable and LD_PRELOADs are considered - # to be RTLD_GLOBAL, all the dependencies of the main executable - # remain RTLD_LOCAL and so the shared libraries must be linked with - # libpython when python is built with a shared python library (issue - # bpo-21536). - # On Cygwin (and if required, other POSIX-like platforms based on - # Windows like MinGW) it is simply necessary that all symbols in - # shared libraries are resolved at link time. 
- from distutils.sysconfig import get_config_var - link_libpython = False - if get_config_var('Py_ENABLE_SHARED'): - # A native build on an Android device or on Cygwin - if hasattr(sys, 'getandroidapilevel'): - link_libpython = True - elif sys.platform == 'cygwin': - link_libpython = True - elif '_PYTHON_HOST_PLATFORM' in os.environ: - # We are cross-compiling for one of the relevant platforms - if get_config_var('ANDROID_API_LEVEL') != 0: - link_libpython = True - elif get_config_var('MACHDEP') == 'cygwin': - link_libpython = True - - if link_libpython: - ldversion = get_config_var('LDVERSION') - return ext.libraries + ['python' + ldversion] - - return ext.libraries + py37compat.pythonlib() diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_py.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_py.py deleted file mode 100644 index 7ef9bce..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/build_py.py +++ /dev/null @@ -1,392 +0,0 @@ -"""distutils.command.build_py - -Implements the Distutils 'build_py' command.""" - -import os -import importlib.util -import sys -import glob - -from distutils.core import Command -from distutils.errors import * -from distutils.util import convert_path -from distutils import log - -class build_py (Command): - - description = "\"build\" pure Python modules (copy to build directory)" - - user_options = [ - ('build-lib=', 'd', "directory to \"build\" (copy) to"), - ('compile', 'c', "compile .py to .pyc"), - ('no-compile', None, "don't compile .py files [default]"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('force', 'f', "forcibly build everything (ignore file timestamps)"), - ] - - boolean_options = ['compile', 'force'] - negative_opt = {'no-compile' : 'compile'} - - def initialize_options(self): - self.build_lib = None - self.py_modules = None - self.package = None - self.package_data = None - self.package_dir = None - self.compile = 0 - self.optimize = 0 - self.force = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('force', 'force')) - - # Get the distribution options that are aliases for build_py - # options -- list of packages and list of modules. - self.packages = self.distribution.packages - self.py_modules = self.distribution.py_modules - self.package_data = self.distribution.package_data - self.package_dir = {} - if self.distribution.package_dir: - for name, path in self.distribution.package_dir.items(): - self.package_dir[name] = convert_path(path) - self.data_files = self.get_data_files() - - # Ick, copied straight from install_lib.py (fancy_getopt needs a - # type system! Hell, *everything* needs a type system!!!) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - assert 0 <= self.optimize <= 2 - except (ValueError, AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") - - def run(self): - # XXX copy_file by default preserves atime and mtime. IMHO this is - # the right thing to do, but perhaps it should be an option -- in - # particular, a site administrator might want installed files to - # reflect the time of installation rather than the last - # modification time before the installed release. 
- - # XXX copy_file by default preserves mode, which appears to be the - # wrong thing to do: if a file is read-only in the working - # directory, we want it to be installed read/write so that the next - # installation of the same module distribution can overwrite it - # without problems. (This might be a Unix-specific issue.) Thus - # we turn off 'preserve_mode' when copying to the build directory, - # since the build directory is supposed to be exactly what the - # installation will look like (ie. we preserve mode when - # installing). - - # Two options control which modules will be installed: 'packages' - # and 'py_modules'. The former lets us work with whole packages, not - # specifying individual modules at all; the latter is for - # specifying modules one-at-a-time. - - if self.py_modules: - self.build_modules() - if self.packages: - self.build_packages() - self.build_package_data() - - self.byte_compile(self.get_outputs(include_bytecode=0)) - - def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - data = [] - if not self.packages: - return data - for package in self.packages: - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = 0 - if src_dir: - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = [] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) - # Files that match more than one pattern are only added once - files.extend([fn for fn in filelist if fn not in files - and os.path.isfile(fn)]) - return files - - def build_package_data(self): - """Copy data files into build directory""" - lastdir = None - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target, - preserve_mode=False) - - def get_package_dir(self, package): - """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any).""" - path = package.split('.') - - if not self.package_dir: - if path: - return os.path.join(*path) - else: - return '' - else: - tail = [] - while path: - try: - pdir = self.package_dir['.'.join(path)] - except KeyError: - tail.insert(0, path[-1]) - del path[-1] - else: - tail.insert(0, pdir) - return os.path.join(*tail) - else: - # Oops, got all the way through 'path' without finding a - # match in package_dir. If package_dir defines a directory - # for the root (nameless) package, then fallback on it; - # otherwise, we might as well have not consulted - # package_dir at all, as we just use the directory implied - # by 'tail' (which should be the same as the original value - # of 'path' at this point). 
- pdir = self.package_dir.get('') - if pdir is not None: - tail.insert(0, pdir) - - if tail: - return os.path.join(*tail) - else: - return '' - - def check_package(self, package, package_dir): - # Empty dir name means current directory, which we can probably - # assume exists. Also, os.path.exists and isdir don't know about - # my "empty string means current dir" convention, so we have to - # circumvent them. - if package_dir != "": - if not os.path.exists(package_dir): - raise DistutilsFileError( - "package directory '%s' does not exist" % package_dir) - if not os.path.isdir(package_dir): - raise DistutilsFileError( - "supposed package directory '%s' exists, " - "but is not a directory" % package_dir) - - # Require __init__.py for all but the "root package" - if package: - init_py = os.path.join(package_dir, "__init__.py") - if os.path.isfile(init_py): - return init_py - else: - log.warn(("package init file '%s' not found " + - "(or not a regular file)"), init_py) - - # Either not in a package at all (__init__.py not expected), or - # __init__.py doesn't exist -- so don't return the filename. - return None - - def check_module(self, module, module_file): - if not os.path.isfile(module_file): - log.warn("file %s (for module %s) not found", module_file, module) - return False - else: - return True - - def find_package_modules(self, package, package_dir): - self.check_package(package, package_dir) - module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) - modules = [] - setup_script = os.path.abspath(self.distribution.script_name) - - for f in module_files: - abs_f = os.path.abspath(f) - if abs_f != setup_script: - module = os.path.splitext(os.path.basename(f))[0] - modules.append((package, module, f)) - else: - self.debug_print("excluding %s" % setup_script) - return modules - - def find_modules(self): - """Finds individually-specified Python modules, ie. those listed by - module name in 'self.py_modules'. Returns a list of tuples (package, - module_base, filename): 'package' is a tuple of the path through - package-space to the module; 'module_base' is the bare (no - packages, no dots) module name, and 'filename' is the path to the - ".py" file (relative to the distribution root) that implements the - module. - """ - # Map package names to tuples of useful info about the package: - # (package_dir, checked) - # package_dir - the directory where we'll find source files for - # this package - # checked - true if we have checked that the package directory - # is valid (exists, contains __init__.py, ... ?) - packages = {} - - # List of (package, module, filename) tuples to return - modules = [] - - # We treat modules-in-packages almost the same as toplevel modules, - # just the "package" for a toplevel is empty (either an empty - # string or empty list, depending on context). 
Differences: - # - don't check for __init__.py in directory for empty package - for module in self.py_modules: - path = module.split('.') - package = '.'.join(path[0:-1]) - module_base = path[-1] - - try: - (package_dir, checked) = packages[package] - except KeyError: - package_dir = self.get_package_dir(package) - checked = 0 - - if not checked: - init_py = self.check_package(package, package_dir) - packages[package] = (package_dir, 1) - if init_py: - modules.append((package, "__init__", init_py)) - - # XXX perhaps we should also check for just .pyc files - # (so greedy closed-source bastards can distribute Python - # modules too) - module_file = os.path.join(package_dir, module_base + ".py") - if not self.check_module(module, module_file): - continue - - modules.append((package, module_base, module_file)) - - return modules - - def find_all_modules(self): - """Compute the list of all modules that will be built, whether - they are specified one-module-at-a-time ('self.py_modules') or - by whole packages ('self.packages'). Return a list of tuples - (package, module, module_file), just like 'find_modules()' and - 'find_package_modules()' do.""" - modules = [] - if self.py_modules: - modules.extend(self.find_modules()) - if self.packages: - for package in self.packages: - package_dir = self.get_package_dir(package) - m = self.find_package_modules(package, package_dir) - modules.extend(m) - return modules - - def get_source_files(self): - return [module[-1] for module in self.find_all_modules()] - - def get_module_outfile(self, build_dir, package, module): - outfile_path = [build_dir] + list(package) + [module + ".py"] - return os.path.join(*outfile_path) - - def get_outputs(self, include_bytecode=1): - modules = self.find_all_modules() - outputs = [] - for (package, module, module_file) in modules: - package = package.split('.') - filename = self.get_module_outfile(self.build_lib, package, module) - outputs.append(filename) - if include_bytecode: - if self.compile: - outputs.append(importlib.util.cache_from_source( - filename, optimization='')) - if self.optimize > 0: - outputs.append(importlib.util.cache_from_source( - filename, optimization=self.optimize)) - - outputs += [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir, filenames in self.data_files - for filename in filenames - ] - - return outputs - - def build_module(self, module, module_file, package): - if isinstance(package, str): - package = package.split('.') - elif not isinstance(package, (list, tuple)): - raise TypeError( - "'package' must be a string (dot-separated), list, or tuple") - - # Now put the module source file into the "build" area -- this is - # easy, we just copy it somewhere under self.build_lib (the build - # directory for Python source). - outfile = self.get_module_outfile(self.build_lib, package, module) - dir = os.path.dirname(outfile) - self.mkpath(dir) - return self.copy_file(module_file, outfile, preserve_mode=0) - - def build_modules(self): - modules = self.find_modules() - for (package, module, module_file) in modules: - # Now "build" the module -- ie. copy the source file to - # self.build_lib (the build directory for Python source). - # (Actually, it gets copied to the directory for this package - # under self.build_lib.) - self.build_module(module, module_file, package) - - def build_packages(self): - for package in self.packages: - # Get list of (package, module, module_file) tuples based on - # scanning the package directory. 
'package' is only included - # in the tuple so that 'find_modules()' and - # 'find_package_tuples()' have a consistent interface; it's - # ignored here (apart from a sanity check). Also, 'module' is - # the *unqualified* module name (ie. no dots, no package -- we - # already know its package!), and 'module_file' is the path to - # the .py file, relative to the current directory - # (ie. including 'package_dir'). - package_dir = self.get_package_dir(package) - modules = self.find_package_modules(package, package_dir) - - # Now loop over the modules we found, "building" each one (just - # copy it to self.build_lib). - for (package_, module, module_file) in modules: - assert package == package_ - self.build_module(module, module_file, package) - - def byte_compile(self, files): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - prefix = self.build_lib - if prefix[-1] != os.sep: - prefix = prefix + os.sep - - # XXX this code is essentially the same as the 'byte_compile() - # method of the "install_lib" command, except for the determination - # of the 'prefix' string. Hmmm. - if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=prefix, dry_run=self.dry_run) - if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=prefix, dry_run=self.dry_run) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py b/venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py deleted file mode 100644 index e3312cf..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py +++ /dev/null @@ -1,152 +0,0 @@ -"""distutils.command.build_scripts - -Implements the Distutils 'build_scripts' command.""" - -import os, re -from stat import ST_MODE -from distutils import sysconfig -from distutils.core import Command -from distutils.dep_util import newer -from distutils.util import convert_path -from distutils import log -import tokenize - -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') - -class build_scripts(Command): - - description = "\"build\" scripts (copy and fixup #! line)" - - user_options = [ - ('build-dir=', 'd', "directory to \"build\" (copy) to"), - ('force', 'f', "forcibly build everything (ignore file timestamps"), - ('executable=', 'e', "specify final destination interpreter path"), - ] - - boolean_options = ['force'] - - - def initialize_options(self): - self.build_dir = None - self.scripts = None - self.force = None - self.executable = None - self.outfiles = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_scripts', 'build_dir'), - ('force', 'force'), - ('executable', 'executable')) - self.scripts = self.distribution.scripts - - def get_source_files(self): - return self.scripts - - def run(self): - if not self.scripts: - return - self.copy_scripts() - - - def copy_scripts(self): - r"""Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. 
- """ - self.mkpath(self.build_dir) - outfiles = [] - updated_files = [] - for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - log.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, "rb") - except OSError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = tokenize.detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - self.warn("%s is an empty file (skipping)" % script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - log.info("copying and adjusting %s -> %s", script, - self.build_dir) - updated_files.append(outfile) - if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" + executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - updated_files.append(outfile) - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - log.info("changing mode of %s", file) - else: - oldmode = os.stat(file)[ST_MODE] & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - log.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - # XXX should we modify self.outfiles? - return outfiles, updated_files diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/check.py b/venv/Lib/site-packages/setuptools/_distutils/command/check.py deleted file mode 100644 index 525540b..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/check.py +++ /dev/null @@ -1,148 +0,0 @@ -"""distutils.command.check - -Implements the Distutils 'check' command. 
-""" -from distutils.core import Command -from distutils.errors import DistutilsSetupError - -try: - # docutils is installed - from docutils.utils import Reporter - from docutils.parsers.rst import Parser - from docutils import frontend - from docutils import nodes - - class SilentReporter(Reporter): - - def __init__(self, source, report_level, halt_level, stream=None, - debug=0, encoding='ascii', error_handler='replace'): - self.messages = [] - super().__init__(source, report_level, halt_level, stream, - debug, encoding, error_handler) - - def system_message(self, level, message, *children, **kwargs): - self.messages.append((level, message, children, kwargs)) - return nodes.system_message(message, level=level, - type=self.levels[level], - *children, **kwargs) - - HAS_DOCUTILS = True -except Exception: - # Catch all exceptions because exceptions besides ImportError probably - # indicate that docutils is not ported to Py3k. - HAS_DOCUTILS = False - -class check(Command): - """This command checks the meta-data of the package. - """ - description = ("perform some checks on the package") - user_options = [('metadata', 'm', 'Verify meta-data'), - ('restructuredtext', 'r', - ('Checks if long string meta-data syntax ' - 'are reStructuredText-compliant')), - ('strict', 's', - 'Will exit with an error if a check fails')] - - boolean_options = ['metadata', 'restructuredtext', 'strict'] - - def initialize_options(self): - """Sets default values for options.""" - self.restructuredtext = 0 - self.metadata = 1 - self.strict = 0 - self._warnings = 0 - - def finalize_options(self): - pass - - def warn(self, msg): - """Counts the number of warnings that occurs.""" - self._warnings += 1 - return Command.warn(self, msg) - - def run(self): - """Runs the command.""" - # perform the various tests - if self.metadata: - self.check_metadata() - if self.restructuredtext: - if HAS_DOCUTILS: - self.check_restructuredtext() - elif self.strict: - raise DistutilsSetupError('The docutils package is needed.') - - # let's raise an error in strict mode, if we have at least - # one warning - if self.strict and self._warnings > 0: - raise DistutilsSetupError('Please correct your package.') - - def check_metadata(self): - """Ensures that all required elements of meta-data are supplied. - - Required fields: - name, version, URL - - Recommended fields: - (author and author_email) or (maintainer and maintainer_email)) - - Warns if any are missing. 
- """ - metadata = self.distribution.metadata - - missing = [] - for attr in ('name', 'version', 'url'): - if not (hasattr(metadata, attr) and getattr(metadata, attr)): - missing.append(attr) - - if missing: - self.warn("missing required meta-data: %s" % ', '.join(missing)) - if metadata.author: - if not metadata.author_email: - self.warn("missing meta-data: if 'author' supplied, " + - "'author_email' should be supplied too") - elif metadata.maintainer: - if not metadata.maintainer_email: - self.warn("missing meta-data: if 'maintainer' supplied, " + - "'maintainer_email' should be supplied too") - else: - self.warn("missing meta-data: either (author and author_email) " + - "or (maintainer and maintainer_email) " + - "should be supplied") - - def check_restructuredtext(self): - """Checks if the long string fields are reST-compliant.""" - data = self.distribution.get_long_description() - for warning in self._check_rst_data(data): - line = warning[-1].get('line') - if line is None: - warning = warning[1] - else: - warning = '%s (line %s)' % (warning[1], line) - self.warn(warning) - - def _check_rst_data(self, data): - """Returns warnings when the provided data doesn't compile.""" - # the include and csv_table directives need this to be a path - source_path = self.distribution.script_name or 'setup.py' - parser = Parser() - settings = frontend.OptionParser(components=(Parser,)).get_default_values() - settings.tab_width = 4 - settings.pep_references = None - settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - error_handler=settings.error_encoding_error_handler) - - document = nodes.document(settings, reporter, source=source_path) - document.note_source(source_path, -1) - try: - parser.parse(data, document) - except AttributeError as e: - reporter.messages.append( - (-1, 'Could not finish the parsing: %s.' 
% e, '', {})) - - return reporter.messages diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/clean.py b/venv/Lib/site-packages/setuptools/_distutils/command/clean.py deleted file mode 100644 index 0cb2701..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/clean.py +++ /dev/null @@ -1,76 +0,0 @@ -"""distutils.command.clean - -Implements the Distutils 'clean' command.""" - -# contributed by Bastian Kleineidam , added 2000-03-18 - -import os -from distutils.core import Command -from distutils.dir_util import remove_tree -from distutils import log - -class clean(Command): - - description = "clean up temporary files from 'build' command" - user_options = [ - ('build-base=', 'b', - "base build directory (default: 'build.build-base')"), - ('build-lib=', None, - "build directory for all modules (default: 'build.build-lib')"), - ('build-temp=', 't', - "temporary build directory (default: 'build.build-temp')"), - ('build-scripts=', None, - "build directory for scripts (default: 'build.build-scripts')"), - ('bdist-base=', None, - "temporary directory for built distributions"), - ('all', 'a', - "remove all build output, not just temporary by-products") - ] - - boolean_options = ['all'] - - def initialize_options(self): - self.build_base = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.bdist_base = None - self.all = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib'), - ('build_scripts', 'build_scripts'), - ('build_temp', 'build_temp')) - self.set_undefined_options('bdist', - ('bdist_base', 'bdist_base')) - - def run(self): - # remove the build/temp. directory (unless it's already - # gone) - if os.path.exists(self.build_temp): - remove_tree(self.build_temp, dry_run=self.dry_run) - else: - log.debug("'%s' does not exist -- can't clean it", - self.build_temp) - - if self.all: - # remove build directories - for directory in (self.build_lib, - self.bdist_base, - self.build_scripts): - if os.path.exists(directory): - remove_tree(directory, dry_run=self.dry_run) - else: - log.warn("'%s' does not exist -- can't clean it", - directory) - - # just for the heck of it, try to remove the base build directory: - # we might have emptied it right now, but if not we don't care - if not self.dry_run: - try: - os.rmdir(self.build_base) - log.info("removing '%s'", self.build_base) - except OSError: - pass diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/config.py b/venv/Lib/site-packages/setuptools/_distutils/command/config.py deleted file mode 100644 index aeda408..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/config.py +++ /dev/null @@ -1,344 +0,0 @@ -"""distutils.command.config - -Implements the Distutils 'config' command, a (mostly) empty command class -that exists mainly to be sub-classed by specific module distributions and -applications. The idea is that while every "config" command is different, -at least they're all named the same, and users always see "config" in the -list of standard commands. Also, this is a good place to put common -configure-like tasks: "try to compile this C code", or "figure out where -this header file lives". 
-""" - -import os, re - -from distutils.core import Command -from distutils.errors import DistutilsExecError -from distutils.sysconfig import customize_compiler -from distutils import log - -LANG_EXT = {"c": ".c", "c++": ".cxx"} - -class config(Command): - - description = "prepare to build" - - user_options = [ - ('compiler=', None, - "specify the compiler type"), - ('cc=', None, - "specify the compiler executable"), - ('include-dirs=', 'I', - "list of directories to search for header files"), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries"), - - ('noisy', None, - "show every action (compile, link, run, ...) taken"), - ('dump-source', None, - "dump generated source files before attempting to compile them"), - ] - - - # The three standard command methods: since the "config" command - # does nothing by default, these are empty. - - def initialize_options(self): - self.compiler = None - self.cc = None - self.include_dirs = None - self.libraries = None - self.library_dirs = None - - # maximal output for now - self.noisy = 1 - self.dump_source = 1 - - # list of temporary files generated along-the-way that we have - # to clean at some point - self.temp_files = [] - - def finalize_options(self): - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - elif isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - if self.libraries is None: - self.libraries = [] - elif isinstance(self.libraries, str): - self.libraries = [self.libraries] - - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - def run(self): - pass - - # Utility methods for actual "config" commands. The interfaces are - # loosely based on Autoconf macros of similar names. Sub-classes - # may use these freely. - - def _check_compiler(self): - """Check that 'self.compiler' really is a CCompiler object; - if not, make it one. - """ - # We do this late, and only on-demand, because this is an expensive - # import. 
- from distutils.ccompiler import CCompiler, new_compiler - if not isinstance(self.compiler, CCompiler): - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=1) - customize_compiler(self.compiler) - if self.include_dirs: - self.compiler.set_include_dirs(self.include_dirs) - if self.libraries: - self.compiler.set_libraries(self.libraries) - if self.library_dirs: - self.compiler.set_library_dirs(self.library_dirs) - - def _gen_temp_sourcefile(self, body, headers, lang): - filename = "_configtest" + LANG_EXT[lang] - with open(filename, "w") as file: - if headers: - for header in headers: - file.write("#include <%s>\n" % header) - file.write("\n") - file.write(body) - if body[-1] != "\n": - file.write("\n") - return filename - - def _preprocess(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - out = "_configtest.i" - self.temp_files.extend([src, out]) - self.compiler.preprocess(src, out, include_dirs=include_dirs) - return (src, out) - - def _compile(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - if self.dump_source: - dump_file(src, "compiling '%s':" % src) - (obj,) = self.compiler.object_filenames([src]) - self.temp_files.extend([src, obj]) - self.compiler.compile([src], include_dirs=include_dirs) - return (src, obj) - - def _link(self, body, headers, include_dirs, libraries, library_dirs, - lang): - (src, obj) = self._compile(body, headers, include_dirs, lang) - prog = os.path.splitext(os.path.basename(src))[0] - self.compiler.link_executable([obj], prog, - libraries=libraries, - library_dirs=library_dirs, - target_lang=lang) - - if self.compiler.exe_extension is not None: - prog = prog + self.compiler.exe_extension - self.temp_files.append(prog) - - return (src, obj, prog) - - def _clean(self, *filenames): - if not filenames: - filenames = self.temp_files - self.temp_files = [] - log.info("removing: %s", ' '.join(filenames)) - for filename in filenames: - try: - os.remove(filename) - except OSError: - pass - - - # XXX these ignore the dry-run flag: what to do, what to do? even if - # you want a dry-run build, you still need some sort of configuration - # info. My inclination is to make it up to the real config command to - # consult 'dry_run', and assume a default (minimal) configuration if - # true. The problem with trying to do it here is that you'd have to - # return either true or false from all the 'try' methods, neither of - # which is correct. - - # XXX need access to the header search path and maybe default macros. - - def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): - """Construct a source file from 'body' (a string containing lines - of C/C++ code) and 'headers' (a list of header files to include) - and run it through the preprocessor. Return true if the - preprocessor succeeded, false if there were any errors. - ('body' probably isn't of much use, but what the heck.) - """ - from distutils.ccompiler import CompileError - self._check_compiler() - ok = True - try: - self._preprocess(body, headers, include_dirs, lang) - except CompileError: - ok = False - - self._clean() - return ok - - def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, - lang="c"): - """Construct a source file (just like 'try_cpp()'), run it through - the preprocessor, and return true if any line of the output matches - 'pattern'. 'pattern' should either be a compiled regex object or a - string containing a regex. 
If both 'body' and 'headers' are None, - preprocesses an empty file -- which can be useful to determine the - symbols the preprocessor and compiler set by default. - """ - self._check_compiler() - src, out = self._preprocess(body, headers, include_dirs, lang) - - if isinstance(pattern, str): - pattern = re.compile(pattern) - - with open(out) as file: - match = False - while True: - line = file.readline() - if line == '': - break - if pattern.search(line): - match = True - break - - self._clean() - return match - - def try_compile(self, body, headers=None, include_dirs=None, lang="c"): - """Try to compile a source file built from 'body' and 'headers'. - Return true on success, false otherwise. - """ - from distutils.ccompiler import CompileError - self._check_compiler() - try: - self._compile(body, headers, include_dirs, lang) - ok = True - except CompileError: - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_link(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile and link a source file, built from 'body' and - 'headers', to executable form. Return true on success, false - otherwise. - """ - from distutils.ccompiler import CompileError, LinkError - self._check_compiler() - try: - self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - ok = True - except (CompileError, LinkError): - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_run(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile, link to an executable, and run a program - built from 'body' and 'headers'. Return true on success, false - otherwise. - """ - from distutils.ccompiler import CompileError, LinkError - self._check_compiler() - try: - src, obj, exe = self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - self.spawn([exe]) - ok = True - except (CompileError, LinkError, DistutilsExecError): - ok = False - - log.info(ok and "success!" or "failure.") - self._clean() - return ok - - - # -- High-level methods -------------------------------------------- - # (these are the ones that are actually likely to be useful - # when implementing a real-world config command!) - - def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=0, call=0): - """Determine if function 'func' is available by constructing a - source file that refers to 'func', and compiles and links it. - If everything succeeds, returns true; otherwise returns false. - - The constructed source file starts out by including the header - files listed in 'headers'. If 'decl' is true, it then declares - 'func' (as "int func()"); you probably shouldn't supply 'headers' - and set 'decl' true in the same call, or you might get errors about - a conflicting declarations for 'func'. Finally, the constructed - 'main()' function either references 'func' or (if 'call' is true) - calls it. 'libraries' and 'library_dirs' are used when - linking. 
- """ - self._check_compiler() - body = [] - if decl: - body.append("int %s ();" % func) - body.append("int main () {") - if call: - body.append(" %s();" % func) - else: - body.append(" %s;" % func) - body.append("}") - body = "\n".join(body) + "\n" - - return self.try_link(body, headers, include_dirs, - libraries, library_dirs) - - def check_lib(self, library, library_dirs=None, headers=None, - include_dirs=None, other_libraries=[]): - """Determine if 'library' is available to be linked against, - without actually checking that any particular symbols are provided - by it. 'headers' will be used in constructing the source file to - be compiled, but the only effect of this is to check if all the - header files listed are available. Any libraries listed in - 'other_libraries' will be included in the link, in case 'library' - has symbols that depend on other libraries. - """ - self._check_compiler() - return self.try_link("int main (void) { }", headers, include_dirs, - [library] + other_libraries, library_dirs) - - def check_header(self, header, include_dirs=None, library_dirs=None, - lang="c"): - """Determine if the system header file named by 'header_file' - exists and can be found by the preprocessor; return true if so, - false otherwise. - """ - return self.try_cpp(body="/* No body */", headers=[header], - include_dirs=include_dirs) - -def dump_file(filename, head=None): - """Dumps a file content into log.info. - - If head is not None, will be dumped before the file content. - """ - if head is None: - log.info('%s', filename) - else: - log.info(head) - file = open(filename) - try: - log.info(file.read()) - finally: - file.close() diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install.py b/venv/Lib/site-packages/setuptools/_distutils/command/install.py deleted file mode 100644 index 41c17d8..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/install.py +++ /dev/null @@ -1,781 +0,0 @@ -"""distutils.command.install - -Implements the Distutils 'install' command.""" - -import sys -import os -import contextlib -import sysconfig -import itertools - -from distutils import log -from distutils.core import Command -from distutils.debug import DEBUG -from distutils.sysconfig import get_config_vars -from distutils.errors import DistutilsPlatformError -from distutils.file_util import write_file -from distutils.util import convert_path, subst_vars, change_root -from distutils.util import get_platform -from distutils.errors import DistutilsOptionError -from .. 
import _collections - -from site import USER_BASE -from site import USER_SITE -HAS_USER_SITE = True - -WINDOWS_SCHEME = { - 'purelib': '{base}/Lib/site-packages', - 'platlib': '{base}/Lib/site-packages', - 'headers': '{base}/Include/{dist_name}', - 'scripts': '{base}/Scripts', - 'data' : '{base}', -} - -INSTALL_SCHEMES = { - 'posix_prefix': { - 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', - 'headers': '{base}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', - 'scripts': '{base}/bin', - 'data' : '{base}', - }, - 'posix_home': { - 'purelib': '{base}/lib/{implementation_lower}', - 'platlib': '{base}/{platlibdir}/{implementation_lower}', - 'headers': '{base}/include/{implementation_lower}/{dist_name}', - 'scripts': '{base}/bin', - 'data' : '{base}', - }, - 'nt': WINDOWS_SCHEME, - 'pypy': { - 'purelib': '{base}/site-packages', - 'platlib': '{base}/site-packages', - 'headers': '{base}/include/{dist_name}', - 'scripts': '{base}/bin', - 'data' : '{base}', - }, - 'pypy_nt': { - 'purelib': '{base}/site-packages', - 'platlib': '{base}/site-packages', - 'headers': '{base}/include/{dist_name}', - 'scripts': '{base}/Scripts', - 'data' : '{base}', - }, - } - -# user site schemes -if HAS_USER_SITE: - INSTALL_SCHEMES['nt_user'] = { - 'purelib': '{usersite}', - 'platlib': '{usersite}', - 'headers': '{userbase}/{implementation}{py_version_nodot_plat}/Include/{dist_name}', - 'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts', - 'data' : '{userbase}', - } - - INSTALL_SCHEMES['posix_user'] = { - 'purelib': '{usersite}', - 'platlib': '{usersite}', - 'headers': - '{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', - 'scripts': '{userbase}/bin', - 'data' : '{userbase}', - } - -# The keys to an installation scheme; if any new types of files are to be -# installed, be sure to add an entry to every installation scheme above, -# and to SCHEME_KEYS here. -SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data') - - -def _load_sysconfig_schemes(): - with contextlib.suppress(AttributeError): - return { - scheme: sysconfig.get_paths(scheme, expand=False) - for scheme in sysconfig.get_scheme_names() - } - - -def _load_schemes(): - """ - Extend default schemes with schemes from sysconfig. - """ - - sysconfig_schemes = _load_sysconfig_schemes() or {} - - return { - scheme: { - **INSTALL_SCHEMES.get(scheme, {}), - **sysconfig_schemes.get(scheme, {}), - } - for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes)) - } - - -def _get_implementation(): - if hasattr(sys, 'pypy_version_info'): - return 'PyPy' - else: - return 'Python' - - -def _select_scheme(ob, name): - scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name))) - vars(ob).update(_remove_set(ob, _scheme_attrs(scheme))) - - -def _remove_set(ob, attrs): - """ - Include only attrs that are None in ob. 
- """ - return { - key: value - for key, value in attrs.items() - if getattr(ob, key) is None - } - - -def _resolve_scheme(name): - os_name, sep, key = name.partition('_') - try: - resolved = sysconfig.get_preferred_scheme(key) - except Exception: - resolved = _pypy_hack(name) - return resolved - - -def _load_scheme(name): - return _load_schemes()[name] - - -def _inject_headers(name, scheme): - """ - Given a scheme name and the resolved scheme, - if the scheme does not include headers, resolve - the fallback scheme for the name and use headers - from it. pypa/distutils#88 - """ - # Bypass the preferred scheme, which may not - # have defined headers. - fallback = _load_scheme(_pypy_hack(name)) - scheme.setdefault('headers', fallback['headers']) - return scheme - - -def _scheme_attrs(scheme): - """Resolve install directories by applying the install schemes.""" - return { - f'install_{key}': scheme[key] - for key in SCHEME_KEYS - } - - -def _pypy_hack(name): - PY37 = sys.version_info < (3, 8) - old_pypy = hasattr(sys, 'pypy_version_info') and PY37 - prefix = not name.endswith(('_user', '_home')) - pypy_name = 'pypy' + '_nt' * (os.name == 'nt') - return pypy_name if old_pypy and prefix else name - - -class install(Command): - - description = "install everything from build directory" - - user_options = [ - # Select installation scheme and set base director(y|ies) - ('prefix=', None, - "installation prefix"), - ('exec-prefix=', None, - "(Unix only) prefix for platform-specific files"), - ('home=', None, - "(Unix only) home directory to install under"), - - # Or, just set the base director(y|ies) - ('install-base=', None, - "base installation directory (instead of --prefix or --home)"), - ('install-platbase=', None, - "base installation directory for platform-specific files " + - "(instead of --exec-prefix or --home)"), - ('root=', None, - "install everything relative to this alternate root directory"), - - # Or, explicitly set the installation scheme - ('install-purelib=', None, - "installation directory for pure Python module distributions"), - ('install-platlib=', None, - "installation directory for non-pure module distributions"), - ('install-lib=', None, - "installation directory for all module distributions " + - "(overrides --install-purelib and --install-platlib)"), - - ('install-headers=', None, - "installation directory for C/C++ headers"), - ('install-scripts=', None, - "installation directory for Python scripts"), - ('install-data=', None, - "installation directory for data files"), - - # Byte-compilation options -- see install_lib.py for details, as - # these are duplicated from there (but only install_lib does - # anything with them). - ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - - # Miscellaneous control options - ('force', 'f', - "force installation (overwrite any existing files)"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - - # Where to install documentation (eventually!) 
- #('doc-format=', None, "format of documentation to generate"), - #('install-man=', None, "directory for Unix man pages"), - #('install-html=', None, "directory for HTML documentation"), - #('install-info=', None, "directory for GNU info files"), - - ('record=', None, - "filename in which to record list of installed files"), - ] - - boolean_options = ['compile', 'force', 'skip-build'] - - if HAS_USER_SITE: - user_options.append(('user', None, - "install in user site-package '%s'" % USER_SITE)) - boolean_options.append('user') - - negative_opt = {'no-compile' : 'compile'} - - - def initialize_options(self): - """Initializes options.""" - # High-level options: these select both an installation base - # and scheme. - self.prefix = None - self.exec_prefix = None - self.home = None - self.user = 0 - - # These select only the installation base; it's up to the user to - # specify the installation scheme (currently, that means supplying - # the --install-{platlib,purelib,scripts,data} options). - self.install_base = None - self.install_platbase = None - self.root = None - - # These options are the actual installation directories; if not - # supplied by the user, they are filled in using the installation - # scheme implied by prefix/exec-prefix/home and the contents of - # that installation scheme. - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_userbase = USER_BASE - self.install_usersite = USER_SITE - - self.compile = None - self.optimize = None - - # Deprecated - # These two are for putting non-packagized distributions into their - # own directory and creating a .pth file if it makes sense. - # 'extra_path' comes from the setup file; 'install_path_file' can - # be turned off if it makes no sense to install a .pth file. (But - # better to install it uselessly than to guess wrong and not - # install it when it's necessary and would be used!) Currently, - # 'install_path_file' is always true unless some outsider meddles - # with it. - self.extra_path = None - self.install_path_file = 1 - - # 'force' forces installation, even if target files are not - # out-of-date. 'skip_build' skips running the "build" command, - # handy if you know it's not necessary. 'warn_dir' (which is *not* - # a user option, it's just there so the bdist_* commands can turn - # it off) determines whether we warn about installing to a - # directory not in sys.path. - self.force = 0 - self.skip_build = 0 - self.warn_dir = 1 - - # These are only here as a conduit from the 'build' command to the - # 'install_*' commands that do the real work. ('build_base' isn't - # actually used anywhere, but it might be useful in future.) They - # are not user options, because if the user told the install - # command where the build directory is, that wouldn't affect the - # build command. - self.build_base = None - self.build_lib = None - - # Not defined yet because we don't know anything about - # documentation yet. 
- #self.install_man = None - #self.install_html = None - #self.install_info = None - - self.record = None - - - # -- Option finalizing methods ------------------------------------- - # (This is rather more involved than for most commands, - # because this is where the policy for installing third- - # party Python modules on various platforms given a wide - # array of user input is decided. Yes, it's quite complex!) - - def finalize_options(self): - """Finalizes options.""" - # This method (and its helpers, like 'finalize_unix()', - # 'finalize_other()', and 'select_scheme()') is where the default - # installation directories for modules, extension modules, and - # anything else we care to install from a Python module - # distribution. Thus, this code makes a pretty important policy - # statement about how third-party stuff is added to a Python - # installation! Note that the actual work of installation is done - # by the relatively simple 'install_*' commands; they just take - # their orders from the installation directory options determined - # here. - - # Check for errors/inconsistencies in the options; first, stuff - # that's wrong on any platform. - - if ((self.prefix or self.exec_prefix or self.home) and - (self.install_base or self.install_platbase)): - raise DistutilsOptionError( - "must supply either prefix/exec-prefix/home or " + - "install-base/install-platbase -- not both") - - if self.home and (self.prefix or self.exec_prefix): - raise DistutilsOptionError( - "must supply either home or prefix/exec-prefix -- not both") - - if self.user and (self.prefix or self.exec_prefix or self.home or - self.install_base or self.install_platbase): - raise DistutilsOptionError("can't combine user with prefix, " - "exec_prefix/home, or install_(plat)base") - - # Next, stuff that's wrong (or dubious) only on certain platforms. - if os.name != "posix": - if self.exec_prefix: - self.warn("exec-prefix option ignored on this platform") - self.exec_prefix = None - - # Now the interesting logic -- so interesting that we farm it out - # to other methods. The goal of these methods is to set the final - # values for the install_{lib,scripts,data,...} options, using as - # input a heady brew of prefix, exec_prefix, home, install_base, - # install_platbase, user-supplied versions of - # install_{purelib,platlib,lib,scripts,data,...}, and the - # install schemes. Phew! - - self.dump_dirs("pre-finalize_{unix,other}") - - if os.name == 'posix': - self.finalize_unix() - else: - self.finalize_other() - - self.dump_dirs("post-finalize_{unix,other}()") - - # Expand configuration variables, tilde, etc. in self.install_base - # and self.install_platbase -- that way, we can use $base or - # $platbase in the other installation directories and not worry - # about needing recursive variable expansion (shudder). - - py_version = sys.version.split()[0] - (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') - try: - abiflags = sys.abiflags - except AttributeError: - # sys.abiflags may not be defined on all platforms. 
- abiflags = '' - local_vars = { - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': '%d.%d' % sys.version_info[:2], - 'py_version_nodot': '%d%d' % sys.version_info[:2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - 'abiflags': abiflags, - 'platlibdir': getattr(sys, 'platlibdir', 'lib'), - 'implementation_lower': _get_implementation().lower(), - 'implementation': _get_implementation(), - } - - # vars for compatibility on older Pythons - compat_vars = dict( - # Python 3.9 and earlier - py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''), - ) - - if HAS_USER_SITE: - local_vars['userbase'] = self.install_userbase - local_vars['usersite'] = self.install_usersite - - self.config_vars = _collections.DictStack( - [compat_vars, sysconfig.get_config_vars(), local_vars]) - - self.expand_basedirs() - - self.dump_dirs("post-expand_basedirs()") - - # Now define config vars for the base directories so we can expand - # everything else. - local_vars['base'] = self.install_base - local_vars['platbase'] = self.install_platbase - - if DEBUG: - from pprint import pprint - print("config vars:") - pprint(dict(self.config_vars)) - - # Expand "~" and configuration variables in the installation - # directories. - self.expand_dirs() - - self.dump_dirs("post-expand_dirs()") - - # Create directories in the home dir: - if self.user: - self.create_home_path() - - # Pick the actual directory to install all modules to: either - # install_purelib or install_platlib, depending on whether this - # module distribution is pure or not. Of course, if the user - # already specified install_lib, use their selection. - if self.install_lib is None: - if self.distribution.has_ext_modules(): # has extensions: non-pure - self.install_lib = self.install_platlib - else: - self.install_lib = self.install_purelib - - - # Convert directories from Unix /-separated syntax to the local - # convention. - self.convert_paths('lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers', - 'userbase', 'usersite') - - # Deprecated - # Well, we're not actually fully completely finalized yet: we still - # have to deal with 'extra_path', which is the hack for allowing - # non-packagized module distributions (hello, Numerical Python!) to - # get their own directories. - self.handle_extra_path() - self.install_libbase = self.install_lib # needed for .pth file - self.install_lib = os.path.join(self.install_lib, self.extra_dirs) - - # If a new root directory was supplied, make all the installation - # dirs relative to it. - if self.root is not None: - self.change_roots('libbase', 'lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') - - self.dump_dirs("after prepending root") - - # Find out the build directories, ie. where to install from. - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib')) - - # Punt on doc directories for now -- after all, we're punting on - # documentation completely! 
- - def dump_dirs(self, msg): - """Dumps the list of user options.""" - if not DEBUG: - return - from distutils.fancy_getopt import longopt_xlate - log.debug(msg + ":") - for opt in self.user_options: - opt_name = opt[0] - if opt_name[-1] == "=": - opt_name = opt_name[0:-1] - if opt_name in self.negative_opt: - opt_name = self.negative_opt[opt_name] - opt_name = opt_name.translate(longopt_xlate) - val = not getattr(self, opt_name) - else: - opt_name = opt_name.translate(longopt_xlate) - val = getattr(self, opt_name) - log.debug(" %s: %s", opt_name, val) - - def finalize_unix(self): - """Finalizes options for posix platforms.""" - if self.install_base is not None or self.install_platbase is not None: - incomplete_scheme = ( - ( - self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None - ) or - self.install_headers is None or - self.install_scripts is None or - self.install_data is None - ) - if incomplete_scheme: - raise DistutilsOptionError( - "install-base or install-platbase supplied, but " - "installation scheme is incomplete") - return - - if self.user: - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme("posix_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("posix_home") - else: - if self.prefix is None: - if self.exec_prefix is not None: - raise DistutilsOptionError( - "must not supply exec-prefix without prefix") - - # Allow Fedora to add components to the prefix - _prefix_addition = getattr(sysconfig, '_prefix_addition', "") - - self.prefix = ( - os.path.normpath(sys.prefix) + _prefix_addition) - self.exec_prefix = ( - os.path.normpath(sys.exec_prefix) + _prefix_addition) - - else: - if self.exec_prefix is None: - self.exec_prefix = self.prefix - - self.install_base = self.prefix - self.install_platbase = self.exec_prefix - self.select_scheme("posix_prefix") - - def finalize_other(self): - """Finalizes options for non-posix platforms""" - if self.user: - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme(os.name + "_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("posix_home") - else: - if self.prefix is None: - self.prefix = os.path.normpath(sys.prefix) - - self.install_base = self.install_platbase = self.prefix - try: - self.select_scheme(os.name) - except KeyError: - raise DistutilsPlatformError( - "I don't know how to install stuff on '%s'" % os.name) - - def select_scheme(self, name): - _select_scheme(self, name) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) - - def convert_paths(self, *names): - """Call `convert_path` over 
`names`.""" - for name in names: - attr = "install_" + name - setattr(self, attr, convert_path(getattr(self, attr))) - - def handle_extra_path(self): - """Set `path_file` and `extra_dirs` using `extra_path`.""" - if self.extra_path is None: - self.extra_path = self.distribution.extra_path - - if self.extra_path is not None: - log.warn( - "Distribution option extra_path is deprecated. " - "See issue27919 for details." - ) - if isinstance(self.extra_path, str): - self.extra_path = self.extra_path.split(',') - - if len(self.extra_path) == 1: - path_file = extra_dirs = self.extra_path[0] - elif len(self.extra_path) == 2: - path_file, extra_dirs = self.extra_path - else: - raise DistutilsOptionError( - "'extra_path' option must be a list, tuple, or " - "comma-separated string with 1 or 2 elements") - - # convert to local form in case Unix notation used (as it - # should be in setup scripts) - extra_dirs = convert_path(extra_dirs) - else: - path_file = None - extra_dirs = '' - - # XXX should we warn if path_file and not extra_dirs? (in which - # case the path file would be harmless but pointless) - self.path_file = path_file - self.extra_dirs = extra_dirs - - def change_roots(self, *names): - """Change the install directories pointed by name using root.""" - for name in names: - attr = "install_" + name - setattr(self, attr, change_root(self.root, getattr(self, attr))) - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.items(): - if str(path).startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0o700)" % path) - os.makedirs(path, 0o700) - - # -- Command execution methods ------------------------------------- - - def run(self): - """Runs the command.""" - # Obviously have to build before we can install - if not self.skip_build: - self.run_command('build') - # If we built for any other platform, we can't install. - build_plat = self.distribution.get_command_obj('build').plat_name - # check warn_dir - it is a clue that the 'install' is happening - # internally, and not to sys.path, so we don't check the platform - # matches what we are running. - if self.warn_dir and build_plat != get_platform(): - raise DistutilsPlatformError("Can't install when " - "cross-compiling") - - # Run all sub-commands (at least those that need to be run) - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.path_file: - self.create_path_file() - - # write list of installed files, if requested. 
- if self.record: - outputs = self.get_outputs() - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - self.execute(write_file, - (self.record, outputs), - "writing list of installed files to '%s'" % - self.record) - - sys_path = map(os.path.normpath, sys.path) - sys_path = map(os.path.normcase, sys_path) - install_lib = os.path.normcase(os.path.normpath(self.install_lib)) - if (self.warn_dir and - not (self.path_file and self.install_path_file) and - install_lib not in sys_path): - log.debug(("modules installed to '%s', which is not in " - "Python's module search path (sys.path) -- " - "you'll have to change the search path yourself"), - self.install_lib) - - def create_path_file(self): - """Creates the .pth file""" - filename = os.path.join(self.install_libbase, - self.path_file + ".pth") - if self.install_path_file: - self.execute(write_file, - (filename, [self.extra_dirs]), - "creating %s" % filename) - else: - self.warn("path file '%s' not created" % filename) - - - # -- Reporting methods --------------------------------------------- - - def get_outputs(self): - """Assembles the outputs of all the sub-commands.""" - outputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - # Add the contents of cmd.get_outputs(), ensuring - # that outputs doesn't contain duplicate entries - for filename in cmd.get_outputs(): - if filename not in outputs: - outputs.append(filename) - - if self.path_file and self.install_path_file: - outputs.append(os.path.join(self.install_libbase, - self.path_file + ".pth")) - - return outputs - - def get_inputs(self): - """Returns the inputs of all the sub-commands""" - # XXX gee, this looks familiar ;-( - inputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - inputs.extend(cmd.get_inputs()) - - return inputs - - # -- Predicates for sub-command list ------------------------------- - - def has_lib(self): - """Returns true if the current distribution has any Python - modules to install.""" - return (self.distribution.has_pure_modules() or - self.distribution.has_ext_modules()) - - def has_headers(self): - """Returns true if the current distribution has any headers to - install.""" - return self.distribution.has_headers() - - def has_scripts(self): - """Returns true if the current distribution has any scripts to. - install.""" - return self.distribution.has_scripts() - - def has_data(self): - """Returns true if the current distribution has any data to. - install.""" - return self.distribution.has_data_files() - - # 'sub_commands': a list of commands this command might have to run to - # get its work done. See cmd.py for more info. 
- sub_commands = [('install_lib', has_lib), - ('install_headers', has_headers), - ('install_scripts', has_scripts), - ('install_data', has_data), - ('install_egg_info', lambda self:True), - ] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_data.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_data.py deleted file mode 100644 index 947cd76..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/install_data.py +++ /dev/null @@ -1,79 +0,0 @@ -"""distutils.command.install_data - -Implements the Distutils 'install_data' command, for installing -platform-independent data files.""" - -# contributed by Bastian Kleineidam - -import os -from distutils.core import Command -from distutils.util import change_root, convert_path - -class install_data(Command): - - description = "install data files" - - user_options = [ - ('install-dir=', 'd', - "base directory for installing data files " - "(default: installation base dir)"), - ('root=', None, - "install everything relative to this alternate root directory"), - ('force', 'f', "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.outfiles = [] - self.root = None - self.force = 0 - self.data_files = self.distribution.data_files - self.warn_dir = 1 - - def finalize_options(self): - self.set_undefined_options('install', - ('install_data', 'install_dir'), - ('root', 'root'), - ('force', 'force'), - ) - - def run(self): - self.mkpath(self.install_dir) - for f in self.data_files: - if isinstance(f, str): - # it's a simple file, so copy it - f = convert_path(f) - if self.warn_dir: - self.warn("setup script did not provide a directory for " - "'%s' -- installing right in '%s'" % - (f, self.install_dir)) - (out, _) = self.copy_file(f, self.install_dir) - self.outfiles.append(out) - else: - # it's a tuple with path to install to and a list of files - dir = convert_path(f[0]) - if not os.path.isabs(dir): - dir = os.path.join(self.install_dir, dir) - elif self.root: - dir = change_root(self.root, dir) - self.mkpath(dir) - - if f[1] == []: - # If there are no files listed, the user must be - # trying to create an empty directory, so add the - # directory to the list of output files. - self.outfiles.append(dir) - else: - # Copy files, adding them to the list of output files. 
- for data in f[1]: - data = convert_path(data) - (out, _) = self.copy_file(data, dir) - self.outfiles.append(out) - - def get_inputs(self): - return self.data_files or [] - - def get_outputs(self): - return self.outfiles diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py deleted file mode 100644 index adc0323..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py +++ /dev/null @@ -1,84 +0,0 @@ -"""distutils.command.install_egg_info - -Implements the Distutils 'install_egg_info' command, for installing -a package's PKG-INFO metadata.""" - - -from distutils.cmd import Command -from distutils import log, dir_util -import os, sys, re - -class install_egg_info(Command): - """Install an .egg-info file for the package""" - - description = "Install package's PKG-INFO metadata as an .egg-info file" - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - @property - def basename(self): - """ - Allow basename to be overridden by child class. - Ref pypa/distutils#2. - """ - return "%s-%s-py%d.%d.egg-info" % ( - to_filename(safe_name(self.distribution.get_name())), - to_filename(safe_version(self.distribution.get_version())), - *sys.version_info[:2] - ) - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - self.target = os.path.join(self.install_dir, self.basename) - self.outputs = [self.target] - - def run(self): - target = self.target - if os.path.isdir(target) and not os.path.islink(target): - dir_util.remove_tree(target, dry_run=self.dry_run) - elif os.path.exists(target): - self.execute(os.unlink,(self.target,),"Removing "+target) - elif not os.path.isdir(self.install_dir): - self.execute(os.makedirs, (self.install_dir,), - "Creating "+self.install_dir) - log.info("Writing %s", target) - if not self.dry_run: - with open(target, 'w', encoding='UTF-8') as f: - self.distribution.metadata.write_pkg_file(f) - - def get_outputs(self): - return self.outputs - - -# The following routines are taken from setuptools' pkg_resources module and -# can be replaced by importing them from pkg_resources once it is included -# in the stdlib. - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. 
- """ - return name.replace('-','_') diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py deleted file mode 100644 index 9bb0b18..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py +++ /dev/null @@ -1,47 +0,0 @@ -"""distutils.command.install_headers - -Implements the Distutils 'install_headers' command, to install C/C++ header -files to the Python include directory.""" - -from distutils.core import Command - - -# XXX force is never used -class install_headers(Command): - - description = "install C/C++ header files" - - user_options = [('install-dir=', 'd', - "directory to install header files to"), - ('force', 'f', - "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.force = 0 - self.outfiles = [] - - def finalize_options(self): - self.set_undefined_options('install', - ('install_headers', 'install_dir'), - ('force', 'force')) - - - def run(self): - headers = self.distribution.headers - if not headers: - return - - self.mkpath(self.install_dir) - for header in headers: - (out, _) = self.copy_file(header, self.install_dir) - self.outfiles.append(out) - - def get_inputs(self): - return self.distribution.headers or [] - - def get_outputs(self): - return self.outfiles diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py deleted file mode 100644 index 6154cf0..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py +++ /dev/null @@ -1,217 +0,0 @@ -"""distutils.command.install_lib - -Implements the Distutils 'install_lib' command -(install all Python modules).""" - -import os -import importlib.util -import sys - -from distutils.core import Command -from distutils.errors import DistutilsOptionError - - -# Extension for Python source files. -PYTHON_SOURCE_EXTENSION = ".py" - -class install_lib(Command): - - description = "install all Python modules (extensions and pure Python)" - - # The byte-compilation options are a tad confusing. Here are the - # possible scenarios: - # 1) no compilation at all (--no-compile --no-optimize) - # 2) compile .pyc only (--compile --no-optimize; default) - # 3) compile .pyc and "opt-1" .pyc (--compile --optimize) - # 4) compile "opt-1" .pyc only (--no-compile --optimize) - # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) - # 6) compile "opt-2" .pyc only (--no-compile --optimize-more) - # - # The UI for this is two options, 'compile' and 'optimize'. - # 'compile' is strictly boolean, and only decides whether to - # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and - # decides both whether to generate .pyc files and what level of - # optimization to use. 
- - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'compile', 'skip-build'] - negative_opt = {'no-compile' : 'compile'} - - def initialize_options(self): - # let the 'install' command dictate our installation directory - self.install_dir = None - self.build_dir = None - self.force = 0 - self.compile = None - self.optimize = None - self.skip_build = None - - def finalize_options(self): - # Get all the information we need to install pure Python modules - # from the umbrella 'install' command -- build (source) directory, - # install (target) directory, and whether to compile .py files. - self.set_undefined_options('install', - ('build_lib', 'build_dir'), - ('install_lib', 'install_dir'), - ('force', 'force'), - ('compile', 'compile'), - ('optimize', 'optimize'), - ('skip_build', 'skip_build'), - ) - - if self.compile is None: - self.compile = True - if self.optimize is None: - self.optimize = False - - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if self.optimize not in (0, 1, 2): - raise AssertionError - except (ValueError, AssertionError): - raise DistutilsOptionError("optimize must be 0, 1, or 2") - - def run(self): - # Make sure we have built everything we need first - self.build() - - # Install everything: simply dump the entire contents of the build - # directory to the installation directory (that's the beauty of - # having a build directory!) - outfiles = self.install() - - # (Optionally) compile .py to .pyc - if outfiles is not None and self.distribution.has_pure_modules(): - self.byte_compile(outfiles) - - # -- Top-level worker functions ------------------------------------ - # (called from 'run()') - - def build(self): - if not self.skip_build: - if self.distribution.has_pure_modules(): - self.run_command('build_py') - if self.distribution.has_ext_modules(): - self.run_command('build_ext') - - def install(self): - if os.path.isdir(self.build_dir): - outfiles = self.copy_tree(self.build_dir, self.install_dir) - else: - self.warn("'%s' does not exist -- no Python modules to install" % - self.build_dir) - return - return outfiles - - def byte_compile(self, files): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - - # Get the "--root" directory supplied to the "install" command, - # and use it as a prefix to strip off the purported filename - # encoded in bytecode files. This is far from complete, but it - # should at least generate usable bytecode in RPM distributions. 
- install_root = self.get_finalized_command('install').root - - if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=install_root, - dry_run=self.dry_run) - if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=install_root, - verbose=self.verbose, dry_run=self.dry_run) - - - # -- Utility methods ----------------------------------------------- - - def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): - if not has_any: - return [] - - build_cmd = self.get_finalized_command(build_cmd) - build_files = build_cmd.get_outputs() - build_dir = getattr(build_cmd, cmd_option) - - prefix_len = len(build_dir) + len(os.sep) - outputs = [] - for file in build_files: - outputs.append(os.path.join(output_dir, file[prefix_len:])) - - return outputs - - def _bytecode_filenames(self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - # Since build_py handles package data installation, the - # list of outputs can contain more than just .py files. - # Make sure we only report bytecode for the .py files. - ext = os.path.splitext(os.path.normcase(py_file))[1] - if ext != PYTHON_SOURCE_EXTENSION: - continue - if self.compile: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization='')) - if self.optimize > 0: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization=self.optimize)) - - return bytecode_files - - - # -- External interface -------------------------------------------- - # (called by outsiders) - - def get_outputs(self): - """Return the list of files that would be installed if this command - were actually run. Not affected by the "dry-run" flag or whether - modules have actually been built yet. - """ - pure_outputs = \ - self._mutate_outputs(self.distribution.has_pure_modules(), - 'build_py', 'build_lib', - self.install_dir) - if self.compile: - bytecode_outputs = self._bytecode_filenames(pure_outputs) - else: - bytecode_outputs = [] - - ext_outputs = \ - self._mutate_outputs(self.distribution.has_ext_modules(), - 'build_ext', 'build_lib', - self.install_dir) - - return pure_outputs + bytecode_outputs + ext_outputs - - def get_inputs(self): - """Get the list of files that are input to this command, ie. the - files that get installed as they are named in the build tree. - The files in this list correspond one-to-one to the output - filenames returned by 'get_outputs()'. 
- """ - inputs = [] - - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - inputs.extend(build_py.get_outputs()) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - inputs.extend(build_ext.get_outputs()) - - return inputs diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py b/venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py deleted file mode 100644 index 31a1130..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py +++ /dev/null @@ -1,60 +0,0 @@ -"""distutils.command.install_scripts - -Implements the Distutils 'install_scripts' command, for installing -Python scripts.""" - -# contributed by Bastian Kleineidam - -import os -from distutils.core import Command -from distutils import log -from stat import ST_MODE - - -class install_scripts(Command): - - description = "install scripts (Python or otherwise)" - - user_options = [ - ('install-dir=', 'd', "directory to install scripts to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'skip-build'] - - def initialize_options(self): - self.install_dir = None - self.force = 0 - self.build_dir = None - self.skip_build = None - - def finalize_options(self): - self.set_undefined_options('build', ('build_scripts', 'build_dir')) - self.set_undefined_options('install', - ('install_scripts', 'install_dir'), - ('force', 'force'), - ('skip_build', 'skip_build'), - ) - - def run(self): - if not self.skip_build: - self.run_command('build_scripts') - self.outfiles = self.copy_tree(self.build_dir, self.install_dir) - if os.name == 'posix': - # Set the executable bits (owner, group, and world) on - # all the scripts we just installed. - for file in self.get_outputs(): - if self.dry_run: - log.info("changing mode of %s", file) - else: - mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777 - log.info("changing mode of %s to %o", file, mode) - os.chmod(file, mode) - - def get_inputs(self): - return self.distribution.scripts or [] - - def get_outputs(self): - return self.outfiles or [] diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py b/venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py deleted file mode 100644 index 754715a..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/py37compat.py +++ /dev/null @@ -1,30 +0,0 @@ -import sys - - -def _pythonlib_compat(): - """ - On Python 3.7 and earlier, distutils would include the Python - library. See pypa/distutils#9. 
- """ - from distutils import sysconfig - if not sysconfig.get_config_var('Py_ENABLED_SHARED'): - return - - yield 'python{}.{}{}'.format( - sys.hexversion >> 24, - (sys.hexversion >> 16) & 0xff, - sysconfig.get_config_var('ABIFLAGS'), - ) - - -def compose(f1, f2): - return lambda *args, **kwargs: f1(f2(*args, **kwargs)) - - -pythonlib = ( - compose(list, _pythonlib_compat) - if sys.version_info < (3, 8) - and sys.platform != 'darwin' - and sys.platform[:3] != 'aix' - else list -) diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/register.py b/venv/Lib/site-packages/setuptools/_distutils/command/register.py deleted file mode 100644 index 0fac94e..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/register.py +++ /dev/null @@ -1,304 +0,0 @@ -"""distutils.command.register - -Implements the Distutils 'register' command (register with the repository). -""" - -# created 2002/10/21, Richard Jones - -import getpass -import io -import urllib.parse, urllib.request -from warnings import warn - -from distutils.core import PyPIRCCommand -from distutils.errors import * -from distutils import log - -class register(PyPIRCCommand): - - description = ("register the distribution with the Python package index") - user_options = PyPIRCCommand.user_options + [ - ('list-classifiers', None, - 'list the valid Trove classifiers'), - ('strict', None , - 'Will stop the registering if the meta-data are not fully compliant') - ] - boolean_options = PyPIRCCommand.boolean_options + [ - 'verify', 'list-classifiers', 'strict'] - - sub_commands = [('check', lambda self: True)] - - def initialize_options(self): - PyPIRCCommand.initialize_options(self) - self.list_classifiers = 0 - self.strict = 0 - - def finalize_options(self): - PyPIRCCommand.finalize_options(self) - # setting options for the `check` subcommand - check_options = {'strict': ('register', self.strict), - 'restructuredtext': ('register', 1)} - self.distribution.command_options['check'] = check_options - - def run(self): - self.finalize_options() - self._set_config() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.dry_run: - self.verify_metadata() - elif self.list_classifiers: - self.classifiers() - else: - self.send_metadata() - - def check_metadata(self): - """Deprecated API.""" - warn("distutils.command.register.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) - check = self.distribution.get_command_obj('check') - check.ensure_finalized() - check.strict = self.strict - check.restructuredtext = 1 - check.run() - - def _set_config(self): - ''' Reads the configuration file and set attributes. - ''' - config = self._read_pypirc() - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - self.has_config = True - else: - if self.repository not in ('pypi', self.DEFAULT_REPOSITORY): - raise ValueError('%s not found in .pypirc' % self.repository) - if self.repository == 'pypi': - self.repository = self.DEFAULT_REPOSITORY - self.has_config = False - - def classifiers(self): - ''' Fetch the list of classifiers from the server. - ''' - url = self.repository+'?:action=list_classifiers' - response = urllib.request.urlopen(url) - log.info(self._read_pypi_response(response)) - - def verify_metadata(self): - ''' Send the metadata to the package index server to be checked. 
- ''' - # send the info to the server and report the result - (code, result) = self.post_to_server(self.build_post_data('verify')) - log.info('Server response (%s): %s', code, result) - - def send_metadata(self): - ''' Send the metadata to the package index server. - - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. - - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: - - [distutils] - index-servers = - pypi - - [pypi] - username: fred - password: sekrit - - Otherwise, to figure who the user is, we offer the user three - choices: - - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. - - ''' - # see if we can short-cut and get the username/password from the - # config - if self.has_config: - choice = '1' - username = self.username - password = self.password - else: - choice = 'x' - username = password = '' - - # get the user's login info - choices = '1 2 3 4'.split() - while choice not in choices: - self.announce('''\ -We need to know who you are, so please choose either: - 1. use your existing login, - 2. register as a new user, - 3. have the server generate a new password for you (and email it to you), or - 4. quit -Your selection [default 1]: ''', log.INFO) - choice = input() - if not choice: - choice = '1' - elif choice not in choices: - print('Please choose one of the four options!') - - if choice == '1': - # get the username and password - while not username: - username = input('Username: ') - while not password: - password = getpass.getpass('Password: ') - - # set up the authentication - auth = urllib.request.HTTPPasswordMgr() - host = urllib.parse.urlparse(self.repository)[1] - auth.add_password(self.realm, host, username, password) - # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('submit'), - auth) - self.announce('Server response (%s): %s' % (code, result), - log.INFO) - - # possibly save the login - if code == 200: - if self.has_config: - # sharing the password in the distribution instance - # so the upload command can reuse it - self.distribution.password = password - else: - self.announce(('I can store your PyPI login so future ' - 'submissions will be faster.'), log.INFO) - self.announce('(the login will be stored in %s)' % \ - self._get_rc_file(), log.INFO) - choice = 'X' - while choice.lower() not in 'yn': - choice = input('Save your login (y/N)?') - if not choice: - choice = 'n' - if choice.lower() == 'y': - self._store_pypirc(username, password) - - elif choice == '2': - data = {':action': 'user'} - data['name'] = data['password'] = data['email'] = '' - data['confirm'] = None - while not data['name']: - data['name'] = input('Username: ') - while data['password'] != data['confirm']: - while not data['password']: - data['password'] = getpass.getpass('Password: ') - while not data['confirm']: - data['confirm'] = getpass.getpass(' Confirm: ') - if data['password'] != data['confirm']: - data['password'] = '' - data['confirm'] = None - print("Password and confirm don't match!") - while not data['email']: - data['email'] = input(' EMail: ') - code, result = self.post_to_server(data) - if code != 200: - log.info('Server response (%s): %s', code, result) - else: - log.info('You will receive an email shortly.') - log.info(('Follow the instructions 
in it to ' - 'complete registration.')) - elif choice == '3': - data = {':action': 'password_reset'} - data['email'] = '' - while not data['email']: - data['email'] = input('Your email address: ') - code, result = self.post_to_server(data) - log.info('Server response (%s): %s', code, result) - - def build_post_data(self, action): - # figure the data to send - the metadata plus some additional - # information used by the package server - meta = self.distribution.metadata - data = { - ':action': action, - 'metadata_version' : '1.0', - 'name': meta.get_name(), - 'version': meta.get_version(), - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - if data['provides'] or data['requires'] or data['obsoletes']: - data['metadata_version'] = '1.1' - return data - - def post_to_server(self, data, auth=None): - ''' Post a query to the server, and return a string response. - ''' - if 'name' in data: - self.announce('Registering %s to %s' % (data['name'], - self.repository), - log.INFO) - # Build up the MIME payload for the urllib2 POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = io.StringIO() - for key, value in data.items(): - # handle multiple entries for the same name - if type(value) not in (type([]), type( () )): - value = [value] - for value in value: - value = str(value) - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue().encode("utf-8") - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, - 'Content-length': str(len(body)) - } - req = urllib.request.Request(self.repository, body, headers) - - # handle HTTP and include the Basic Auth handler - opener = urllib.request.build_opener( - urllib.request.HTTPBasicAuthHandler(password_mgr=auth) - ) - data = '' - try: - result = opener.open(req) - except urllib.error.HTTPError as e: - if self.show_response: - data = e.fp.read() - result = e.code, e.msg - except urllib.error.URLError as e: - result = 500, str(e) - else: - if self.show_response: - data = self._read_pypi_response(result) - result = 200, 'OK' - if self.show_response: - msg = '\n'.join(('-' * 75, data, '-' * 75)) - self.announce(msg, log.INFO) - return result diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/sdist.py b/venv/Lib/site-packages/setuptools/_distutils/command/sdist.py deleted file mode 100644 index b4996fc..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/sdist.py +++ /dev/null @@ -1,494 +0,0 @@ -"""distutils.command.sdist - -Implements the Distutils 'sdist' command (create a source distribution).""" - -import os -import sys -from glob import glob -from warnings import warn - -from distutils.core import Command -from distutils import dir_util -from distutils import file_util -from distutils import 
archive_util -from distutils.text_file import TextFile -from distutils.filelist import FileList -from distutils import log -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsOptionError - - -def show_formats(): - """Print all possible values for the 'formats' option (used by - the "--help-formats" command-line option). - """ - from distutils.fancy_getopt import FancyGetopt - from distutils.archive_util import ARCHIVE_FORMATS - formats = [] - for format in ARCHIVE_FORMATS.keys(): - formats.append(("formats=" + format, None, - ARCHIVE_FORMATS[format][2])) - formats.sort() - FancyGetopt(formats).print_help( - "List of available source distribution formats:") - - -class sdist(Command): - - description = "create a source distribution (tarball, zip file, etc.)" - - def checking_metadata(self): - """Callable used for the check sub-command. - - Placed here so user_options can view it""" - return self.metadata_check - - user_options = [ - ('template=', 't', - "name of manifest template file [default: MANIFEST.in]"), - ('manifest=', 'm', - "name of manifest file [default: MANIFEST]"), - ('use-defaults', None, - "include the default file set in the manifest " - "[default; disable with --no-defaults]"), - ('no-defaults', None, - "don't include the default file set"), - ('prune', None, - "specifically exclude files/directories that should not be " - "distributed (build tree, RCS/CVS dirs, etc.) " - "[default; disable with --no-prune]"), - ('no-prune', None, - "don't automatically exclude anything"), - ('manifest-only', 'o', - "just regenerate the manifest and then stop " - "(implies --force-manifest)"), - ('force-manifest', 'f', - "forcibly regenerate the manifest and carry on as usual. " - "Deprecated: now the manifest is always regenerated."), - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('metadata-check', None, - "Ensure that all required elements of meta-data " - "are supplied. Warn if any missing. [default]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ] - - boolean_options = ['use-defaults', 'prune', - 'manifest-only', 'force-manifest', - 'keep-temp', 'metadata-check'] - - help_options = [ - ('help-formats', None, - "list available distribution formats", show_formats), - ] - - negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune' } - - sub_commands = [('check', checking_metadata)] - - READMES = ('README', 'README.txt', 'README.rst') - - def initialize_options(self): - # 'template' and 'manifest' are, respectively, the names of - # the manifest template and manifest file. 
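Editor's note: show_formats() above iterates over distutils.archive_util.ARCHIVE_FORMATS, whose entries are (function, options, description) tuples. A minimal sketch printing the same name/description pairs; it assumes an interpreter that still ships distutils (3.11 or earlier).

from distutils.archive_util import ARCHIVE_FORMATS

for name in sorted(ARCHIVE_FORMATS):
    print(name, '-', ARCHIVE_FORMATS[name][2])   # e.g. gztar - gzip'ed tar-file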
- self.template = None - self.manifest = None - - # 'use_defaults': if true, we will include the default file set - # in the manifest - self.use_defaults = 1 - self.prune = 1 - - self.manifest_only = 0 - self.force_manifest = 0 - - self.formats = ['gztar'] - self.keep_temp = 0 - self.dist_dir = None - - self.archive_files = None - self.metadata_check = 1 - self.owner = None - self.group = None - - def finalize_options(self): - if self.manifest is None: - self.manifest = "MANIFEST" - if self.template is None: - self.template = "MANIFEST.in" - - self.ensure_string_list('formats') - - bad_format = archive_util.check_archive_formats(self.formats) - if bad_format: - raise DistutilsOptionError( - "unknown archive format '%s'" % bad_format) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # 'filelist' contains the list of files that will make up the - # manifest - self.filelist = FileList() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # Do whatever it takes to get the list of files to process - # (process the manifest template, read an existing manifest, - # whatever). File list is accumulated in 'self.filelist'. - self.get_file_list() - - # If user just wanted us to regenerate the manifest, stop now. - if self.manifest_only: - return - - # Otherwise, go ahead and create the source distribution tarball, - # or zipfile, or whatever. - self.make_distribution() - - def check_metadata(self): - """Deprecated API.""" - warn("distutils.command.sdist.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) - check = self.distribution.get_command_obj('check') - check.ensure_finalized() - check.run() - - def get_file_list(self): - """Figure out the list of files to include in the source - distribution, and put it in 'self.filelist'. This might involve - reading the manifest template (and writing the manifest), or just - reading the manifest, or just using the default file set -- it all - depends on the user's options. - """ - # new behavior when using a template: - # the file list is recalculated every time because - # even if MANIFEST.in or setup.py are not changed - # the user might have added some files in the tree that - # need to be included. - # - # This makes --force the default and only behavior with templates. - template_exists = os.path.isfile(self.template) - if not template_exists and self._manifest_is_not_generated(): - self.read_manifest() - self.filelist.sort() - self.filelist.remove_duplicates() - return - - if not template_exists: - self.warn(("manifest template '%s' does not exist " + - "(using default file list)") % - self.template) - self.filelist.findall() - - if self.use_defaults: - self.add_defaults() - - if template_exists: - self.read_template() - - if self.prune: - self.prune_file_list() - - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def add_defaults(self): - """Add all the default files to self.filelist: - - README or README.txt - - setup.py - - test/test*.py - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. - - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. 
- """ - self._add_defaults_standards() - self._add_defaults_optional() - self._add_defaults_python() - self._add_defaults_data_files() - self._add_defaults_ext() - self._add_defaults_c_libs() - self._add_defaults_scripts() - - @staticmethod - def _cs_path_exists(fspath): - """ - Case-sensitive path existence check - - >>> sdist._cs_path_exists(__file__) - True - >>> sdist._cs_path_exists(__file__.upper()) - False - """ - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - return filename in os.listdir(directory) - - def _add_defaults_standards(self): - standards = [self.READMES, self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = False - for fn in alts: - if self._cs_path_exists(fn): - got_it = True - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if self._cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - def _add_defaults_optional(self): - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = filter(os.path.isfile, glob(pattern)) - self.filelist.extend(files) - - def _add_defaults_python(self): - # build_py is used to get: - # - python modules - # - files defined in package_data - build_py = self.get_finalized_command('build_py') - - # getting python files - if self.distribution.has_pure_modules(): - self.filelist.extend(build_py.get_source_files()) - - # getting package_data files - # (computed in build_py.data_files by build_py.finalize_options) - for pkg, src_dir, build_dir, filenames in build_py.data_files: - for filename in filenames: - self.filelist.append(os.path.join(src_dir, filename)) - - def _add_defaults_data_files(self): - # getting distribution.data_files - if self.distribution.has_data_files(): - for item in self.distribution.data_files: - if isinstance(item, str): - # plain file - item = convert_path(item) - if os.path.isfile(item): - self.filelist.append(item) - else: - # a (dirname, filenames) tuple - dirname, filenames = item - for f in filenames: - f = convert_path(f) - if os.path.isfile(f): - self.filelist.append(f) - - def _add_defaults_ext(self): - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - def _add_defaults_c_libs(self): - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - def _add_defaults_scripts(self): - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def read_template(self): - """Read and parse manifest template file named by self.template. - - (usually "MANIFEST.in") The parsing and processing is done by - 'self.filelist', which updates itself accordingly. 
- """ - log.info("reading manifest template '%s'", self.template) - template = TextFile(self.template, strip_comments=1, skip_blanks=1, - join_lines=1, lstrip_ws=1, rstrip_ws=1, - collapse_join=1) - - try: - while True: - line = template.readline() - if line is None: # end of file - break - - try: - self.filelist.process_template_line(line) - # the call above can raise a DistutilsTemplateError for - # malformed lines, or a ValueError from the lower-level - # convert_path function - except (DistutilsTemplateError, ValueError) as msg: - self.warn("%s, line %d: %s" % (template.filename, - template.current_line, - msg)) - finally: - template.close() - - def prune_file_list(self): - """Prune off branches that might slip into the file list as created - by 'read_template()', but really don't belong there: - * the build tree (typically "build") - * the release tree itself (only an issue if we ran "sdist" - previously with --keep-temp, or it aborted) - * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories - """ - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - - if sys.platform == 'win32': - seps = r'/|\\' - else: - seps = '/' - - vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', - '_darcs'] - vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) - self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) - - def write_manifest(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - if self._manifest_is_not_generated(): - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) - return - - content = self.filelist.files[:] - content.insert(0, '# file GENERATED by distutils, do NOT edit') - self.execute(file_util.write_file, (self.manifest, content), - "writing manifest file '%s'" % self.manifest) - - def _manifest_is_not_generated(self): - # check for special comment used in 3.1.3 and higher - if not os.path.isfile(self.manifest): - return False - - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() - return first_line != '# file GENERATED by distutils, do NOT edit\n' - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - log.info("reading manifest file '%s'", self.manifest) - with open(self.manifest) as manifest: - for line in manifest: - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - - def make_release_tree(self, base_dir, files): - """Create the directory tree that will become the source - distribution archive. All directories implied by the filenames in - 'files' are created under 'base_dir', and then we hard link or copy - (if hard linking is unavailable) those files into place. - Essentially, this duplicates the developer's source tree, but in a - directory named after the distribution, containing only the files - to be distributed. - """ - # Create all the directories under 'base_dir' necessary to - # put 'files' there; the 'mkpath()' is just so we don't die - # if the manifest happens to be empty. 
- self.mkpath(base_dir) - dir_util.create_tree(base_dir, files, dry_run=self.dry_run) - - # And walk over the list of files, either making a hard link (if - # os.link exists) to each one that doesn't already exist in its - # corresponding location under 'base_dir', or copying each file - # that's out-of-date in 'base_dir'. (Usually, all files will be - # out-of-date, because by default we blow away 'base_dir' when - # we're done making the distribution archives.) - - if hasattr(os, 'link'): # can make hard links on this system - link = 'hard' - msg = "making hard links in %s..." % base_dir - else: # nope, have to copy - link = None - msg = "copying files to %s..." % base_dir - - if not files: - log.warn("no files to distribute -- empty manifest?") - else: - log.info(msg) - for file in files: - if not os.path.isfile(file): - log.warn("'%s' not a regular file -- skipping", file) - else: - dest = os.path.join(base_dir, file) - self.copy_file(file, dest, link=link) - - self.distribution.metadata.write_pkg_info(base_dir) - - def make_distribution(self): - """Create the source distribution(s). First, we create the release - tree with 'make_release_tree()'; then, we create all required - archive files (according to 'self.formats') from the release tree. - Finally, we clean up by blowing away the release tree (unless - 'self.keep_temp' is true). The list of archive files created is - stored so it can be retrieved later by 'get_archive_files()'. - """ - # Don't warn about missing meta-data here -- should be (and is!) - # done elsewhere. - base_dir = self.distribution.get_fullname() - base_name = os.path.join(self.dist_dir, base_dir) - - self.make_release_tree(base_dir, self.filelist.files) - archive_files = [] # remember names of files we create - # tar archive must be created last to avoid overwrite and remove - if 'tar' in self.formats: - self.formats.append(self.formats.pop(self.formats.index('tar'))) - - for fmt in self.formats: - file = self.make_archive(base_name, fmt, base_dir=base_dir, - owner=self.owner, group=self.group) - archive_files.append(file) - self.distribution.dist_files.append(('sdist', '', file)) - - self.archive_files = archive_files - - if not self.keep_temp: - dir_util.remove_tree(base_dir, dry_run=self.dry_run) - - def get_archive_files(self): - """Return the list of archive files created when the command - was run, or None if the command hasn't run yet. - """ - return self.archive_files diff --git a/venv/Lib/site-packages/setuptools/_distutils/command/upload.py b/venv/Lib/site-packages/setuptools/_distutils/command/upload.py deleted file mode 100644 index 95e9fda..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/command/upload.py +++ /dev/null @@ -1,214 +0,0 @@ -""" -distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to a package -index). 
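Editor's note: make_release_tree() above hard-links files when os.link is available and copies them otherwise. A minimal standalone sketch of that idea; the helper name and the shutil fallback are the editor's, not distutils'.

import os
import shutil

def link_or_copy(src, dst):
    try:
        os.link(src, dst)        # hard link: cheap, but same filesystem only
    except OSError:
        shutil.copy2(src, dst)   # fall back to an ordinary copy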
-""" - -import os -import io -import hashlib -from base64 import standard_b64encode -from urllib.request import urlopen, Request, HTTPError -from urllib.parse import urlparse -from distutils.errors import DistutilsError, DistutilsOptionError -from distutils.core import PyPIRCCommand -from distutils.spawn import spawn -from distutils import log - - -# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) -# https://bugs.python.org/issue40698 -_FILE_CONTENT_DIGESTS = { - "md5_digest": getattr(hashlib, "md5", None), - "sha256_digest": getattr(hashlib, "sha256", None), - "blake2_256_digest": getattr(hashlib, "blake2b", None), -} - - -class upload(PyPIRCCommand): - - description = "upload binary package to PyPI" - - user_options = PyPIRCCommand.user_options + [ - ('sign', 's', - 'sign files to upload using gpg'), - ('identity=', 'i', 'GPG identity used to sign files'), - ] - - boolean_options = PyPIRCCommand.boolean_options + ['sign'] - - def initialize_options(self): - PyPIRCCommand.initialize_options(self) - self.username = '' - self.password = '' - self.show_response = 0 - self.sign = False - self.identity = None - - def finalize_options(self): - PyPIRCCommand.finalize_options(self) - if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) - config = self._read_pypirc() - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - - # getting the password from the distribution - # if previously set by the register command - if not self.password and self.distribution.password: - self.password = self.distribution.password - - def run(self): - if not self.distribution.dist_files: - msg = ("Must create and upload files in one command " - "(e.g. 
setup.py sdist upload)") - raise DistutilsOptionError(msg) - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - - def upload_file(self, command, pyversion, filename): - # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - if params or query or fragments: - raise AssertionError("Incompatible url %s" % self.repository) - - if schema not in ('http', 'https'): - raise AssertionError("unsupported schema " + schema) - - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - send all the meta-data in case we need to - # register a new release - f = open(filename,'rb') - try: - content = f.read() - finally: - f.close() - - meta = self.distribution.metadata - data = { - # action - ':action': 'file_upload', - 'protocol_version': '1', - - # identify release - 'name': meta.get_name(), - 'version': meta.get_version(), - - # file content - 'content': (os.path.basename(filename),content), - 'filetype': command, - 'pyversion': pyversion, - - # additional meta-data - 'metadata_version': '1.0', - 'summary': meta.get_description(), - 'home_page': meta.get_url(), - 'author': meta.get_contact(), - 'author_email': meta.get_contact_email(), - 'license': meta.get_licence(), - 'description': meta.get_long_description(), - 'keywords': meta.get_keywords(), - 'platform': meta.get_platforms(), - 'classifiers': meta.get_classifiers(), - 'download_url': meta.get_download_url(), - # PEP 314 - 'provides': meta.get_provides(), - 'requires': meta.get_requires(), - 'obsoletes': meta.get_obsoletes(), - } - - data['comment'] = '' - - # file content digests - for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): - if digest_cons is None: - continue - try: - data[digest_name] = digest_cons(content).hexdigest() - except ValueError: - # hash digest not available or blocked by security policy - pass - - if self.sign: - with open(filename + ".asc", "rb") as f: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - f.read()) - - # set up the authentication - user_pass = (self.username + ":" + self.password).encode('ascii') - # The exact encoding of the authentication string is debated. - # Anyway PyPI only accepts ascii for both username or password. 
- auth = "Basic " + standard_b64encode(user_pass).decode('ascii') - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\r\n--' + boundary.encode('ascii') - end_boundary = sep_boundary + b'--\r\n' - body = io.BytesIO() - for key, value in data.items(): - title = '\r\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(value, list): - value = [value] - for value in value: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = str(value).encode('utf-8') - body.write(sep_boundary) - body.write(title.encode('utf-8')) - body.write(b"\r\n\r\n") - body.write(value) - body.write(end_boundary) - body = body.getvalue() - - msg = "Submitting %s to %s" % (filename, self.repository) - self.announce(msg, log.INFO) - - # build the Request - headers = { - 'Content-type': 'multipart/form-data; boundary=%s' % boundary, - 'Content-length': str(len(body)), - 'Authorization': auth, - } - - request = Request(self.repository, data=body, - headers=headers) - # send the data - try: - result = urlopen(request) - status = result.getcode() - reason = result.msg - except HTTPError as e: - status = e.code - reason = e.msg - except OSError as e: - self.announce(str(e), log.ERROR) - raise - - if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - log.INFO) - if self.show_response: - text = self._read_pypi_response(result) - msg = '\n'.join(('-' * 75, text, '-' * 75)) - self.announce(msg, log.INFO) - else: - msg = 'Upload failed (%s): %s' % (status, reason) - self.announce(msg, log.ERROR) - raise DistutilsError(msg) diff --git a/venv/Lib/site-packages/setuptools/_distutils/config.py b/venv/Lib/site-packages/setuptools/_distutils/config.py deleted file mode 100644 index 2171abd..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/config.py +++ /dev/null @@ -1,130 +0,0 @@ -"""distutils.pypirc - -Provides the PyPIRCCommand class, the base class for the command classes -that uses .pypirc in the distutils.command package. 
-""" -import os -from configparser import RawConfigParser - -from distutils.cmd import Command - -DEFAULT_PYPIRC = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:%s -password:%s -""" - -class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file - """ - DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' - DEFAULT_REALM = 'pypi' - repository = None - realm = None - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % \ - DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server')] - - boolean_options = ['show-response'] - - def _get_rc_file(self): - """Returns rc file path.""" - return os.path.join(os.path.expanduser('~'), '.pypirc') - - def _store_pypirc(self, username, password): - """Creates a default .pypirc file.""" - rc = self._get_rc_file() - with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: - f.write(DEFAULT_PYPIRC % (username, password)) - - def _read_pypirc(self): - """Reads the .pypirc file.""" - rc = self._get_rc_file() - if os.path.exists(rc): - self.announce('Using PyPI login from %s' % rc) - repository = self.repository or self.DEFAULT_REPOSITORY - - config = RawConfigParser() - config.read(rc) - sections = config.sections() - if 'distutils' in sections: - # let's get the list of servers - index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] - if _servers == []: - # nothing set, let's try to get the default pypi - if 'pypi' in sections: - _servers = ['pypi'] - else: - # the file is not properly defined, returning - # an empty dict - return {} - for server in _servers: - current = {'server': server} - current['username'] = config.get(server, 'username') - - # optional params - for key, default in (('repository', - self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): - if config.has_option(server, key): - current[key] = config.get(server, key) - else: - current[key] = default - - # work around people having "repository" for the "pypi" - # section of their config set to the HTTP (rather than - # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): - current['repository'] = self.DEFAULT_REPOSITORY - return current - - if (current['server'] == repository or - current['repository'] == repository): - return current - elif 'server-login' in sections: - # old format - server = 'server-login' - if config.has_option(server, 'repository'): - repository = config.get(server, 'repository') - else: - repository = self.DEFAULT_REPOSITORY - return {'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM} - - return {} - - def _read_pypi_response(self, response): - """Read and decode a PyPI HTTP response.""" - import cgi - content_type = response.getheader('content-type', 'text/plain') - encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') - return response.read().decode(encoding) - - def initialize_options(self): - """Initialize options.""" - self.repository = None - self.realm = None - self.show_response = 0 - - def finalize_options(self): - """Finalizes options.""" - if self.repository is None: - self.repository = self.DEFAULT_REPOSITORY - if self.realm is None: - self.realm = self.DEFAULT_REALM diff --git a/venv/Lib/site-packages/setuptools/_distutils/core.py 
b/venv/Lib/site-packages/setuptools/_distutils/core.py deleted file mode 100644 index f43888e..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/core.py +++ /dev/null @@ -1,249 +0,0 @@ -"""distutils.core - -The only module that needs to be imported to use the Distutils; provides -the 'setup' function (which is to be called from the setup script). Also -indirectly provides the Distribution and Command classes, although they are -really defined in distutils.dist and distutils.cmd. -""" - -import os -import sys -import tokenize - -from distutils.debug import DEBUG -from distutils.errors import * - -# Mainly import these so setup scripts can "from distutils.core import" them. -from distutils.dist import Distribution -from distutils.cmd import Command -from distutils.config import PyPIRCCommand -from distutils.extension import Extension - -# This is a barebones help message generated displayed when the user -# runs the setup script with no arguments at all. More useful help -# is generated with various --help options: global help, list commands, -# and per-command help. -USAGE = """\ -usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] - or: %(script)s --help [cmd1 cmd2 ...] - or: %(script)s --help-commands - or: %(script)s cmd --help -""" - -def gen_usage (script_name): - script = os.path.basename(script_name) - return USAGE % vars() - - -# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. -_setup_stop_after = None -_setup_distribution = None - -# Legal keyword arguments for the setup() function -setup_keywords = ('distclass', 'script_name', 'script_args', 'options', - 'name', 'version', 'author', 'author_email', - 'maintainer', 'maintainer_email', 'url', 'license', - 'description', 'long_description', 'keywords', - 'platforms', 'classifiers', 'download_url', - 'requires', 'provides', 'obsoletes', - ) - -# Legal keyword arguments for the Extension constructor -extension_keywords = ('name', 'sources', 'include_dirs', - 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'swig_opts', 'export_symbols', 'depends', 'language') - -def setup (**attrs): - """The gateway to the Distutils: do everything your setup script needs - to do, in a highly flexible and user-driven way. Briefly: create a - Distribution instance; find and parse config files; parse the command - line; run each Distutils command found there, customized by the options - supplied to 'setup()' (as keyword arguments), in config files, and on - the command line. - - The Distribution instance might be an instance of a class supplied via - the 'distclass' keyword argument to 'setup'; if no such class is - supplied, then the Distribution class (in dist.py) is instantiated. - All other arguments to 'setup' (except for 'cmdclass') are used to set - attributes of the Distribution instance. - - The 'cmdclass' argument, if supplied, is a dictionary mapping command - names to command classes. Each command encountered on the command line - will be turned into a command class, which is in turn instantiated; any - class found in 'cmdclass' is used in place of the default, which is - (for command 'foo_bar') class 'foo_bar' in module - 'distutils.command.foo_bar'. The command class must provide a - 'user_options' attribute which is a list of option specifiers for - 'distutils.fancy_getopt'. 
Any command-line options between the current - and the next command are used to set attributes of the current command - object. - - When the entire command-line has been successfully parsed, calls the - 'run()' method on each command object in turn. This method will be - driven entirely by the Distribution object (which each command object - has a reference to, thanks to its constructor), and the - command-specific options that became attributes of each command - object. - """ - - global _setup_stop_after, _setup_distribution - - # Determine the distribution class -- either caller-supplied or - # our Distribution (see below). - klass = attrs.get('distclass') - if klass: - del attrs['distclass'] - else: - klass = Distribution - - if 'script_name' not in attrs: - attrs['script_name'] = os.path.basename(sys.argv[0]) - if 'script_args' not in attrs: - attrs['script_args'] = sys.argv[1:] - - # Create the Distribution instance, using the remaining arguments - # (ie. everything except distclass) to initialize it - try: - _setup_distribution = dist = klass(attrs) - except DistutilsSetupError as msg: - if 'name' not in attrs: - raise SystemExit("error in setup command: %s" % msg) - else: - raise SystemExit("error in %s setup command: %s" % \ - (attrs['name'], msg)) - - if _setup_stop_after == "init": - return dist - - # Find and parse the config file(s): they will override options from - # the setup script, but be overridden by the command line. - dist.parse_config_files() - - if DEBUG: - print("options (after parsing config files):") - dist.dump_option_dicts() - - if _setup_stop_after == "config": - return dist - - # Parse the command line and override config files; any - # command-line errors are the end user's fault, so turn them into - # SystemExit to suppress tracebacks. - try: - ok = dist.parse_command_line() - except DistutilsArgError as msg: - raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg) - - if DEBUG: - print("options (after parsing command line):") - dist.dump_option_dicts() - - if _setup_stop_after == "commandline": - return dist - - # And finally, run all the commands found on the command line. - if ok: - return run_commands(dist) - - return dist - -# setup () - - -def run_commands (dist): - """Given a Distribution object run all the commands, - raising ``SystemExit`` errors in the case of failure. - - This function assumes that either ``sys.argv`` or ``dist.script_args`` - is already set accordingly. - """ - try: - dist.run_commands() - except KeyboardInterrupt: - raise SystemExit("interrupted") - except OSError as exc: - if DEBUG: - sys.stderr.write("error: %s\n" % (exc,)) - raise - else: - raise SystemExit("error: %s" % (exc,)) - - except (DistutilsError, - CCompilerError) as msg: - if DEBUG: - raise - else: - raise SystemExit("error: " + str(msg)) - - return dist - - -def run_setup (script_name, script_args=None, stop_after="run"): - """Run a setup script in a somewhat controlled environment, and - return the Distribution instance that drives things. This is useful - if you need to find out the distribution meta-data (passed as - keyword args from 'script' to 'setup()', or the contents of the - config files or command-line. - - 'script_name' is a file that will be read and run with 'exec()'; - 'sys.argv[0]' will be replaced with 'script' for the duration of the - call. 'script_args' is a list of strings; if supplied, - 'sys.argv[1:]' will be replaced by 'script_args' for the duration of - the call. 
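Editor's note: a minimal setup script exercising the setup() entry point implemented above; the project name and module are invented for illustration.

from distutils.core import setup

setup(
    name='example',
    version='0.1',
    py_modules=['example'],      # a single hypothetical example.py module
)
# typical invocations: python setup.py build / sdist / install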
- - 'stop_after' tells 'setup()' when to stop processing; possible - values: - init - stop after the Distribution instance has been created and - populated with the keyword arguments to 'setup()' - config - stop after config files have been parsed (and their data - stored in the Distribution instance) - commandline - stop after the command-line ('sys.argv[1:]' or 'script_args') - have been parsed (and the data stored in the Distribution) - run [default] - stop after all commands have been run (the same as if 'setup()' - had been called in the usual way - - Returns the Distribution instance, which provides all information - used to drive the Distutils. - """ - if stop_after not in ('init', 'config', 'commandline', 'run'): - raise ValueError("invalid value for 'stop_after': %r" % (stop_after,)) - - global _setup_stop_after, _setup_distribution - _setup_stop_after = stop_after - - save_argv = sys.argv.copy() - g = {'__file__': script_name, '__name__': '__main__'} - try: - try: - sys.argv[0] = script_name - if script_args is not None: - sys.argv[1:] = script_args - # tokenize.open supports automatic encoding detection - with tokenize.open(script_name) as f: - code = f.read().replace(r'\r\n', r'\n') - exec(code, g) - finally: - sys.argv = save_argv - _setup_stop_after = None - except SystemExit: - # Hmm, should we do something if exiting with a non-zero code - # (ie. error)? - pass - - if _setup_distribution is None: - raise RuntimeError(("'distutils.core.setup()' was never called -- " - "perhaps '%s' is not a Distutils setup script?") % \ - script_name) - - # I wonder if the setup script's namespace -- g and l -- would be of - # any interest to callers? - #print "_setup_distribution:", _setup_distribution - return _setup_distribution - -# run_setup () diff --git a/venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py deleted file mode 100644 index c5c86d8..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py +++ /dev/null @@ -1,362 +0,0 @@ -"""distutils.cygwinccompiler - -Provides the CygwinCCompiler class, a subclass of UnixCCompiler that -handles the Cygwin port of the GNU C compiler to Windows. It also contains -the Mingw32CCompiler class which handles the mingw32 port of GCC (same as -cygwin in no-cygwin mode). -""" - -# problems: -# -# * if you use a msvc compiled python version (1.5.2) -# 1. you have to insert a __GNUC__ section in its config.h -# 2. you have to generate an import library for its dll -# - create a def-file for python??.dll -# - create an import library using -# dlltool --dllname python15.dll --def python15.def \ -# --output-lib libpython15.a -# -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# -# * We put export_symbols in a def-file, and don't use -# --export-all-symbols because it doesn't worked reliable in some -# tested configurations. And because other windows compilers also -# need their symbols specified this no serious problem. 
-# -# tested configurations: -# -# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works -# (after patching python's config.h and for C++ some other include files) -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works -# (ld doesn't support -shared, so we use dllwrap) -# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now -# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 -# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html -# - using gcc -mdll instead dllwrap doesn't work without -static because -# it tries to link against dlls instead their import libraries. (If -# it finds the dll first.) -# By specifying -static we force ld to link against the import libraries, -# this is windows standard and there are normally not the necessary symbols -# in the dlls. -# *** only the version of June 2000 shows these problems -# * cygwin gcc 3.2/ld 2.13.90 works -# (ld supports -shared) -# * mingw gcc 3.2/ld 2.13 works -# (ld supports -shared) -# * llvm-mingw with Clang 11 works -# (lld supports -shared) - -import os -import sys -import copy -import shlex -import warnings -from subprocess import check_output - -from distutils.unixccompiler import UnixCCompiler -from distutils.file_util import write_file -from distutils.errors import (DistutilsExecError, CCompilerError, - CompileError, UnknownFileError) -from distutils.version import LooseVersion, suppress_known_deprecation - -def get_msvcr(): - """Include the appropriate MSVC runtime library if Python was built - with MSVC 7.0 or later. - """ - msc_pos = sys.version.find('MSC v.') - if msc_pos != -1: - msc_ver = sys.version[msc_pos+6:msc_pos+10] - if msc_ver == '1300': - # MSVC 7.0 - return ['msvcr70'] - elif msc_ver == '1310': - # MSVC 7.1 - return ['msvcr71'] - elif msc_ver == '1400': - # VS2005 / MSVC 8.0 - return ['msvcr80'] - elif msc_ver == '1500': - # VS2008 / MSVC 9.0 - return ['msvcr90'] - elif msc_ver == '1600': - # VS2010 / MSVC 10.0 - return ['msvcr100'] - elif msc_ver == '1700': - # VS2012 / MSVC 11.0 - return ['msvcr110'] - elif msc_ver == '1800': - # VS2013 / MSVC 12.0 - return ['msvcr120'] - elif 1900 <= int(msc_ver) < 2000: - # VS2015 / MSVC 14.0 - return ['ucrt', 'vcruntime140'] - else: - raise ValueError("Unknown MS Compiler version %s " % msc_ver) - - -class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ - compiler_type = 'cygwin' - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".dll" - static_lib_format = "lib%s%s" - shared_lib_format = "%s%s" - exe_extension = ".exe" - - def __init__(self, verbose=0, dry_run=0, force=0): - - super().__init__(verbose, dry_run, force) - - status, details = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) - if status is not CONFIG_H_OK: - self.warn( - "Python's pyconfig.h doesn't seem to support your compiler. " - "Reason: %s. " - "Compiling may fail because of undefined preprocessor macros." 
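Editor's note: get_msvcr() above slices the 'MSC v.' marker out of sys.version. The same slicing shown against a made-up Windows build string.

version = '3.9.13 (tags/v3.9.13, May 2022) [MSC v.1929 64 bit (AMD64)]'  # made-up example
msc_pos = version.find('MSC v.')
msc_ver = version[msc_pos + 6:msc_pos + 10]
print(msc_ver)   # '1929' -> 1900 <= 1929 < 2000 -> ['ucrt', 'vcruntime140']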
- % details) - - self.cc = os.environ.get('CC', 'gcc') - self.cxx = os.environ.get('CXX', 'g++') - - self.linker_dll = self.cc - shared_option = "-shared" - - self.set_executables(compiler='%s -mcygwin -O -Wall' % self.cc, - compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, - compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, - linker_exe='%s -mcygwin' % self.cc, - linker_so=('%s -mcygwin %s' % - (self.linker_dll, shared_option))) - - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - - @property - def gcc_version(self): - # Older numpy dependend on this existing to check for ancient - # gcc versions. This doesn't make much sense with clang etc so - # just hardcode to something recent. - # https://github.com/numpy/numpy/pull/20333 - warnings.warn( - "gcc_version attribute of CygwinCCompiler is deprecated. " - "Instead of returning actual gcc version a fixed value 11.2.0 is returned.", - DeprecationWarning, - stacklevel=2, - ) - with suppress_known_deprecation(): - return LooseVersion("11.2.0") - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compiles the source by spawning GCC and windres if needed.""" - if ext == '.rc' or ext == '.res': - # gcc needs '.res' and '.rc' compiled to object files !!! - try: - self.spawn(["windres", "-i", src, "-o", obj]) - except DistutilsExecError as msg: - raise CompileError(msg) - else: # for other files use the C-compiler - try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - """Link the objects.""" - # use separate copies, so we can modify the lists - extra_preargs = copy.copy(extra_preargs or []) - libraries = copy.copy(libraries or []) - objects = copy.copy(objects or []) - - # Additional libraries - libraries.extend(self.dll_libraries) - - # handle export symbols by creating a def-file - # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - # (The linker doesn't do anything if output is up-to-date. - # So it would probably better to check if we really need this, - # but for this we had to insert some unchanged parts of - # UnixCCompiler, and this is not what we want.) 
- - # we want to put some files in the same directory as the - # object files are, build_temp doesn't help much - # where are the object files - temp_dir = os.path.dirname(objects[0]) - # name of dll to give the helper files the same base name - (dll_name, dll_extension) = os.path.splitext( - os.path.basename(output_filename)) - - # generate the filenames for these files - def_file = os.path.join(temp_dir, dll_name + ".def") - lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") - - # Generate .def file - contents = [ - "LIBRARY %s" % os.path.basename(output_filename), - "EXPORTS"] - for sym in export_symbols: - contents.append(sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # next add options for def-file and to creating import libraries - - # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) - # for gcc/ld the def-file is specified as any object files - objects.append(def_file) - - #end: if ((export_symbols is not None) and - # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - - # who wants symbols and a many times larger output file - # should explicitly switch the debug mode on - # otherwise we let ld strip the output file - # (On my machine: 10KiB < stripped_file < ??100KiB - # unstripped_file = stripped_file + XXX KiB - # ( XXX=254 for a typical python extension)) - if not debug: - extra_preargs.append("-s") - - UnixCCompiler.link(self, target_desc, objects, output_filename, - output_dir, libraries, library_dirs, - runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, extra_preargs, extra_postargs, build_temp, - target_lang) - - # -- Miscellaneous methods ----------------------------------------- - - def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): - """Adds supports for rc and res files.""" - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename (base) - if ext in ('.res', '.rc'): - # these need to be compiled to object files - obj_names.append (os.path.join(output_dir, - base + ext + self.obj_extension)) - else: - obj_names.append (os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - -# the same as cygwin plus some additional parameters -class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. 
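Editor's note: the .def file that CygwinCCompiler.link() above writes, reproduced for a hypothetical extension DLL exporting a single symbol.

import os

output_filename = 'build/lib/spam.pyd'          # hypothetical extension module
export_symbols = ['PyInit_spam']
contents = ['LIBRARY %s' % os.path.basename(output_filename), 'EXPORTS'] + export_symbols
print('\n'.join(contents))
# LIBRARY spam.pyd
# EXPORTS
# PyInit_spam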
- """ - compiler_type = 'mingw32' - - def __init__(self, verbose=0, dry_run=0, force=0): - - super().__init__ (verbose, dry_run, force) - - shared_option = "-shared" - - if is_cygwincc(self.cc): - raise CCompilerError( - 'Cygwin gcc cannot be used with --compiler=mingw32') - - self.set_executables(compiler='%s -O -Wall' % self.cc, - compiler_so='%s -mdll -O -Wall' % self.cc, - compiler_cxx='%s -O -Wall' % self.cxx, - linker_exe='%s' % self.cc, - linker_so='%s %s' - % (self.linker_dll, shared_option)) - - # Maybe we should also append -mthreads, but then the finished - # dlls need another dll (mingwm10.dll see Mingw32 docs) - # (-mthreads: Support thread-safe exception handling on `Mingw32') - - # no additional libraries needed - self.dll_libraries=[] - - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - -# Because these compilers aren't configured in Python's pyconfig.h file by -# default, we should at least warn the user if he is using an unmodified -# version. - -CONFIG_H_OK = "ok" -CONFIG_H_NOTOK = "not ok" -CONFIG_H_UNCERTAIN = "uncertain" - -def check_config_h(): - """Check if the current Python installation appears amenable to building - extensions with GCC. - - Returns a tuple (status, details), where 'status' is one of the following - constants: - - - CONFIG_H_OK: all is well, go ahead and compile - - CONFIG_H_NOTOK: doesn't look good - - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h - - 'details' is a human-readable string explaining the situation. - - Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ - - # XXX since this function also checks sys.version, it's not strictly a - # "pyconfig.h" check -- should probably be renamed... - - from distutils import sysconfig - - # if sys.version contains GCC then python was compiled with GCC, and the - # pyconfig.h file should be OK - if "GCC" in sys.version: - return CONFIG_H_OK, "sys.version mentions 'GCC'" - - # Clang would also work - if "Clang" in sys.version: - return CONFIG_H_OK, "sys.version mentions 'Clang'" - - # let's see if __GNUC__ is mentioned in python.h - fn = sysconfig.get_config_h_filename() - try: - config_h = open(fn) - try: - if "__GNUC__" in config_h.read(): - return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn - else: - return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn - finally: - config_h.close() - except OSError as exc: - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) - -def is_cygwincc(cc): - '''Try to determine if the compiler that would be used is from cygwin.''' - out_string = check_output(shlex.split(cc) + ['-dumpmachine']) - return out_string.strip().endswith(b'cygwin') - - -get_versions = None -""" -A stand-in for the previous get_versions() function to prevent failures -when monkeypatched. See pypa/setuptools#2969. -""" diff --git a/venv/Lib/site-packages/setuptools/_distutils/debug.py b/venv/Lib/site-packages/setuptools/_distutils/debug.py deleted file mode 100644 index daf1660..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/debug.py +++ /dev/null @@ -1,5 +0,0 @@ -import os - -# If DISTUTILS_DEBUG is anything other than the empty string, we run in -# debug mode. 
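For reference, a minimal sketch of how the helpers defined in the removed cygwinccompiler module above were typically called. It assumes a Python (or the setuptools vendored copy this file mirrors) that still provides distutils, plus a gcc on PATH; the printed messages are illustrative only.

    from distutils.cygwinccompiler import CONFIG_H_OK, check_config_h, is_cygwincc

    # check_config_h() reports whether this interpreter looks GCC-buildable.
    status, details = check_config_h()
    print("pyconfig.h check: %s (%s)" % (status, details))
    if status != CONFIG_H_OK:
        print("building extensions with GCC may not work on this interpreter")

    # is_cygwincc() runs "<cc> -dumpmachine" and checks for a Cygwin target
    # triple, so the named compiler must actually be installed.
    print("gcc targets Cygwin:", is_cygwincc("gcc"))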
-DEBUG = os.environ.get('DISTUTILS_DEBUG') diff --git a/venv/Lib/site-packages/setuptools/_distutils/dep_util.py b/venv/Lib/site-packages/setuptools/_distutils/dep_util.py deleted file mode 100644 index d74f5e4..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/dep_util.py +++ /dev/null @@ -1,92 +0,0 @@ -"""distutils.dep_util - -Utility functions for simple, timestamp-based dependency of files -and groups of files; also, function based entirely on such -timestamp dependency analysis.""" - -import os -from distutils.errors import DistutilsFileError - - -def newer (source, target): - """Return true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. Return false if - both exist and 'target' is the same age or younger than 'source'. - Raise DistutilsFileError if 'source' does not exist. - """ - if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % - os.path.abspath(source)) - if not os.path.exists(target): - return 1 - - from stat import ST_MTIME - mtime1 = os.stat(source)[ST_MTIME] - mtime2 = os.stat(target)[ST_MTIME] - - return mtime1 > mtime2 - -# newer () - - -def newer_pairwise (sources, targets): - """Walk two filename lists in parallel, testing if each source is newer - than its corresponding target. Return a pair of lists (sources, - targets) where source is newer than target, according to the semantics - of 'newer()'. - """ - if len(sources) != len(targets): - raise ValueError("'sources' and 'targets' must be same length") - - # build a pair of lists (sources, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources)): - if newer(sources[i], targets[i]): - n_sources.append(sources[i]) - n_targets.append(targets[i]) - - return (n_sources, n_targets) - -# newer_pairwise () - - -def newer_group (sources, target, missing='error'): - """Return true if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return false; otherwise return true. - 'missing' controls what we do when a source file is missing; the - default ("error") is to blow up with an OSError from inside 'stat()'; - if it is "ignore", we silently drop any missing source files; if it is - "newer", any missing source files make us assume that 'target' is - out-of-date (this is handy in "dry-run" mode: it'll make you pretend to - carry out commands that wouldn't work because inputs are missing, but - that doesn't matter because you're not actually going to run the - commands). - """ - # If the target doesn't even exist, then it's definitely out-of-date. - if not os.path.exists(target): - return 1 - - # Otherwise we have to find out the hard way: if *any* source file - # is more recent than 'target', then 'target' is out-of-date and - # we can immediately return true. If we fall through to the end - # of the loop, then 'target' is up-to-date and we return false. 
- from stat import ST_MTIME - target_mtime = os.stat(target)[ST_MTIME] - for source in sources: - if not os.path.exists(source): - if missing == 'error': # blow up when we stat() the file - pass - elif missing == 'ignore': # missing source dropped from - continue # target's dependency list - elif missing == 'newer': # missing source means target is - return 1 # out-of-date - - source_mtime = os.stat(source)[ST_MTIME] - if source_mtime > target_mtime: - return 1 - else: - return 0 - -# newer_group () diff --git a/venv/Lib/site-packages/setuptools/_distutils/dir_util.py b/venv/Lib/site-packages/setuptools/_distutils/dir_util.py deleted file mode 100644 index d5cd8e3..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/dir_util.py +++ /dev/null @@ -1,210 +0,0 @@ -"""distutils.dir_util - -Utility functions for manipulating directories and directory trees.""" - -import os -import errno -from distutils.errors import DistutilsFileError, DistutilsInternalError -from distutils import log - -# cache for by mkpath() -- in addition to cheapening redundant calls, -# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode -_path_created = {} - -# I don't use os.makedirs because a) it's new to Python 1.5.2, and -# b) it blows up if the directory already exists (I want to silently -# succeed in that case). -def mkpath(name, mode=0o777, verbose=1, dry_run=0): - """Create a directory and any missing ancestor directories. - - If the directory already exists (or if 'name' is the empty string, which - means the current directory, which of course exists), then do nothing. - Raise DistutilsFileError if unable to create some directory along the way - (eg. some sub-path exists, but is a file rather than a directory). - If 'verbose' is true, print a one-line summary of each mkdir to stdout. - Return the list of directories actually created. - """ - - global _path_created - - # Detect a common bug -- name is None - if not isinstance(name, str): - raise DistutilsInternalError( - "mkpath: 'name' must be a string (got %r)" % (name,)) - - # XXX what's the better way to handle verbosity? print as we create - # each directory in the path (the current behaviour), or only announce - # the creation of the whole path? 
(quite easy to do the latter since - # we're not using a recursive algorithm) - - name = os.path.normpath(name) - created_dirs = [] - if os.path.isdir(name) or name == '': - return created_dirs - if _path_created.get(os.path.abspath(name)): - return created_dirs - - (head, tail) = os.path.split(name) - tails = [tail] # stack of lone dirs to create - - while head and tail and not os.path.isdir(head): - (head, tail) = os.path.split(head) - tails.insert(0, tail) # push next higher dir onto stack - - # now 'head' contains the deepest directory that already exists - # (that is, the child of 'head' in 'name' is the highest directory - # that does *not* exist) - for d in tails: - #print "head = %s, d = %s: " % (head, d), - head = os.path.join(head, d) - abs_head = os.path.abspath(head) - - if _path_created.get(abs_head): - continue - - if verbose >= 1: - log.info("creating %s", head) - - if not dry_run: - try: - os.mkdir(head, mode) - except OSError as exc: - if not (exc.errno == errno.EEXIST and os.path.isdir(head)): - raise DistutilsFileError( - "could not create '%s': %s" % (head, exc.args[-1])) - created_dirs.append(head) - - _path_created[abs_head] = 1 - return created_dirs - -def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): - """Create all the empty directories under 'base_dir' needed to put 'files' - there. - - 'base_dir' is just the name of a directory which doesn't necessarily - exist yet; 'files' is a list of filenames to be interpreted relative to - 'base_dir'. 'base_dir' + the directory portion of every file in 'files' - will be created if it doesn't already exist. 'mode', 'verbose' and - 'dry_run' flags are as for 'mkpath()'. - """ - # First get the list of directories to create - need_dir = set() - for file in files: - need_dir.add(os.path.join(base_dir, os.path.dirname(file))) - - # Now create them - for dir in sorted(need_dir): - mkpath(dir, mode, verbose=verbose, dry_run=dry_run) - -def copy_tree(src, dst, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, update=0, verbose=1, dry_run=0): - """Copy an entire directory tree 'src' to a new location 'dst'. - - Both 'src' and 'dst' must be directory names. If 'src' is not a - directory, raise DistutilsFileError. If 'dst' does not exist, it is - created with 'mkpath()'. The end result of the copy is that every - file in 'src' is copied to 'dst', and directories under 'src' are - recursively copied to 'dst'. Return the list of files that were - copied or might have been copied, using their output name. The - return value is unaffected by 'update' or 'dry_run': it is simply - the list of all files under 'src', with the names changed to be - under 'dst'. - - 'preserve_mode' and 'preserve_times' are the same as for - 'copy_file'; note that they only apply to regular files, not to - directories. If 'preserve_symlinks' is true, symlinks will be - copied as symlinks (on platforms that support them!); otherwise - (the default), the destination of the symlink will be copied. - 'update' and 'verbose' are the same as for 'copy_file'. 
- """ - from distutils.file_util import copy_file - - if not dry_run and not os.path.isdir(src): - raise DistutilsFileError( - "cannot copy tree '%s': not a directory" % src) - try: - names = os.listdir(src) - except OSError as e: - if dry_run: - names = [] - else: - raise DistutilsFileError( - "error listing files in '%s': %s" % (src, e.strerror)) - - if not dry_run: - mkpath(dst, verbose=verbose) - - outputs = [] - - for n in names: - src_name = os.path.join(src, n) - dst_name = os.path.join(dst, n) - - if n.startswith('.nfs'): - # skip NFS rename files - continue - - if preserve_symlinks and os.path.islink(src_name): - link_dest = os.readlink(src_name) - if verbose >= 1: - log.info("linking %s -> %s", dst_name, link_dest) - if not dry_run: - os.symlink(link_dest, dst_name) - outputs.append(dst_name) - - elif os.path.isdir(src_name): - outputs.extend( - copy_tree(src_name, dst_name, preserve_mode, - preserve_times, preserve_symlinks, update, - verbose=verbose, dry_run=dry_run)) - else: - copy_file(src_name, dst_name, preserve_mode, - preserve_times, update, verbose=verbose, - dry_run=dry_run) - outputs.append(dst_name) - - return outputs - -def _build_cmdtuple(path, cmdtuples): - """Helper for remove_tree().""" - for f in os.listdir(path): - real_f = os.path.join(path,f) - if os.path.isdir(real_f) and not os.path.islink(real_f): - _build_cmdtuple(real_f, cmdtuples) - else: - cmdtuples.append((os.remove, real_f)) - cmdtuples.append((os.rmdir, path)) - -def remove_tree(directory, verbose=1, dry_run=0): - """Recursively remove an entire directory tree. - - Any errors are ignored (apart from being reported to stdout if 'verbose' - is true). - """ - global _path_created - - if verbose >= 1: - log.info("removing '%s' (and everything under it)", directory) - if dry_run: - return - cmdtuples = [] - _build_cmdtuple(directory, cmdtuples) - for cmd in cmdtuples: - try: - cmd[0](cmd[1]) - # remove dir from cache if it's already there - abspath = os.path.abspath(cmd[1]) - if abspath in _path_created: - del _path_created[abspath] - except OSError as exc: - log.warn("error removing %s: %s", directory, exc) - -def ensure_relative(path): - """Take the full path 'path', and make it a relative path. - - This is useful to make 'path' the second argument to os.path.join(). - """ - drive, path = os.path.splitdrive(path) - if path[0:1] == os.sep: - path = drive + path[1:] - return path diff --git a/venv/Lib/site-packages/setuptools/_distutils/dist.py b/venv/Lib/site-packages/setuptools/_distutils/dist.py deleted file mode 100644 index 37db4d6..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/dist.py +++ /dev/null @@ -1,1257 +0,0 @@ -"""distutils.dist - -Provides the Distribution class, which represents the module distribution -being built/installed/distributed. -""" - -import sys -import os -import re -from email import message_from_file - -try: - import warnings -except ImportError: - warnings = None - -from distutils.errors import * -from distutils.fancy_getopt import FancyGetopt, translate_longopt -from distutils.util import check_environ, strtobool, rfc822_escape -from distutils import log -from distutils.debug import DEBUG - -# Regex to define acceptable Distutils command names. This is not *quite* -# the same as a Python NAME -- I don't allow leading underscores. The fact -# that they're very similar is no coincidence; the default naming scheme is -# to look for a Python module named after the command. 
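For reference, a short sketch of the timestamp and directory helpers from the removed dep_util and dir_util modules above. The file and directory names are hypothetical, the directory calls use dry_run so nothing on disk changes, and it assumes stdlib distutils (which this vendored copy mirrors) is importable.

    import os
    from distutils.dep_util import newer, newer_group
    from distutils.dir_util import mkpath, copy_tree, remove_tree

    # newer() raises DistutilsFileError if the source is missing, so guard it.
    if os.path.exists("module.c") and newer("module.c", "module.o"):
        print("module.o is out of date")

    # newer_group(): out of date if *any* source is newer than the target;
    # missing="newer" treats absent sources as "needs rebuild".
    if newer_group(["module.c", "helper.c"], "module.so", missing="newer"):
        print("module.so needs relinking")

    # Directory helpers; with dry_run=1 they only log what they would do.
    mkpath("build/tmp/objects", verbose=1, dry_run=1)
    copy_tree("build/tmp", "build/backup", verbose=1, dry_run=1)
    remove_tree("build/tmp", verbose=1, dry_run=1)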
-command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') - - -def _ensure_list(value, fieldname): - if isinstance(value, str): - # a string containing comma separated values is okay. It will - # be converted to a list by Distribution.finalize_options(). - pass - elif not isinstance(value, list): - # passing a tuple or an iterator perhaps, warn and convert - typename = type(value).__name__ - msg = "Warning: '{fieldname}' should be a list, got type '{typename}'" - msg = msg.format(**locals()) - log.log(log.WARN, msg) - value = list(value) - return value - - -class Distribution: - """The core of the Distutils. Most of the work hiding behind 'setup' - is really done within a Distribution instance, which farms the work out - to the Distutils commands specified on the command line. - - Setup scripts will almost never instantiate Distribution directly, - unless the 'setup()' function is totally inadequate to their needs. - However, it is conceivable that a setup script might wish to subclass - Distribution for some specialized purpose, and then pass the subclass - to 'setup()' as the 'distclass' keyword argument. If so, it is - necessary to respect the expectations that 'setup' has of Distribution. - See the code for 'setup()', in core.py, for details. - """ - - # 'global_options' describes the command-line options that may be - # supplied to the setup script prior to any actual commands. - # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of - # these global options. This list should be kept to a bare minimum, - # since every global option is also valid as a command option -- and we - # don't want to pollute the commands with too many options that they - # have minimal control over. - # The fourth entry for verbose means that it can be repeated. - global_options = [ - ('verbose', 'v', "run verbosely (default)", 1), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), - ] - - # 'common_usage' is a short (2-3 line) string describing the common - # usage of the setup script. 
- common_usage = """\ -Common commands: (see '--help-commands' for more) - - setup.py build will build the package underneath 'build/' - setup.py install will install the package -""" - - # options that are not propagated to the commands - display_options = [ - ('help-commands', None, - "list all available commands"), - ('name', None, - "print package name"), - ('version', 'V', - "print package version"), - ('fullname', None, - "print -"), - ('author', None, - "print the author's name"), - ('author-email', None, - "print the author's email address"), - ('maintainer', None, - "print the maintainer's name"), - ('maintainer-email', None, - "print the maintainer's email address"), - ('contact', None, - "print the maintainer's name if known, else the author's"), - ('contact-email', None, - "print the maintainer's email address if known, else the author's"), - ('url', None, - "print the URL for this package"), - ('license', None, - "print the license of the package"), - ('licence', None, - "alias for --license"), - ('description', None, - "print the package description"), - ('long-description', None, - "print the long package description"), - ('platforms', None, - "print the list of platforms"), - ('classifiers', None, - "print the list of classifiers"), - ('keywords', None, - "print the list of keywords"), - ('provides', None, - "print the list of packages/modules provided"), - ('requires', None, - "print the list of packages/modules required"), - ('obsoletes', None, - "print the list of packages/modules made obsolete") - ] - display_option_names = [translate_longopt(x[0]) for x in display_options] - - # negative options are options that exclude other options - negative_opt = {'quiet': 'verbose'} - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, attrs=None): - """Construct a new Distribution instance: initialize all the - attributes of a Distribution, and then use 'attrs' (a dictionary - mapping attribute names to values) to assign some of those - attributes their "real" values. (Any attributes not mentioned in - 'attrs' will be assigned to some null value: 0, None, an empty list - or dictionary, etc.) Most importantly, initialize the - 'command_obj' attribute to the empty dictionary; this will be - filled in with real command objects by 'parse_command_line()'. - """ - - # Default values for our command-line options - self.verbose = 1 - self.dry_run = 0 - self.help = 0 - for attr in self.display_option_names: - setattr(self, attr, 0) - - # Store the distribution meta-data (name, version, author, and so - # forth) in a separate object -- we're getting to have enough - # information here (and enough command-line options) that it's - # worth it. Also delegate 'get_XXX()' methods to the 'metadata' - # object in a sneaky and underhanded (but efficient!) way. - self.metadata = DistributionMetadata() - for basename in self.metadata._METHOD_BASENAMES: - method_name = "get_" + basename - setattr(self, method_name, getattr(self.metadata, method_name)) - - # 'cmdclass' maps command names to class objects, so we - # can 1) quickly figure out which class to instantiate when - # we need to create a new command object, and 2) have a way - # for the setup script to override command classes - self.cmdclass = {} - - # 'command_packages' is a list of packages in which commands - # are searched for. The factory for command 'foo' is expected - # to be named 'foo' in the module 'foo' in one of the packages - # named here. 
This list is searched from the left; an error - # is raised if no named package provides the command being - # searched for. (Always access using get_command_packages().) - self.command_packages = None - - # 'script_name' and 'script_args' are usually set to sys.argv[0] - # and sys.argv[1:], but they can be overridden when the caller is - # not necessarily a setup script run from the command-line. - self.script_name = None - self.script_args = None - - # 'command_options' is where we store command options between - # parsing them (from config files, the command-line, etc.) and when - # they are actually needed -- ie. when the command in question is - # instantiated. It is a dictionary of dictionaries of 2-tuples: - # command_options = { command_name : { option : (source, value) } } - self.command_options = {} - - # 'dist_files' is the list of (command, pyversion, file) that - # have been created by any dist commands run so far. This is - # filled regardless of whether the run is dry or not. pyversion - # gives sysconfig.get_python_version() if the dist file is - # specific to a Python version, 'any' if it is good for all - # Python versions on the target platform, and '' for a source - # file. pyversion should not be used to specify minimum or - # maximum required Python versions; use the metainfo for that - # instead. - self.dist_files = [] - - # These options are really the business of various commands, rather - # than of the Distribution itself. We provide aliases for them in - # Distribution as a convenience to the developer. - self.packages = None - self.package_data = {} - self.package_dir = None - self.py_modules = None - self.libraries = None - self.headers = None - self.ext_modules = None - self.ext_package = None - self.include_dirs = None - self.extra_path = None - self.scripts = None - self.data_files = None - self.password = '' - - # And now initialize bookkeeping stuff that can't be supplied by - # the caller at all. 'command_obj' maps command names to - # Command instances -- that's how we enforce that every command - # class is a singleton. - self.command_obj = {} - - # 'have_run' maps command names to boolean values; it keeps track - # of whether we have actually run a particular command, to make it - # cheap to "run" a command whenever we think we might need to -- if - # it's already been done, no need for expensive filesystem - # operations, we just check the 'have_run' dictionary and carry on. - # It's only safe to query 'have_run' for a command class that has - # been instantiated -- a false value will be inserted when the - # command object is created, and replaced with a true value when - # the command is successfully run. Thus it's probably best to use - # '.get()' rather than a straight lookup. - self.have_run = {} - - # Now we'll use the attrs dictionary (ultimately, keyword args from - # the setup script) to possibly override any or all of these - # distribution options. - - if attrs: - # Pull out the set of command options and work on them - # specifically. Note that this order guarantees that aliased - # command options will override any supplied redundantly - # through the general options dictionary. 
- options = attrs.get('options') - if options is not None: - del attrs['options'] - for (command, cmd_options) in options.items(): - opt_dict = self.get_option_dict(command) - for (opt, val) in cmd_options.items(): - opt_dict[opt] = ("setup script", val) - - if 'licence' in attrs: - attrs['license'] = attrs['licence'] - del attrs['licence'] - msg = "'licence' distribution option is deprecated; use 'license'" - if warnings is not None: - warnings.warn(msg) - else: - sys.stderr.write(msg + "\n") - - # Now work on the rest of the attributes. Any attribute that's - # not already defined is invalid! - for (key, val) in attrs.items(): - if hasattr(self.metadata, "set_" + key): - getattr(self.metadata, "set_" + key)(val) - elif hasattr(self.metadata, key): - setattr(self.metadata, key, val) - elif hasattr(self, key): - setattr(self, key, val) - else: - msg = "Unknown distribution option: %s" % repr(key) - warnings.warn(msg) - - # no-user-cfg is handled before other command line args - # because other args override the config files, and this - # one is needed before we can load the config files. - # If attrs['script_args'] wasn't passed, assume false. - # - # This also make sure we just look at the global options - self.want_user_cfg = True - - if self.script_args is not None: - for arg in self.script_args: - if not arg.startswith('-'): - break - if arg == '--no-user-cfg': - self.want_user_cfg = False - break - - self.finalize_options() - - def get_option_dict(self, command): - """Get the option dictionary for a given command. If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - dict = self.command_options.get(command) - if dict is None: - dict = self.command_options[command] = {} - return dict - - def dump_option_dicts(self, header=None, commands=None, indent=""): - from pprint import pformat - - if commands is None: # dump all command option dicts - commands = sorted(self.command_options.keys()) - - if header is not None: - self.announce(indent + header) - indent = indent + " " - - if not commands: - self.announce(indent + "no commands known yet") - return - - for cmd_name in commands: - opt_dict = self.command_options.get(cmd_name) - if opt_dict is None: - self.announce(indent + - "no option dict for '%s' command" % cmd_name) - else: - self.announce(indent + - "option dict for '%s' command:" % cmd_name) - out = pformat(opt_dict) - for line in out.split('\n'): - self.announce(indent + " " + line) - - # -- Config file finding/parsing methods --------------------------- - - def find_config_files(self): - """Find as many configuration files as should be processed for this - platform, and return a list of filenames in the order in which they - should be parsed. The filenames returned are guaranteed to exist - (modulo nasty race conditions). - - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. where the top-level - Distutils __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. - - The file in the user's home directory can be disabled with the - --no-user-cfg option. 
- """ - files = [] - check_environ() - - # Where to look for the system-wide Distutils config file - sys_dir = os.path.dirname(sys.modules['distutils'].__file__) - - # Look for the system config file - sys_file = os.path.join(sys_dir, "distutils.cfg") - if os.path.isfile(sys_file): - files.append(sys_file) - - # What to call the per-user config file - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - - # And look for the user config file - if self.want_user_cfg: - user_file = os.path.join(os.path.expanduser('~'), user_filename) - if os.path.isfile(user_file): - files.append(user_file) - - # All platforms support local setup.cfg - local_file = "setup.cfg" - if os.path.isfile(local_file): - files.append(local_file) - - if DEBUG: - self.announce("using config files: %s" % ', '.join(files)) - - return files - - def parse_config_files(self, filenames=None): - from configparser import ConfigParser - - # Ignore install directory options if we have a venv - if sys.prefix != sys.base_prefix: - ignore_options = [ - 'install-base', 'install-platbase', 'install-lib', - 'install-platlib', 'install-purelib', 'install-headers', - 'install-scripts', 'install-data', 'prefix', 'exec-prefix', - 'home', 'user', 'root'] - else: - ignore_options = [] - - ignore_options = frozenset(ignore_options) - - if filenames is None: - filenames = self.find_config_files() - - if DEBUG: - self.announce("Distribution.parse_config_files():") - - parser = ConfigParser() - for filename in filenames: - if DEBUG: - self.announce(" reading %s" % filename) - parser.read(filename) - for section in parser.sections(): - options = parser.options(section) - opt_dict = self.get_option_dict(section) - - for opt in options: - if opt != '__name__' and opt not in ignore_options: - val = parser.get(section,opt) - opt = opt.replace('-', '_') - opt_dict[opt] = (filename, val) - - # Make the ConfigParser forget everything (so we retain - # the original filenames that options come from) - parser.__init__() - - # If there was a "global" section in the config file, use it - # to set Distribution options. - - if 'global' in self.command_options: - for (opt, (src, val)) in self.command_options['global'].items(): - alias = self.negative_opt.get(opt) - try: - if alias: - setattr(self, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! - setattr(self, opt, strtobool(val)) - else: - setattr(self, opt, val) - except ValueError as msg: - raise DistutilsOptionError(msg) - - # -- Command-line parsing methods ---------------------------------- - - def parse_command_line(self): - """Parse the setup script's command line, taken from the - 'script_args' instance attribute (which defaults to 'sys.argv[1:]' - -- see 'setup()' in core.py). This list is first processed for - "global options" -- options that set attributes of the Distribution - instance. Then, it is alternately scanned for Distutils commands - and options for that command. Each new command terminates the - options for the previous command. The allowed options for a - command are determined by the 'user_options' attribute of the - command class -- thus, we have to be able to load command classes - in order to parse the command line. Any error in that 'options' - attribute raises DistutilsGetoptError; any error on the - command-line raises DistutilsArgError. If no Distutils commands - were found on the command line, raises DistutilsArgError. 
Return - true if command-line was successfully parsed and we should carry - on with executing commands; false if no errors but we shouldn't - execute commands (currently, this only happens if user asks for - help). - """ - # - # We now have enough information to show the Macintosh dialog - # that allows the user to interactively specify the "command line". - # - toplevel_options = self._get_toplevel_options() - - # We have to parse the command line a bit at a time -- global - # options, then the first command, then its options, and so on -- - # because each command will be handled by a different class, and - # the options that are valid for a particular class aren't known - # until we have loaded the command class, which doesn't happen - # until we know what the command is. - - self.commands = [] - parser = FancyGetopt(toplevel_options + self.display_options) - parser.set_negative_aliases(self.negative_opt) - parser.set_aliases({'licence': 'license'}) - args = parser.getopt(args=self.script_args, object=self) - option_order = parser.get_option_order() - log.set_verbosity(self.verbose) - - # for display options we return immediately - if self.handle_display_options(option_order): - return - while args: - args = self._parse_command_opts(parser, args) - if args is None: # user asked for help (and got it) - return - - # Handle the cases of --help as a "global" option, ie. - # "setup.py --help" and "setup.py --help command ...". For the - # former, we show global options (--verbose, --dry-run, etc.) - # and display-only options (--name, --version, etc.); for the - # latter, we omit the display-only options and show help for - # each command listed on the command line. - if self.help: - self._show_help(parser, - display_options=len(self.commands) == 0, - commands=self.commands) - return - - # Oops, no commands found -- an end-user error - if not self.commands: - raise DistutilsArgError("no commands supplied") - - # All is well: return true - return True - - def _get_toplevel_options(self): - """Return the non-display options recognized at the top level. - - This includes options that are recognized *only* at the top - level as well as options recognized for commands. - """ - return self.global_options + [ - ("command-packages=", None, - "list of packages that provide distutils commands"), - ] - - def _parse_command_opts(self, parser, args): - """Parse the command-line options for a single command. - 'parser' must be a FancyGetopt instance; 'args' must be the list - of arguments, starting with the current command (whose options - we are about to parse). Returns a new version of 'args' with - the next command at the front of the list; will be the empty - list if there are no more commands on the command line. Returns - None if the user asked for help on this command. - """ - # late import because of mutual dependence between these modules - from distutils.cmd import Command - - # Pull the current command from the head of the command line - command = args[0] - if not command_re.match(command): - raise SystemExit("invalid command name '%s'" % command) - self.commands.append(command) - - # Dig up the command class that implements this command, so we - # 1) know that it's a valid command, and 2) know which options - # it takes. - try: - cmd_class = self.get_command_class(command) - except DistutilsModuleError as msg: - raise DistutilsArgError(msg) - - # Require that the command class be derived from Command -- want - # to be sure that the basic "command" interface is implemented. 
- if not issubclass(cmd_class, Command): - raise DistutilsClassError( - "command class %s must subclass Command" % cmd_class) - - # Also make sure that the command object provides a list of its - # known options. - if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - msg = ("command class %s must provide " - "'user_options' attribute (a list of tuples)") - raise DistutilsClassError(msg % cmd_class) - - # If the command class has a list of negative alias options, - # merge it in with the global negative aliases. - negative_opt = self.negative_opt - if hasattr(cmd_class, 'negative_opt'): - negative_opt = negative_opt.copy() - negative_opt.update(cmd_class.negative_opt) - - # Check for help_options in command class. They have a different - # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_options = fix_help_options(cmd_class.help_options) - else: - help_options = [] - - # All commands support the global options too, just by adding - # in 'global_options'. - parser.set_option_table(self.global_options + - cmd_class.user_options + - help_options) - parser.set_negative_aliases(negative_opt) - (args, opts) = parser.getopt(args[1:]) - if hasattr(opts, 'help') and opts.help: - self._show_help(parser, display_options=0, commands=[cmd_class]) - return - - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found=0 - for (help_option, short, desc, func) in cmd_class.help_options: - if hasattr(opts, parser.get_attr_name(help_option)): - help_option_found=1 - if callable(func): - func() - else: - raise DistutilsClassError( - "invalid help function %r for help option '%s': " - "must be a callable object (function, etc.)" - % (func, help_option)) - - if help_option_found: - return - - # Put the options from the command-line into their official - # holding pen, the 'command_options' dictionary. - opt_dict = self.get_option_dict(command) - for (name, value) in vars(opts).items(): - opt_dict[name] = ("command line", value) - - return args - - def finalize_options(self): - """Set final values for all the options on the Distribution - instance, analogous to the .finalize_options() method of Command - objects. - """ - for attr in ('keywords', 'platforms'): - value = getattr(self.metadata, attr) - if value is None: - continue - if isinstance(value, str): - value = [elm.strip() for elm in value.split(',')] - setattr(self.metadata, attr, value) - - def _show_help(self, parser, global_options=1, display_options=1, - commands=[]): - """Show help for the setup script command-line in the form of - several lists of command-line options. 'parser' should be a - FancyGetopt instance; do not expect it to be returned in the - same state, as its option table will be reset to make it - generate the correct help text. - - If 'global_options' is true, lists the global options: - --verbose, --dry-run, etc. If 'display_options' is true, lists - the "display-only" options: --name, --version, etc. Finally, - lists per-command help for every command name or command class - in 'commands'. 
- """ - # late import because of mutual dependence between these modules - from distutils.core import gen_usage - from distutils.cmd import Command - - if global_options: - if display_options: - options = self._get_toplevel_options() - else: - options = self.global_options - parser.set_option_table(options) - parser.print_help(self.common_usage + "\nGlobal options:") - print('') - - if display_options: - parser.set_option_table(self.display_options) - parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") - print('') - - for command in self.commands: - if isinstance(command, type) and issubclass(command, Command): - klass = command - else: - klass = self.get_command_class(command) - if (hasattr(klass, 'help_options') and - isinstance(klass.help_options, list)): - parser.set_option_table(klass.user_options + - fix_help_options(klass.help_options)) - else: - parser.set_option_table(klass.user_options) - parser.print_help("Options for '%s' command:" % klass.__name__) - print('') - - print(gen_usage(self.script_name)) - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - from distutils.core import gen_usage - - # User just wants a list of commands -- we'll print it out and stop - # processing now (ie. if they ran "setup --help-commands foo bar", - # we ignore "foo bar"). - if self.help_commands: - self.print_commands() - print('') - print(gen_usage(self.script_name)) - return 1 - - # If user supplied any of the "display metadata" options, then - # display that metadata in the order in which the user supplied the - # metadata options. - any_display_options = 0 - is_display_option = {} - for option in self.display_options: - is_display_option[option[0]] = 1 - - for (opt, val) in option_order: - if val and is_display_option.get(opt): - opt = translate_longopt(opt) - value = getattr(self.metadata, "get_"+opt)() - if opt in ['keywords', 'platforms']: - print(','.join(value)) - elif opt in ('classifiers', 'provides', 'requires', - 'obsoletes'): - print('\n'.join(value)) - else: - print(value) - any_display_options = 1 - - return any_display_options - - def print_command_list(self, commands, header, max_length): - """Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - print(header + ":") - - for cmd in commands: - klass = self.cmdclass.get(cmd) - if not klass: - klass = self.get_command_class(cmd) - try: - description = klass.description - except AttributeError: - description = "(no description available)" - - print(" %-*s %s" % (max_length, cmd, description)) - - def print_commands(self): - """Print out a help message listing all available commands with a - description of each. The list is divided into "standard commands" - (listed in distutils.command.__all__) and "extra commands" - (mentioned in self.cmdclass, but not a standard command). The - descriptions come from the command class attribute - 'description'. 
- """ - import distutils.command - std_commands = distutils.command.__all__ - is_std = {} - for cmd in std_commands: - is_std[cmd] = 1 - - extra_commands = [] - for cmd in self.cmdclass.keys(): - if not is_std.get(cmd): - extra_commands.append(cmd) - - max_length = 0 - for cmd in (std_commands + extra_commands): - if len(cmd) > max_length: - max_length = len(cmd) - - self.print_command_list(std_commands, - "Standard commands", - max_length) - if extra_commands: - print() - self.print_command_list(extra_commands, - "Extra commands", - max_length) - - def get_command_list(self): - """Get a list of (command, description) tuples. - The list is divided into "standard commands" (listed in - distutils.command.__all__) and "extra commands" (mentioned in - self.cmdclass, but not a standard command). The descriptions come - from the command class attribute 'description'. - """ - # Currently this is only used on Mac OS, for the Mac-only GUI - # Distutils interface (by Jack Jansen) - import distutils.command - std_commands = distutils.command.__all__ - is_std = {} - for cmd in std_commands: - is_std[cmd] = 1 - - extra_commands = [] - for cmd in self.cmdclass.keys(): - if not is_std.get(cmd): - extra_commands.append(cmd) - - rv = [] - for cmd in (std_commands + extra_commands): - klass = self.cmdclass.get(cmd) - if not klass: - klass = self.get_command_class(cmd) - try: - description = klass.description - except AttributeError: - description = "(no description available)" - rv.append((cmd, description)) - return rv - - # -- Command class/object methods ---------------------------------- - - def get_command_packages(self): - """Return a list of packages from which commands are loaded.""" - pkgs = self.command_packages - if not isinstance(pkgs, list): - if pkgs is None: - pkgs = '' - pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] - if "distutils.command" not in pkgs: - pkgs.insert(0, "distutils.command") - self.command_packages = pkgs - return pkgs - - def get_command_class(self, command): - """Return the class that implements the Distutils command named by - 'command'. First we check the 'cmdclass' dictionary; if the - command is mentioned there, we fetch the class object from the - dictionary and return it. Otherwise we load the command module - ("distutils.command." + command) and fetch the command class from - the module. The loaded class is also stored in 'cmdclass' - to speed future calls to 'get_command_class()'. - - Raises DistutilsModuleError if the expected module could not be - found, or if that module does not define the expected class. - """ - klass = self.cmdclass.get(command) - if klass: - return klass - - for pkgname in self.get_command_packages(): - module_name = "%s.%s" % (pkgname, command) - klass_name = command - - try: - __import__(module_name) - module = sys.modules[module_name] - except ImportError: - continue - - try: - klass = getattr(module, klass_name) - except AttributeError: - raise DistutilsModuleError( - "invalid command '%s' (no class '%s' in module '%s')" - % (command, klass_name, module_name)) - - self.cmdclass[command] = klass - return klass - - raise DistutilsModuleError("invalid command '%s'" % command) - - def get_command_obj(self, command, create=1): - """Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. 
- """ - cmd_obj = self.command_obj.get(command) - if not cmd_obj and create: - if DEBUG: - self.announce("Distribution.get_command_obj(): " - "creating '%s' command object" % command) - - klass = self.get_command_class(command) - cmd_obj = self.command_obj[command] = klass(self) - self.have_run[command] = 0 - - # Set any options that were supplied in config files - # or on the command line. (NB. support for error - # reporting is lame here: any errors aren't reported - # until 'finalize_options()' is called, which means - # we won't report the source of the error.) - options = self.command_options.get(command) - if options: - self._set_command_options(cmd_obj, options) - - return cmd_obj - - def _set_command_options(self, command_obj, option_dict=None): - """Set the options for 'command_obj' from 'option_dict'. Basically - this means copying elements of a dictionary ('option_dict') to - attributes of an instance ('command'). - - 'command_obj' must be a Command instance. If 'option_dict' is not - supplied, uses the standard option dictionary for this command - (from 'self.command_options'). - """ - command_name = command_obj.get_command_name() - if option_dict is None: - option_dict = self.get_option_dict(command_name) - - if DEBUG: - self.announce(" setting options for '%s' command:" % command_name) - for (option, (source, value)) in option_dict.items(): - if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, - source)) - try: - bool_opts = [translate_longopt(o) - for o in command_obj.boolean_options] - except AttributeError: - bool_opts = [] - try: - neg_opt = command_obj.negative_opt - except AttributeError: - neg_opt = {} - - try: - is_string = isinstance(value, str) - if option in neg_opt and is_string: - setattr(command_obj, neg_opt[option], not strtobool(value)) - elif option in bool_opts and is_string: - setattr(command_obj, option, strtobool(value)) - elif hasattr(command_obj, option): - setattr(command_obj, option, value) - else: - raise DistutilsOptionError( - "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) - except ValueError as msg: - raise DistutilsOptionError(msg) - - def reinitialize_command(self, command, reinit_subcommands=0): - """Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': ie., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install" command for an example. Only - reinitializes the sub-commands that actually matter, ie. those - whose test predicates return true. - - Returns the reinitialized command object. 
- """ - from distutils.cmd import Command - if not isinstance(command, Command): - command_name = command - command = self.get_command_obj(command_name) - else: - command_name = command.get_command_name() - - if not command.finalized: - return command - command.initialize_options() - command.finalized = 0 - self.have_run[command_name] = 0 - self._set_command_options(command) - - if reinit_subcommands: - for sub in command.get_sub_commands(): - self.reinitialize_command(sub, reinit_subcommands) - - return command - - # -- Methods that operate on the Distribution ---------------------- - - def announce(self, msg, level=log.INFO): - log.log(level, msg) - - def run_commands(self): - """Run each command that was seen on the setup script command line. - Uses the list of commands found and cache of command objects - created by 'get_command_obj()'. - """ - for cmd in self.commands: - self.run_command(cmd) - - # -- Methods that operate on its Commands -------------------------- - - def run_command(self, command): - """Do whatever it takes to run a command (including nothing at all, - if the command has already been run). Specifically: if we have - already created and run the command named by 'command', return - silently without doing anything. If the command named by 'command' - doesn't even have a command object yet, create one. Then invoke - 'run()' on that command object (or an existing one). - """ - # Already been here, done that? then return silently. - if self.have_run.get(command): - return - - log.info("running %s", command) - cmd_obj = self.get_command_obj(command) - cmd_obj.ensure_finalized() - cmd_obj.run() - self.have_run[command] = 1 - - # -- Distribution query methods ------------------------------------ - - def has_pure_modules(self): - return len(self.packages or self.py_modules or []) > 0 - - def has_ext_modules(self): - return self.ext_modules and len(self.ext_modules) > 0 - - def has_c_libraries(self): - return self.libraries and len(self.libraries) > 0 - - def has_modules(self): - return self.has_pure_modules() or self.has_ext_modules() - - def has_headers(self): - return self.headers and len(self.headers) > 0 - - def has_scripts(self): - return self.scripts and len(self.scripts) > 0 - - def has_data_files(self): - return self.data_files and len(self.data_files) > 0 - - def is_pure(self): - return (self.has_pure_modules() and - not self.has_ext_modules() and - not self.has_c_libraries()) - - # -- Metadata query methods ---------------------------------------- - - # If you're looking for 'get_name()', 'get_version()', and so forth, - # they are defined in a sneaky way: the constructor binds self.get_XXX - # to self.metadata.get_XXX. The actual code is in the - # DistributionMetadata class, below. - -class DistributionMetadata: - """Dummy class to hold the distribution meta-data: name, version, - author, and so forth. 
- """ - - _METHOD_BASENAMES = ("name", "version", "author", "author_email", - "maintainer", "maintainer_email", "url", - "license", "description", "long_description", - "keywords", "platforms", "fullname", "contact", - "contact_email", "classifiers", "download_url", - # PEP 314 - "provides", "requires", "obsoletes", - ) - - def __init__(self, path=None): - if path is not None: - self.read_pkg_file(open(path)) - else: - self.name = None - self.version = None - self.author = None - self.author_email = None - self.maintainer = None - self.maintainer_email = None - self.url = None - self.license = None - self.description = None - self.long_description = None - self.keywords = None - self.platforms = None - self.classifiers = None - self.download_url = None - # PEP 314 - self.provides = None - self.requires = None - self.obsoletes = None - - def read_pkg_file(self, file): - """Reads the metadata values from a file object.""" - msg = message_from_file(file) - - def _read_field(name): - value = msg[name] - if value == 'UNKNOWN': - return None - return value - - def _read_list(name): - values = msg.get_all(name, None) - if values == []: - return None - return values - - metadata_version = msg['metadata-version'] - self.name = _read_field('name') - self.version = _read_field('version') - self.description = _read_field('summary') - # we are filling author only. - self.author = _read_field('author') - self.maintainer = None - self.author_email = _read_field('author-email') - self.maintainer_email = None - self.url = _read_field('home-page') - self.license = _read_field('license') - - if 'download-url' in msg: - self.download_url = _read_field('download-url') - else: - self.download_url = None - - self.long_description = _read_field('description') - self.description = _read_field('summary') - - if 'keywords' in msg: - self.keywords = _read_field('keywords').split(',') - - self.platforms = _read_list('platform') - self.classifiers = _read_list('classifier') - - # PEP 314 - these fields only exist in 1.1 - if metadata_version == '1.1': - self.requires = _read_list('requires') - self.provides = _read_list('provides') - self.obsoletes = _read_list('obsoletes') - else: - self.requires = None - self.provides = None - self.obsoletes = None - - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) - - def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. 
- """ - version = '1.0' - if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): - version = '1.1' - - file.write('Metadata-Version: %s\n' % version) - file.write('Name: %s\n' % self.get_name()) - file.write('Version: %s\n' % self.get_version()) - file.write('Summary: %s\n' % self.get_description()) - file.write('Home-page: %s\n' % self.get_url()) - file.write('Author: %s\n' % self.get_contact()) - file.write('Author-email: %s\n' % self.get_contact_email()) - file.write('License: %s\n' % self.get_license()) - if self.download_url: - file.write('Download-URL: %s\n' % self.download_url) - - long_desc = rfc822_escape(self.get_long_description()) - file.write('Description: %s\n' % long_desc) - - keywords = ','.join(self.get_keywords()) - if keywords: - file.write('Keywords: %s\n' % keywords) - - self._write_list(file, 'Platform', self.get_platforms()) - self._write_list(file, 'Classifier', self.get_classifiers()) - - # PEP 314 - self._write_list(file, 'Requires', self.get_requires()) - self._write_list(file, 'Provides', self.get_provides()) - self._write_list(file, 'Obsoletes', self.get_obsoletes()) - - def _write_list(self, file, name, values): - for value in values: - file.write('%s: %s\n' % (name, value)) - - # -- Metadata query methods ---------------------------------------- - - def get_name(self): - return self.name or "UNKNOWN" - - def get_version(self): - return self.version or "0.0.0" - - def get_fullname(self): - return "%s-%s" % (self.get_name(), self.get_version()) - - def get_author(self): - return self.author or "UNKNOWN" - - def get_author_email(self): - return self.author_email or "UNKNOWN" - - def get_maintainer(self): - return self.maintainer or "UNKNOWN" - - def get_maintainer_email(self): - return self.maintainer_email or "UNKNOWN" - - def get_contact(self): - return self.maintainer or self.author or "UNKNOWN" - - def get_contact_email(self): - return self.maintainer_email or self.author_email or "UNKNOWN" - - def get_url(self): - return self.url or "UNKNOWN" - - def get_license(self): - return self.license or "UNKNOWN" - get_licence = get_license - - def get_description(self): - return self.description or "UNKNOWN" - - def get_long_description(self): - return self.long_description or "UNKNOWN" - - def get_keywords(self): - return self.keywords or [] - - def set_keywords(self, value): - self.keywords = _ensure_list(value, 'keywords') - - def get_platforms(self): - return self.platforms or ["UNKNOWN"] - - def set_platforms(self, value): - self.platforms = _ensure_list(value, 'platforms') - - def get_classifiers(self): - return self.classifiers or [] - - def set_classifiers(self, value): - self.classifiers = _ensure_list(value, 'classifiers') - - def get_download_url(self): - return self.download_url or "UNKNOWN" - - # PEP 314 - def get_requires(self): - return self.requires or [] - - def set_requires(self, value): - import distutils.versionpredicate - for v in value: - distutils.versionpredicate.VersionPredicate(v) - self.requires = list(value) - - def get_provides(self): - return self.provides or [] - - def set_provides(self, value): - value = [v.strip() for v in value] - for v in value: - import distutils.versionpredicate - distutils.versionpredicate.split_provision(v) - self.provides = value - - def get_obsoletes(self): - return self.obsoletes or [] - - def set_obsoletes(self, value): - import distutils.versionpredicate - for v in value: - distutils.versionpredicate.VersionPredicate(v) - self.obsoletes = list(value) - -def 
fix_help_options(options): - """Convert a 4-tuple 'help_options' list as found in various command - classes to the 3-tuple form required by FancyGetopt. - """ - new_options = [] - for help_tuple in options: - new_options.append(help_tuple[0:3]) - return new_options diff --git a/venv/Lib/site-packages/setuptools/_distutils/errors.py b/venv/Lib/site-packages/setuptools/_distutils/errors.py deleted file mode 100644 index 8b93059..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/errors.py +++ /dev/null @@ -1,97 +0,0 @@ -"""distutils.errors - -Provides exceptions used by the Distutils modules. Note that Distutils -modules may raise standard exceptions; in particular, SystemExit is -usually raised for errors that are obviously the end-user's fault -(eg. bad command-line arguments). - -This module is safe to use in "from ... import *" mode; it only exports -symbols whose names start with "Distutils" and end with "Error".""" - -class DistutilsError (Exception): - """The root of all Distutils evil.""" - pass - -class DistutilsModuleError (DistutilsError): - """Unable to load an expected module, or to find an expected class - within some module (in particular, command modules and classes).""" - pass - -class DistutilsClassError (DistutilsError): - """Some command class (or possibly distribution class, if anyone - feels a need to subclass Distribution) is found not to be holding - up its end of the bargain, ie. implementing some part of the - "command "interface.""" - pass - -class DistutilsGetoptError (DistutilsError): - """The option table provided to 'fancy_getopt()' is bogus.""" - pass - -class DistutilsArgError (DistutilsError): - """Raised by fancy_getopt in response to getopt.error -- ie. an - error in the command line usage.""" - pass - -class DistutilsFileError (DistutilsError): - """Any problems in the filesystem: expected file not found, etc. - Typically this is for problems that we detect before OSError - could be raised.""" - pass - -class DistutilsOptionError (DistutilsError): - """Syntactic/semantic errors in command options, such as use of - mutually conflicting options, or inconsistent options, - badly-spelled values, etc. No distinction is made between option - values originating in the setup script, the command line, config - files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise DistutilsSetupError instead.""" - pass - -class DistutilsSetupError (DistutilsError): - """For errors that can be definitely blamed on the setup script, - such as invalid keyword arguments to 'setup()'.""" - pass - -class DistutilsPlatformError (DistutilsError): - """We don't know how to do something on the current platform (but - we do know how to do it on some platform) -- eg. 
trying to compile - C files on a platform not supported by a CCompiler subclass.""" - pass - -class DistutilsExecError (DistutilsError): - """Any problems executing an external program (such as the C - compiler, when compiling C files).""" - pass - -class DistutilsInternalError (DistutilsError): - """Internal inconsistencies or impossibilities (obviously, this - should never be seen if the code is working!).""" - pass - -class DistutilsTemplateError (DistutilsError): - """Syntax error in a file list template.""" - -class DistutilsByteCompileError(DistutilsError): - """Byte compile error.""" - -# Exception classes used by the CCompiler implementation classes -class CCompilerError (Exception): - """Some compile/link operation failed.""" - -class PreprocessError (CCompilerError): - """Failure to preprocess one or more C/C++ files.""" - -class CompileError (CCompilerError): - """Failure to compile one or more C/C++ source files.""" - -class LibError (CCompilerError): - """Failure to create a static library from one or more C/C++ object - files.""" - -class LinkError (CCompilerError): - """Failure to link one or more C/C++ object files into an executable - or shared library file.""" - -class UnknownFileError (CCompilerError): - """Attempt to process an unknown file type.""" diff --git a/venv/Lib/site-packages/setuptools/_distutils/extension.py b/venv/Lib/site-packages/setuptools/_distutils/extension.py deleted file mode 100644 index c507da3..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/extension.py +++ /dev/null @@ -1,240 +0,0 @@ -"""distutils.extension - -Provides the Extension class, used to describe C/C++ extension -modules in setup scripts.""" - -import os -import warnings - -# This class is really only used by the "build_ext" command, so it might -# make sense to put it in distutils.command.build_ext. However, that -# module is already big enough, and I want to make this class a bit more -# complex to simplify some common cases ("foo" module in "foo.c") and do -# better error-checking ("foo.c" actually exists). -# -# Also, putting this in build_ext.py means every setup script would have to -# import that large-ish module (indirectly, through distutils.core) in -# order to do anything. - -class Extension: - """Just a collection of attributes that describes an extension - module and everything needed to build it (hopefully in a portable - way, but there are hooks that let you be as unportable as you need). - - Instance attributes: - name : string - the full name of the extension, including any packages -- ie. - *not* a filename or pathname, but Python dotted name - sources : [string] - list of source filenames, relative to the distribution root - (where the setup script lives), in Unix form (slash-separated) - for portability. Source files may be C, C++, SWIG (.i), - platform-specific resource files, or whatever else is recognized - by the "build_ext" command as source for a Python extension. 
- include_dirs : [string] - list of directories to search for C/C++ header files (in Unix - form for portability) - define_macros : [(name : string, value : string|None)] - list of macros to define; each macro is defined using a 2-tuple, - where 'value' is either the string to define it to or None to - define it without a particular value (equivalent of "#define - FOO" in source or -DFOO on Unix C compiler command line) - undef_macros : [string] - list of macros to undefine explicitly - library_dirs : [string] - list of directories to search for C/C++ libraries at link time - libraries : [string] - list of library names (not filenames or paths) to link against - runtime_library_dirs : [string] - list of directories to search for C/C++ libraries at run time - (for shared extensions, this is when the extension is loaded) - extra_objects : [string] - list of extra files to link with (eg. object files not implied - by 'sources', static library that must be explicitly specified, - binary resource files, etc.) - extra_compile_args : [string] - any extra platform- and compiler-specific information to use - when compiling the source files in 'sources'. For platforms and - compilers where "command line" makes sense, this is typically a - list of command-line arguments, but for other platforms it could - be anything. - extra_link_args : [string] - any extra platform- and compiler-specific information to use - when linking object files together to create the extension (or - to create a new static Python interpreter). Similar - interpretation as for 'extra_compile_args'. - export_symbols : [string] - list of symbols to be exported from a shared extension. Not - used on all platforms, and not generally necessary for Python - extensions, which typically export exactly one symbol: "init" + - extension_name. - swig_opts : [string] - any extra options to pass to SWIG if a source file has the .i - extension. - depends : [string] - list of files that the extension depends on - language : string - extension language (i.e. "c", "c++", "objc"). Will be detected - from the source extensions if not provided. - optional : boolean - specifies that a build failure in the extension should not abort the - build process, but simply not install the failing extension. - """ - - # When adding arguments to this constructor, be sure to update - # setup_keywords in core.py. 
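For context, the attribute list above maps one-to-one onto keyword arguments of the constructor that follows. A minimal, made-up setup-script fragment (the module name "spam" and all paths are hypothetical, not taken from this repository) would look like:

```python
# Hypothetical setup.py fragment; "spam" and the listed paths are invented.
from distutils.core import setup, Extension

spam_ext = Extension(
    "spam",                                   # dotted module name, not a filename
    sources=["src/spammodule.c"],             # relative to the setup script
    include_dirs=["include"],
    define_macros=[("SPAM_DEBUG", None), ("SPAM_LEVEL", "2")],
    libraries=["m"],                          # link against libm
    extra_compile_args=["-O2"],
)

setup(name="spam", version="1.0", ext_modules=[spam_ext])
```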
- def __init__(self, name, sources, - include_dirs=None, - define_macros=None, - undef_macros=None, - library_dirs=None, - libraries=None, - runtime_library_dirs=None, - extra_objects=None, - extra_compile_args=None, - extra_link_args=None, - export_symbols=None, - swig_opts = None, - depends=None, - language=None, - optional=None, - **kw # To catch unknown keywords - ): - if not isinstance(name, str): - raise AssertionError("'name' must be a string") - if not (isinstance(sources, list) and - all(isinstance(v, str) for v in sources)): - raise AssertionError("'sources' must be a list of strings") - - self.name = name - self.sources = sources - self.include_dirs = include_dirs or [] - self.define_macros = define_macros or [] - self.undef_macros = undef_macros or [] - self.library_dirs = library_dirs or [] - self.libraries = libraries or [] - self.runtime_library_dirs = runtime_library_dirs or [] - self.extra_objects = extra_objects or [] - self.extra_compile_args = extra_compile_args or [] - self.extra_link_args = extra_link_args or [] - self.export_symbols = export_symbols or [] - self.swig_opts = swig_opts or [] - self.depends = depends or [] - self.language = language - self.optional = optional - - # If there are unknown keyword options, warn about them - if len(kw) > 0: - options = [repr(option) for option in kw] - options = ', '.join(sorted(options)) - msg = "Unknown Extension options: %s" % options - warnings.warn(msg) - - def __repr__(self): - return '<%s.%s(%r) at %#x>' % ( - self.__class__.__module__, - self.__class__.__qualname__, - self.name, - id(self)) - - -def read_setup_file(filename): - """Reads a Setup file and returns Extension instances.""" - from distutils.sysconfig import (parse_makefile, expand_makefile_vars, - _variable_rx) - - from distutils.text_file import TextFile - from distutils.util import split_quoted - - # First pass over the file to gather "VAR = VALUE" assignments. - vars = parse_makefile(filename) - - # Second pass to gobble up the real content: lines of the form - # ... [ ...] [ ...] [ ...] - file = TextFile(filename, - strip_comments=1, skip_blanks=1, join_lines=1, - lstrip_ws=1, rstrip_ws=1) - try: - extensions = [] - - while True: - line = file.readline() - if line is None: # eof - break - if _variable_rx.match(line): # VAR=VALUE, handled in first pass - continue - - if line[0] == line[-1] == "*": - file.warn("'%s' lines not handled yet" % line) - continue - - line = expand_makefile_vars(line, vars) - words = split_quoted(line) - - # NB. this parses a slightly different syntax than the old - # makesetup script: here, there must be exactly one extension per - # line, and it must be the first word of the line. I have no idea - # why the old syntax supported multiple extensions per line, as - # they all wind up being the same. - - module = words[0] - ext = Extension(module, []) - append_next_word = None - - for word in words[1:]: - if append_next_word is not None: - append_next_word.append(word) - append_next_word = None - continue - - suffix = os.path.splitext(word)[1] - switch = word[0:2] ; value = word[2:] - - if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): - # hmm, should we do something about C vs. C++ sources? - # or leave it up to the CCompiler implementation to - # worry about? 
- ext.sources.append(word) - elif switch == "-I": - ext.include_dirs.append(value) - elif switch == "-D": - equals = value.find("=") - if equals == -1: # bare "-DFOO" -- no value - ext.define_macros.append((value, None)) - else: # "-DFOO=blah" - ext.define_macros.append((value[0:equals], - value[equals+2:])) - elif switch == "-U": - ext.undef_macros.append(value) - elif switch == "-C": # only here 'cause makesetup has it! - ext.extra_compile_args.append(word) - elif switch == "-l": - ext.libraries.append(value) - elif switch == "-L": - ext.library_dirs.append(value) - elif switch == "-R": - ext.runtime_library_dirs.append(value) - elif word == "-rpath": - append_next_word = ext.runtime_library_dirs - elif word == "-Xlinker": - append_next_word = ext.extra_link_args - elif word == "-Xcompiler": - append_next_word = ext.extra_compile_args - elif switch == "-u": - ext.extra_link_args.append(word) - if not value: - append_next_word = ext.extra_link_args - elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): - # NB. a really faithful emulation of makesetup would - # append a .o file to extra_objects only if it - # had a slash in it; otherwise, it would s/.o/.c/ - # and append it to sources. Hmmmm. - ext.extra_objects.append(word) - else: - file.warn("unrecognized argument '%s'" % word) - - extensions.append(ext) - finally: - file.close() - - return extensions diff --git a/venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py b/venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py deleted file mode 100644 index 7d170dd..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/fancy_getopt.py +++ /dev/null @@ -1,457 +0,0 @@ -"""distutils.fancy_getopt - -Wrapper around the standard getopt module that provides the following -additional features: - * short and long options are tied together - * options have help strings, so fancy_getopt could potentially - create a complete usage summary - * options set attributes of a passed-in object -""" - -import sys, string, re -import getopt -from distutils.errors import * - -# Much like command_re in distutils.core, this is close to but not quite -# the same as a Python NAME -- except, in the spirit of most GNU -# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) -# The similarities to NAME are again not a coincidence... -longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' -longopt_re = re.compile(r'^%s$' % longopt_pat) - -# For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) - -# This is used to translate long options to legitimate Python identifiers -# (for use as attributes of some object). -longopt_xlate = str.maketrans('-', '_') - -class FancyGetopt: - """Wrapper around the standard 'getopt()' module that provides some - handy extra functionality: - * short and long options are tied together - * options have help strings, and help text can be assembled - from them - * options set attributes of a passed-in object - * boolean options can have "negative aliases" -- eg. if - --quiet is the "negative alias" of --verbose, then "--quiet" - on the command line sets 'verbose' to false - """ - - def __init__(self, option_table=None): - # The option table is (currently) a list of tuples. The - # tuples may have 3 or four values: - # (long_option, short_option, help_string [, repeatable]) - # if an option takes an argument, its long_option should have '=' - # appended; short_option should just be a single character, no ':' - # in any case. 
If a long_option doesn't have a corresponding - # short_option, short_option should be None. All option tuples - # must have long options. - self.option_table = option_table - - # 'option_index' maps long option names to entries in the option - # table (ie. those 3-tuples). - self.option_index = {} - if self.option_table: - self._build_index() - - # 'alias' records (duh) alias options; {'foo': 'bar'} means - # --foo is an alias for --bar - self.alias = {} - - # 'negative_alias' keeps track of options that are the boolean - # opposite of some other option - self.negative_alias = {} - - # These keep track of the information in the option table. We - # don't actually populate these structures until we're ready to - # parse the command-line, since the 'option_table' passed in here - # isn't necessarily the final word. - self.short_opts = [] - self.long_opts = [] - self.short2long = {} - self.attr_name = {} - self.takes_arg = {} - - # And 'option_order' is filled up in 'getopt()'; it records the - # original order of options (and their values) on the command-line, - # but expands short options, converts aliases, etc. - self.option_order = [] - - def _build_index(self): - self.option_index.clear() - for option in self.option_table: - self.option_index[option[0]] = option - - def set_option_table(self, option_table): - self.option_table = option_table - self._build_index() - - def add_option(self, long_option, short_option=None, help_string=None): - if long_option in self.option_index: - raise DistutilsGetoptError( - "option conflict: already an option '%s'" % long_option) - else: - option = (long_option, short_option, help_string) - self.option_table.append(option) - self.option_index[long_option] = option - - def has_option(self, long_option): - """Return true if the option table for this parser has an - option with long name 'long_option'.""" - return long_option in self.option_index - - def get_attr_name(self, long_option): - """Translate long option name 'long_option' to the form it - has as an attribute of some object: ie., translate hyphens - to underscores.""" - return long_option.translate(longopt_xlate) - - def _check_alias_dict(self, aliases, what): - assert isinstance(aliases, dict) - for (alias, opt) in aliases.items(): - if alias not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "option '%s' not defined") % (what, alias, alias)) - if opt not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "aliased option '%s' not defined") % (what, alias, opt)) - - def set_aliases(self, alias): - """Set the aliases for this option parser.""" - self._check_alias_dict(alias, "alias") - self.alias = alias - - def set_negative_aliases(self, negative_alias): - """Set the negative aliases for this option parser. - 'negative_alias' should be a dictionary mapping option names to - option names, both the key and value must already be defined - in the option table.""" - self._check_alias_dict(negative_alias, "negative alias") - self.negative_alias = negative_alias - - def _grok_option_table(self): - """Populate the various data structures that keep tabs on the - option table. Called by 'getopt()' before it can do anything - worthwhile. 
- """ - self.long_opts = [] - self.short_opts = [] - self.short2long.clear() - self.repeat = {} - - for option in self.option_table: - if len(option) == 3: - long, short, help = option - repeat = 0 - elif len(option) == 4: - long, short, help, repeat = option - else: - # the option table is part of the code, so simply - # assert that it is correct - raise ValueError("invalid option tuple: %r" % (option,)) - - # Type- and value-check the option names - if not isinstance(long, str) or len(long) < 2: - raise DistutilsGetoptError(("invalid long option '%s': " - "must be a string of length >= 2") % long) - - if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise DistutilsGetoptError("invalid short option '%s': " - "must a single character or None" % short) - - self.repeat[long] = repeat - self.long_opts.append(long) - - if long[-1] == '=': # option takes an argument? - if short: short = short + ':' - long = long[0:-1] - self.takes_arg[long] = 1 - else: - # Is option is a "negative alias" for some other option (eg. - # "quiet" == "!verbose")? - alias_to = self.negative_alias.get(long) - if alias_to is not None: - if self.takes_arg[alias_to]: - raise DistutilsGetoptError( - "invalid negative alias '%s': " - "aliased option '%s' takes a value" - % (long, alias_to)) - - self.long_opts[-1] = long # XXX redundant?! - self.takes_arg[long] = 0 - - # If this is an alias option, make sure its "takes arg" flag is - # the same as the option it's aliased to. - alias_to = self.alias.get(long) - if alias_to is not None: - if self.takes_arg[long] != self.takes_arg[alias_to]: - raise DistutilsGetoptError( - "invalid alias '%s': inconsistent with " - "aliased option '%s' (one of them takes a value, " - "the other doesn't" - % (long, alias_to)) - - # Now enforce some bondage on the long option name, so we can - # later translate it to an attribute name on some object. Have - # to do this a bit late to make sure we've removed any trailing - # '='. - if not longopt_re.match(long): - raise DistutilsGetoptError( - "invalid long option name '%s' " - "(must be letters, numbers, hyphens only" % long) - - self.attr_name[long] = self.get_attr_name(long) - if short: - self.short_opts.append(short) - self.short2long[short[0]] = long - - def getopt(self, args=None, object=None): - """Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - if args is None: - args = sys.argv[1:] - if object is None: - object = OptionDummy() - created_object = True - else: - created_object = False - - self._grok_option_table() - - short_opts = ' '.join(self.short_opts) - try: - opts, args = getopt.getopt(args, short_opts, self.long_opts) - except getopt.error as msg: - raise DistutilsArgError(msg) - - for opt, val in opts: - if len(opt) == 2 and opt[0] == '-': # it's a short option - opt = self.short2long[opt[1]] - else: - assert len(opt) > 2 and opt[:2] == '--' - opt = opt[2:] - - alias = self.alias.get(opt) - if alias: - opt = alias - - if not self.takes_arg[opt]: # boolean option? 
- assert val == '', "boolean option can't have value" - alias = self.negative_alias.get(opt) - if alias: - opt = alias - val = 0 - else: - val = 1 - - attr = self.attr_name[opt] - # The only repeating option at the moment is 'verbose'. - # It has a negative option -q quiet, which should set verbose = 0. - if val and self.repeat.get(attr) is not None: - val = getattr(object, attr, 0) + 1 - setattr(object, attr, val) - self.option_order.append((opt, val)) - - # for opts - if created_object: - return args, object - else: - return args - - def get_option_order(self): - """Returns the list of (option, value) tuples processed by the - previous run of 'getopt()'. Raises RuntimeError if - 'getopt()' hasn't been called yet. - """ - if self.option_order is None: - raise RuntimeError("'getopt()' hasn't been called yet") - else: - return self.option_order - - def generate_help(self, header=None): - """Generate help text (a list of strings, one per suggested line of - output) from the option table for this FancyGetopt object. - """ - # Blithely assume the option table is good: probably wouldn't call - # 'generate_help()' unless you've already called 'getopt()'. - - # First pass: determine maximum length of long option names - max_opt = 0 - for option in self.option_table: - long = option[0] - short = option[1] - l = len(long) - if long[-1] == '=': - l = l - 1 - if short is not None: - l = l + 5 # " (-x)" where short == 'x' - if l > max_opt: - max_opt = l - - opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter - - # Typical help block looks like this: - # --foo controls foonabulation - # Help block for longest option looks like this: - # --flimflam set the flim-flam level - # and with wrapped text: - # --flimflam set the flim-flam level (must be between - # 0 and 100, except on Tuesdays) - # Options with short names will have the short name shown (but - # it doesn't contribute to max_opt): - # --foo (-f) controls foonabulation - # If adding the short option would make the left column too wide, - # we push the explanation off to the next line - # --flimflam (-l) - # set the flim-flam level - # Important parameters: - # - 2 spaces before option block start lines - # - 2 dashes for each long option name - # - min. 2 spaces between option and explanation (gutter) - # - 5 characters (incl. space) for short option name - - # Now generate lines of help text. (If 80 columns were good enough - # for Jesus, then 78 columns are good enough for me!) 
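As a rough usage sketch of the parsing and help-generation behaviour described above (the option table and the argument list are invented for illustration, not part of this code):

```python
# Invented option table; with no target object, FancyGetopt stores parsed
# values as attributes on an OptionDummy and returns it alongside the args.
from distutils.fancy_getopt import FancyGetopt

options = [
    ("verbose", "v", "run verbosely"),           # boolean option
    ("quiet", "q", "run quietly"),               # negative alias of --verbose
    ("output=", "o", "write results to FILE"),   # trailing '=' means it takes a value
]
parser = FancyGetopt(options)
parser.set_negative_aliases({"quiet": "verbose"})

args, opts = parser.getopt(["-v", "--output", "out.txt", "leftover"])
print(opts.verbose, opts.output, args)           # 1 out.txt ['leftover']
print("\n".join(parser.generate_help("Usage: demo [options]")))
```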
- line_width = 78 - text_width = line_width - opt_width - big_indent = ' ' * opt_width - if header: - lines = [header] - else: - lines = ['Option summary:'] - - for option in self.option_table: - long, short, help = option[:3] - text = wrap_text(help, text_width) - if long[-1] == '=': - long = long[0:-1] - - # Case 1: no short option at all (makes life easy) - if short is None: - if text: - lines.append(" --%-*s %s" % (max_opt, long, text[0])) - else: - lines.append(" --%-*s " % (max_opt, long)) - - # Case 2: we have a short option, so we have to include it - # just after the long option - else: - opt_names = "%s (-%s)" % (long, short) - if text: - lines.append(" --%-*s %s" % - (max_opt, opt_names, text[0])) - else: - lines.append(" --%-*s" % opt_names) - - for l in text[1:]: - lines.append(big_indent + l) - return lines - - def print_help(self, header=None, file=None): - if file is None: - file = sys.stdout - for line in self.generate_help(header): - file.write(line + "\n") - - -def fancy_getopt(options, negative_opt, object, args): - parser = FancyGetopt(options) - parser.set_negative_aliases(negative_opt) - return parser.getopt(args, object) - - -WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} - -def wrap_text(text, width): - """wrap_text(text : string, width : int) -> [string] - - Split 'text' into multiple lines of no more than 'width' characters - each, and return the list of strings that results. - """ - if text is None: - return [] - if len(text) <= width: - return [text] - - text = text.expandtabs() - text = text.translate(WS_TRANS) - chunks = re.split(r'( +|-+)', text) - chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings - lines = [] - - while chunks: - cur_line = [] # list of chunks (to-be-joined) - cur_len = 0 # length of current line - - while chunks: - l = len(chunks[0]) - if cur_len + l <= width: # can squeeze (at least) this chunk in - cur_line.append(chunks[0]) - del chunks[0] - cur_len = cur_len + l - else: # this line is full - # drop last chunk if all space - if cur_line and cur_line[-1][0] == ' ': - del cur_line[-1] - break - - if chunks: # any chunks left to process? - # if the current line is still empty, then we had a single - # chunk that's too big too fit on a line -- so we break - # down and break it up at the line width - if cur_len == 0: - cur_line.append(chunks[0][0:width]) - chunks[0] = chunks[0][width:] - - # all-whitespace chunks at the end of a line can be discarded - # (and we know from the re.split above that if a chunk has - # *any* whitespace, it is *all* whitespace) - if chunks[0][0] == ' ': - del chunks[0] - - # and store this line in the list-of-all-lines -- as a single - # string, of course! - lines.append(''.join(cur_line)) - - return lines - - -def translate_longopt(opt): - """Convert a long option name to a valid Python identifier by - changing "-" to "_". - """ - return opt.translate(longopt_xlate) - - -class OptionDummy: - """Dummy class just used as a place to hold command-line option - values as instance attributes.""" - - def __init__(self, options=[]): - """Create a new OptionDummy instance. The attributes listed in - 'options' will be initialized to None.""" - for opt in options: - setattr(self, opt, None) - - -if __name__ == "__main__": - text = """\ -Tra-la-la, supercalifragilisticexpialidocious. -How *do* you spell that odd word, anyways? 
-(Someone ask Mary -- she'll know [or she'll -say, "How should I know?"].)""" - - for w in (10, 20, 30, 40): - print("width: %d" % w) - print("\n".join(wrap_text(text, w))) - print() diff --git a/venv/Lib/site-packages/setuptools/_distutils/file_util.py b/venv/Lib/site-packages/setuptools/_distutils/file_util.py deleted file mode 100644 index b3fee35..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/file_util.py +++ /dev/null @@ -1,238 +0,0 @@ -"""distutils.file_util - -Utility functions for operating on single files. -""" - -import os -from distutils.errors import DistutilsFileError -from distutils import log - -# for generating verbose output in 'copy_file()' -_copy_action = { None: 'copying', - 'hard': 'hard linking', - 'sym': 'symbolically linking' } - - -def _copy_file_contents(src, dst, buffer_size=16*1024): - """Copy the file 'src' to 'dst'; both must be filenames. Any error - opening either file, reading from 'src', or writing to 'dst', raises - DistutilsFileError. Data is read/written in chunks of 'buffer_size' - bytes (default 16k). No attempt is made to handle anything apart from - regular files. - """ - # Stolen from shutil module in the standard library, but with - # custom error-handling added. - fsrc = None - fdst = None - try: - try: - fsrc = open(src, 'rb') - except OSError as e: - raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) - - if os.path.exists(dst): - try: - os.unlink(dst) - except OSError as e: - raise DistutilsFileError( - "could not delete '%s': %s" % (dst, e.strerror)) - - try: - fdst = open(dst, 'wb') - except OSError as e: - raise DistutilsFileError( - "could not create '%s': %s" % (dst, e.strerror)) - - while True: - try: - buf = fsrc.read(buffer_size) - except OSError as e: - raise DistutilsFileError( - "could not read from '%s': %s" % (src, e.strerror)) - - if not buf: - break - - try: - fdst.write(buf) - except OSError as e: - raise DistutilsFileError( - "could not write to '%s': %s" % (dst, e.strerror)) - finally: - if fdst: - fdst.close() - if fsrc: - fsrc.close() - -def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, - link=None, verbose=1, dry_run=0): - """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is - copied there with the same name; otherwise, it must be a filename. (If - the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' - is true (the default), the file's mode (type and permission bits, or - whatever is analogous on the current platform) is copied. If - 'preserve_times' is true (the default), the last-modified and - last-access times are copied as well. If 'update' is true, 'src' will - only be copied if 'dst' does not exist, or if 'dst' does exist but is - older than 'src'. - - 'link' allows you to make hard links (os.link) or symbolic links - (os.symlink) instead of copying: set it to "hard" or "sym"; if it is - None (the default), files are copied. Don't set 'link' on systems that - don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. If hardlink fails, falls back to - _copy_file_contents(). - - Under Mac OS, uses the native file copy function in macostools; on - other systems, uses '_copy_file_contents()' to copy file contents. - - Return a tuple (dest_name, copied): 'dest_name' is the actual name of - the output file, and 'copied' is true if the file was copied (or would - have been copied, if 'dry_run' true). - """ - # XXX if the destination file already exists, we clobber it if - # copying, but blow up if linking. 
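A small hypothetical example of the copy_file() call documented above (the paths are invented and the destination directory must already exist):

```python
# Paths are made up; copy_file clobbers an existing destination file.
from distutils.file_util import copy_file

dest, copied = copy_file("README.txt", "build/README.txt")
print(dest, copied)                 # e.g. ('build/README.txt', 1)

# With update=1 the copy is skipped when the destination is not older
# than the source, so repeated builds stay cheap.
copy_file("README.txt", "build/README.txt", update=1)
```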
Hmmm. And I don't know what - # macostools.copyfile() does. Should definitely be consistent, and - # should probably blow up if destination exists and we would be - # changing it (ie. it's not already a hard/soft link to src OR - # (not update) and (src newer than dst). - - from distutils.dep_util import newer - from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE - - if not os.path.isfile(src): - raise DistutilsFileError( - "can't copy '%s': doesn't exist or not a regular file" % src) - - if os.path.isdir(dst): - dir = dst - dst = os.path.join(dst, os.path.basename(src)) - else: - dir = os.path.dirname(dst) - - if update and not newer(src, dst): - if verbose >= 1: - log.debug("not copying %s (output up-to-date)", src) - return (dst, 0) - - try: - action = _copy_action[link] - except KeyError: - raise ValueError("invalid value '%s' for 'link' argument" % link) - - if verbose >= 1: - if os.path.basename(dst) == os.path.basename(src): - log.info("%s %s -> %s", action, src, dir) - else: - log.info("%s %s -> %s", action, src, dst) - - if dry_run: - return (dst, 1) - - # If linking (hard or symbolic), use the appropriate system call - # (Unix only, of course, but that's the caller's responsibility) - elif link == 'hard': - if not (os.path.exists(dst) and os.path.samefile(src, dst)): - try: - os.link(src, dst) - return (dst, 1) - except OSError: - # If hard linking fails, fall back on copying file - # (some special filesystems don't support hard linking - # even under Unix, see issue #8876). - pass - elif link == 'sym': - if not (os.path.exists(dst) and os.path.samefile(src, dst)): - os.symlink(src, dst) - return (dst, 1) - - # Otherwise (non-Mac, not linking), copy the file contents and - # (optionally) copy the times and mode. - _copy_file_contents(src, dst) - if preserve_mode or preserve_times: - st = os.stat(src) - - # According to David Ascher , utime() should be done - # before chmod() (at least under NT). - if preserve_times: - os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) - if preserve_mode: - os.chmod(dst, S_IMODE(st[ST_MODE])) - - return (dst, 1) - - -# XXX I suspect this is Unix-specific -- need porting help! -def move_file (src, dst, - verbose=1, - dry_run=0): - - """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will - be moved into it with the same name; otherwise, 'src' is just renamed - to 'dst'. Return the new full name of the file. - - Handles cross-device moves on Unix using 'copy_file()'. What about - other systems??? 
- """ - from os.path import exists, isfile, isdir, basename, dirname - import errno - - if verbose >= 1: - log.info("moving %s -> %s", src, dst) - - if dry_run: - return dst - - if not isfile(src): - raise DistutilsFileError("can't move '%s': not a regular file" % src) - - if isdir(dst): - dst = os.path.join(dst, basename(src)) - elif exists(dst): - raise DistutilsFileError( - "can't move '%s': destination '%s' already exists" % - (src, dst)) - - if not isdir(dirname(dst)): - raise DistutilsFileError( - "can't move '%s': destination '%s' not a valid path" % - (src, dst)) - - copy_it = False - try: - os.rename(src, dst) - except OSError as e: - (num, msg) = e.args - if num == errno.EXDEV: - copy_it = True - else: - raise DistutilsFileError( - "couldn't move '%s' to '%s': %s" % (src, dst, msg)) - - if copy_it: - copy_file(src, dst, verbose=verbose) - try: - os.unlink(src) - except OSError as e: - (num, msg) = e.args - try: - os.unlink(dst) - except OSError: - pass - raise DistutilsFileError( - "couldn't move '%s' to '%s' by copy/delete: " - "delete '%s' failed: %s" - % (src, dst, src, msg)) - return dst - - -def write_file (filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - f = open(filename, "w") - try: - for line in contents: - f.write(line + "\n") - finally: - f.close() diff --git a/venv/Lib/site-packages/setuptools/_distutils/filelist.py b/venv/Lib/site-packages/setuptools/_distutils/filelist.py deleted file mode 100644 index 82a7738..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/filelist.py +++ /dev/null @@ -1,355 +0,0 @@ -"""distutils.filelist - -Provides the FileList class, used for poking about the filesystem -and building lists of files. -""" - -import os -import re -import fnmatch -import functools - -from distutils.util import convert_path -from distutils.errors import DistutilsTemplateError, DistutilsInternalError -from distutils import log - - -class FileList: - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. - - Instance attributes: - dir - directory from which files will be taken -- only used if - 'allfiles' not supplied to constructor - files - list of filenames currently being built/filtered/manipulated - allfiles - complete list of files under consideration (ie. without any - filtering applied) - """ - - def __init__(self, warn=None, debug_print=None): - # ignore argument to FileList, but keep them for backwards - # compatibility - self.allfiles = None - self.files = [] - - def set_allfiles(self, allfiles): - self.allfiles = allfiles - - def findall(self, dir=os.curdir): - self.allfiles = findall(dir) - - def debug_print(self, msg): - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - from distutils.debug import DEBUG - if DEBUG: - print(msg) - - # Collection methods - - def append(self, item): - self.files.append(item) - - def extend(self, items): - self.files.extend(items) - - def sort(self): - # Not a strict lexical sort! - sortable_files = sorted(map(os.path.split, self.files)) - self.files = [] - for sort_tuple in sortable_files: - self.files.append(os.path.join(*sort_tuple)) - - # Other miscellaneous utility methods - - def remove_duplicates(self): - # Assumes list has been sorted! 
- for i in range(len(self.files) - 1, 0, -1): - if self.files[i] == self.files[i - 1]: - del self.files[i] - - # "File template" methods - - def _parse_template_line(self, line): - words = line.split() - action = words[0] - - patterns = dir = dir_pattern = None - - if action in ('include', 'exclude', - 'global-include', 'global-exclude'): - if len(words) < 2: - raise DistutilsTemplateError( - "'%s' expects ..." % action) - patterns = [convert_path(w) for w in words[1:]] - elif action in ('recursive-include', 'recursive-exclude'): - if len(words) < 3: - raise DistutilsTemplateError( - "'%s' expects ..." % action) - dir = convert_path(words[1]) - patterns = [convert_path(w) for w in words[2:]] - elif action in ('graft', 'prune'): - if len(words) != 2: - raise DistutilsTemplateError( - "'%s' expects a single " % action) - dir_pattern = convert_path(words[1]) - else: - raise DistutilsTemplateError("unknown action '%s'" % action) - - return (action, patterns, dir, dir_pattern) - - def process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). - (action, patterns, dir, dir_pattern) = self._parse_template_line(line) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. - if action == 'include': - self.debug_print("include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include_pattern(pattern, anchor=1): - log.warn("warning: no files found matching '%s'", - pattern) - - elif action == 'exclude': - self.debug_print("exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=1): - log.warn(("warning: no previously-included files " - "found matching '%s'"), pattern) - - elif action == 'global-include': - self.debug_print("global-include " + ' '.join(patterns)) - for pattern in patterns: - if not self.include_pattern(pattern, anchor=0): - log.warn(("warning: no files found matching '%s' " - "anywhere in distribution"), pattern) - - elif action == 'global-exclude': - self.debug_print("global-exclude " + ' '.join(patterns)) - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=0): - log.warn(("warning: no previously-included files matching " - "'%s' found anywhere in distribution"), - pattern) - - elif action == 'recursive-include': - self.debug_print("recursive-include %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.include_pattern(pattern, prefix=dir): - msg = ( - "warning: no files found matching '%s' " - "under directory '%s'" - ) - log.warn(msg, pattern, dir) - - elif action == 'recursive-exclude': - self.debug_print("recursive-exclude %s %s" % - (dir, ' '.join(patterns))) - for pattern in patterns: - if not self.exclude_pattern(pattern, prefix=dir): - log.warn(("warning: no previously-included files matching " - "'%s' found under directory '%s'"), - pattern, dir) - - elif action == 'graft': - self.debug_print("graft " + dir_pattern) - if not self.include_pattern(None, prefix=dir_pattern): - log.warn("warning: no directories found matching '%s'", - dir_pattern) - - elif action == 'prune': - self.debug_print("prune " + dir_pattern) - if not self.exclude_pattern(None, prefix=dir_pattern): - 
log.warn(("no previously-included directories found " - "matching '%s'"), dir_pattern) - else: - raise DistutilsInternalError( - "this cannot happen: invalid action '%s'" % action) - - # Filtering/selection methods - - def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): - """Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - # XXX docstring lying about what the special chars are? - files_found = False - pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("include_pattern: applying regex r'%s'" % - pattern_re.pattern) - - # delayed loading of allfiles list - if self.allfiles is None: - self.findall() - - for name in self.allfiles: - if pattern_re.search(name): - self.debug_print(" adding " + name) - self.files.append(name) - files_found = True - return files_found - - def exclude_pattern( - self, pattern, anchor=1, prefix=None, is_regex=0): - """Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - files_found = False - pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("exclude_pattern: applying regex r'%s'" % - pattern_re.pattern) - for i in range(len(self.files)-1, -1, -1): - if pattern_re.search(self.files[i]): - self.debug_print(" removing " + self.files[i]) - del self.files[i] - files_found = True - return files_found - - -# Utility functions - -def _find_all_simple(path): - """ - Find all files under 'path' - """ - all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True)) - results = ( - os.path.join(base, file) - for base, dirs, files in all_unique - for file in files - ) - return filter(os.path.isfile, results) - - -class _UniqueDirs(set): - """ - Exclude previously-seen dirs from walk results, - avoiding infinite recursion. - Ref https://bugs.python.org/issue44497. - """ - def __call__(self, walk_item): - """ - Given an item from an os.walk result, determine - if the item represents a unique dir for this instance - and if not, prevent further traversal. 
- """ - base, dirs, files = walk_item - stat = os.stat(base) - candidate = stat.st_dev, stat.st_ino - found = candidate in self - if found: - del dirs[:] - self.add(candidate) - return not found - - @classmethod - def filter(cls, items): - return filter(cls(), items) - - -def findall(dir=os.curdir): - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. - """ - files = _find_all_simple(dir) - if dir == os.curdir: - make_rel = functools.partial(os.path.relpath, start=dir) - files = map(make_rel, files) - return list(files) - - -def glob_to_re(pattern): - """Translate a shell-like glob pattern to a regular expression; return - a string containing the regex. Differs from 'fnmatch.translate()' in - that '*' does not match "special characters" (which are - platform-specific). - """ - pattern_re = fnmatch.translate(pattern) - - # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which - # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, - # and by extension they shouldn't match such "special characters" under - # any OS. So change all non-escaped dots in the RE to match any - # character except the special characters (currently: just os.sep). - sep = os.sep - if os.sep == '\\': - # we're using a regex to manipulate a regex, so we need - # to escape the backslash twice - sep = r'\\\\' - escaped = r'\1[^%s]' % sep - pattern_re = re.sub(r'((?= self.threshold: - if args: - msg = msg % args - if level in (WARN, ERROR, FATAL): - stream = sys.stderr - else: - stream = sys.stdout - try: - stream.write('%s\n' % msg) - except UnicodeEncodeError: - # emulate backslashreplace error handler - encoding = stream.encoding - msg = msg.encode(encoding, "backslashreplace").decode(encoding) - stream.write('%s\n' % msg) - stream.flush() - - def log(self, level, msg, *args): - self._log(level, msg, args) - - def debug(self, msg, *args): - self._log(DEBUG, msg, args) - - def info(self, msg, *args): - self._log(INFO, msg, args) - - def warn(self, msg, *args): - self._log(WARN, msg, args) - - def error(self, msg, *args): - self._log(ERROR, msg, args) - - def fatal(self, msg, *args): - self._log(FATAL, msg, args) - - -_global_log = Log() -log = _global_log.log -debug = _global_log.debug -info = _global_log.info -warn = _global_log.warn -error = _global_log.error -fatal = _global_log.fatal - - -def set_threshold(level): - # return the old threshold for use from tests - old = _global_log.threshold - _global_log.threshold = level - return old - - -def set_verbosity(v): - if v <= 0: - set_threshold(WARN) - elif v == 1: - set_threshold(INFO) - elif v >= 2: - set_threshold(DEBUG) diff --git a/venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py b/venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py deleted file mode 100644 index 6b62738..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/msvc9compiler.py +++ /dev/null @@ -1,788 +0,0 @@ -"""distutils.msvc9compiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio 2008. - -The module is compatible with VS 2005 and VS 2008. You can find legacy support -for older versions of VS in distutils.msvccompiler. 
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS2005 and VS 2008 by Christian Heimes - -import os -import subprocess -import sys -import re - -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import CCompiler, gen_lib_options -from distutils import log -from distutils.util import get_platform - -import winreg - -RegOpenKeyEx = winreg.OpenKeyEx -RegEnumKey = winreg.EnumKey -RegEnumValue = winreg.EnumValue -RegError = winreg.error - -HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) - -NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) -if NATIVE_WIN64: - # Visual C++ is a 32-bit application, so we need to look in - # the corresponding registry branch, if we're running a - # 64-bit Python on Win64 - VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" -else: - VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Microsoft\.NETFramework" - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is -# the param to cross-compile on x86 targeting amd64.) -PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'amd64', -} - -class Reg: - """Helper class to read values from the registry - """ - - def get_value(cls, path, key): - for base in HKEYS: - d = cls.read_values(base, path) - if d and key in d: - return d[key] - raise KeyError(key) - get_value = classmethod(get_value) - - def read_keys(cls, base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - read_keys = classmethod(read_keys) - - def read_values(cls, base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. 
- """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) - i += 1 - return d - read_values = classmethod(read_values) - - def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - convert_mbcs = staticmethod(convert_mbcs) - -class MacroExpander: - - def __init__(self, version): - self.macros = {} - self.vsbase = VS_BASE % version - self.load_macros(version) - - def set_macro(self, macro, path, key): - self.macros["$(%s)" % macro] = Reg.get_value(path, key) - - def load_macros(self, version): - self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") - self.set_macro("FrameworkDir", NET_BASE, "installroot") - try: - if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, - "sdkinstallrootv2.0") - else: - raise KeyError("sdkinstallrootv2.0") - except KeyError: - raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") - - if version >= 9.0: - self.set_macro("FrameworkVersion", self.vsbase, "clr version") - self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") - else: - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - -def removeDuplicates(variable): - """Remove duplicate values of an environment variable. 
- """ - oldList = variable.split(os.pathsep) - newList = [] - for i in oldList: - if i not in newList: - newList.append(i) - newVariable = os.pathsep.join(newList) - return newVariable - -def find_vcvarsall(version): - """Find the vcvarsall.bat file - - At first it tries to find the productdir of VS 2008 in the registry. If - that fails it falls back to the VS90COMNTOOLS env var. - """ - vsbase = VS_BASE % version - try: - productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, - "productdir") - except KeyError: - log.debug("Unable to find productdir in registry") - productdir = None - - if not productdir or not os.path.isdir(productdir): - toolskey = "VS%0.f0COMNTOOLS" % version - toolsdir = os.environ.get(toolskey, None) - - if toolsdir and os.path.isdir(toolsdir): - productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") - productdir = os.path.abspath(productdir) - if not os.path.isdir(productdir): - log.debug("%s is not a valid directory" % productdir) - return None - else: - log.debug("Env var %s is not set or invalid" % toolskey) - if not productdir: - log.debug("No productdir found") - return None - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - log.debug("Unable to find vcvarsall.bat") - return None - -def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment - """ - vcvarsall = find_vcvarsall(version) - interesting = {"include", "lib", "libpath", "path"} - result = {} - - if vcvarsall is None: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - try: - stdout, stderr = popen.communicate() - if popen.wait() != 0: - raise DistutilsPlatformError(stderr.decode("mbcs")) - - stdout = stdout.decode("mbcs") - for line in stdout.split("\n"): - line = Reg.convert_mbcs(line) - if '=' not in line: - continue - line = line.strip() - key, value = line.split('=', 1) - key = key.lower() - if key in interesting: - if value.endswith(os.pathsep): - value = value[:-1] - result[key] = removeDuplicates(value) - - finally: - popen.stdout.close() - popen.stderr.close() - - if len(result) != len(interesting): - raise ValueError(str(list(result.keys()))) - - return result - -# More globals -VERSION = get_build_version() -# MACROS = MacroExpander(VERSION) - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. 
- src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=0, dry_run=0, force=0): - super().__init__(verbose, dry_run, force) - self.__version = VERSION - self.__root = r"Software\Microsoft\VisualStudio" - # self.__macros = MACROS - self.__paths = [] - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.__arch = None # deprecated name - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if self.__version < 8.0: - raise DistutilsPlatformError("VC %0.1f is not supported by this module" % self.__version) - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - ok_plats = 'win32', 'win-amd64' - if plat_name not in ok_plats: - raise DistutilsPlatformError("--plat-name must be one of %s" % - (ok_plats,)) - - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; - # to cross compile, you use 'x86_amd64'. - # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross - # compile use 'x86' (ie, it runs the x86 compiler directly) - if plat_name == get_platform() or plat_name == 'win32': - # native build or cross-compile to win32 - plat_spec = PLAT_TO_VCVARS[plat_name] - else: - # cross compile from win32 -> some 64bit - plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ - PLAT_TO_VCVARS[plat_name] - - vc_env = query_vcvarsall(VERSION, plat_spec) - - self.__paths = vc_env['path'].split(os.pathsep) - os.environ['lib'] = vc_env['lib'] - os.environ['include'] = vc_env['include'] - - if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." 
- % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - #self.set_path_env_var('lib') - #self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "x86": - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename (base) - if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append ('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - 
continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? - try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. 
- build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - self.manifest_setup_ldargs(output_filename, build_temp, ld_args) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - # embed the manifest - # XXX - this is somewhat fragile - if mt.exe fails, distutils - # will still consider the DLL up-to-date, but it will not have a - # manifest. Maybe we should link to a temp file? OTOH, that - # implies a build environment error that shouldn't go undetected. - mfinfo = self.manifest_get_embed_info(target_desc, ld_args) - if mfinfo is not None: - mffilename, mfid = mfinfo - out_arg = '-outputresource:%s;%s' % (output_filename, mfid) - try: - self.spawn(['mt.exe', '-nologo', '-manifest', - mffilename, out_arg]) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): - # If we need a manifest at all, an embedded manifest is recommended. - # See MSDN article titled - # "How to: Embed a Manifest Inside a C/C++ Application" - # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) - # Ask the linker to generate the manifest in the temp dir, so - # we can check it, and possibly embed it, later. - temp_manifest = os.path.join( - build_temp, - os.path.basename(output_filename) + ".manifest") - ld_args.append('/MANIFESTFILE:' + temp_manifest) - - def manifest_get_embed_info(self, target_desc, ld_args): - # If a manifest should be embedded, return a tuple of - # (manifest_filename, resource_id). Returns None if no manifest - # should be embedded. See http://bugs.python.org/issue7833 for why - # we want to avoid any manifest for extension modules if we can) - for arg in ld_args: - if arg.startswith("/MANIFESTFILE:"): - temp_manifest = arg.split(":", 1)[1] - break - else: - # no /MANIFESTFILE so nothing to do. - return None - if target_desc == CCompiler.EXECUTABLE: - # by default, executables always get the manifest with the - # CRT referenced. - mfid = 1 - else: - # Extension modules try and avoid any manifest if possible. - mfid = 2 - temp_manifest = self._remove_visual_c_ref(temp_manifest) - if temp_manifest is None: - return None - return temp_manifest, mfid - - def _remove_visual_c_ref(self, manifest_file): - try: - # Remove references to the Visual C runtime, so they will - # fall through to the Visual C dependency of Python.exe. - # This way, when installed for a restricted user (e.g. - # runtimes are not in WinSxS folder, but in Python's own - # folder), the runtimes do not need to be in every folder - # with .pyd's. - # Returns either the filename of the modified manifest or - # None if no manifest should be embedded. - manifest_f = open(manifest_file) - try: - manifest_buf = manifest_f.read() - finally: - manifest_f.close() - pattern = re.compile( - r"""<assemblyIdentity.*?name=("|')Microsoft\."""\ - r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""", - re.DOTALL) - manifest_buf = re.sub(pattern, "", manifest_buf) - pattern = r"<dependentAssembly>\s*</dependentAssembly>" - manifest_buf = re.sub(pattern, "", manifest_buf) - # Now see if any other assemblies are referenced - if not, we - # don't want a manifest embedded.
- pattern = re.compile( - r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')""" - r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL) - if re.search(pattern, manifest_buf) is None: - return None - - manifest_f = open(manifest_file, 'w') - try: - manifest_f.write(manifest_buf) - return manifest_file - finally: - manifest_f.close() - except OSError: - pass - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe diff --git a/venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py deleted file mode 100644 index e1367b8..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/msvccompiler.py +++ /dev/null @@ -1,643 +0,0 @@ -"""distutils.msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio.
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) - -import sys, os -from distutils.errors import \ - DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import \ - CCompiler, gen_lib_options -from distutils import log - -_can_read_reg = False -try: - import winreg - - _can_read_reg = True - hkey_mod = winreg - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegError = winreg.error - -except ImportError: - try: - import win32api - import win32con - _can_read_reg = True - hkey_mod = win32con - - RegOpenKeyEx = win32api.RegOpenKeyEx - RegEnumKey = win32api.RegEnumKey - RegEnumValue = win32api.RegEnumValue - RegError = win32api.error - except ImportError: - log.info("Warning: Can't read registry to find the " - "necessary compiler setting\n" - "Make sure that Python modules winreg, " - "win32api or win32con are installed.") - pass - -if _can_read_reg: - HKEYS = (hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT) - -def read_keys(base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - -def read_values(base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. - """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[convert_mbcs(name)] = convert_mbcs(value) - i += 1 - return d - -def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - -class MacroExpander: - def __init__(self, version): - self.macros = {} - self.load_macros(version) - - def set_macro(self, macro, path, key): - for base in HKEYS: - d = read_values(base, path) - if d: - self.macros["$(%s)" % macro] = d[key] - break - - def load_macros(self, version): - vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version - self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") - net = r"Software\Microsoft\.NETFramework" - self.set_macro("FrameworkDir", net, "installroot") - try: - if version > 7.0: - self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") - else: - self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError as exc: # - raise DistutilsPlatformError( - """Python was built with Visual Studio 2003; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2003 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") - - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = read_values(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. 
- - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def get_build_architecture(): - """Return the processor architecture. - - Possible results are "Intel" or "AMD64". - """ - - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return "Intel" - j = sys.version.find(")", i) - return sys.version[i+len(prefix):j] - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - - -class MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=0, dry_run=0, force=0): - super().__init__(verbose, dry_run, force) - self.__version = get_build_version() - self.__arch = get_build_architecture() - if self.__arch == "Intel": - # x86 - if self.__version >= 7: - self.__root = r"Software\Microsoft\VisualStudio" - self.__macros = MacroExpander(self.__version) - else: - self.__root = r"Software\Microsoft\Devstudio" - self.__product = "Visual Studio version %s" % self.__version - else: - # Win64. 
Assume this was built with the platform SDK - self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) - - self.initialized = False - - def initialize(self): - self.__paths = [] - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - self.__paths = self.get_msvc_paths("path") - - if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - self.set_path_env_var('lib') - self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "Intel": - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GX' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - else: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename (base) - if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) - else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build 
= compile_info - - compile_opts = extra_preargs or [] - compile_opts.append ('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - os.path.dirname(objects[0]), - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=0): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. 
- - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe - - def get_msvc_paths(self, path, platform='x86'): - """Get a list of devstudio directories (include, lib or path). - - Return a list of strings. The list will be empty if unable to - access the registry or appropriate registry keys not found. - """ - if not _can_read_reg: - return [] - - path = path + " dirs" - if self.__version >= 7: - key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - % (self.__root, self.__version)) - else: - key = (r"%s\6.0\Build System\Components\Platforms" - r"\Win32 (%s)\Directories" % (self.__root, platform)) - - for base in HKEYS: - d = read_values(base, key) - if d: - if self.__version >= 7: - return self.__macros.sub(d[path]).split(";") - else: - return d[path].split(";") - # MSVC 6 seems to create the registry entries we need only when - # the GUI is run. - if self.__version == 6: - for base in HKEYS: - if read_values(base, r"%s\6.0" % self.__root) is not None: - self.warn("It seems you have Visual Studio 6 installed, " - "but the expected registry settings are not present.\n" - "You must at least run the Visual Studio GUI once " - "so that these entries are created.") - break - return [] - - def set_path_env_var(self, name): - """Set environment variable 'name' to an MSVC path type value. - - This is equivalent to a SET command prior to execution of spawned - commands. 
- """ - - if name == "lib": - p = self.get_msvc_paths("library") - else: - p = self.get_msvc_paths(name) - if p: - os.environ[name] = ';'.join(p) - - -if get_build_version() >= 8.0: - log.debug("Importing new compiler from distutils.msvc9compiler") - OldMSVCCompiler = MSVCCompiler - from distutils.msvc9compiler import MSVCCompiler - # get_build_architecture not really relevant now we support cross-compile - from distutils.msvc9compiler import MacroExpander diff --git a/venv/Lib/site-packages/setuptools/_distutils/py35compat.py b/venv/Lib/site-packages/setuptools/_distutils/py35compat.py deleted file mode 100644 index 79b2e7f..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/py35compat.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys -import subprocess - - -def __optim_args_from_interpreter_flags(): - """Return a list of command-line arguments reproducing the current - optimization settings in sys.flags.""" - args = [] - value = sys.flags.optimize - if value > 0: - args.append("-" + "O" * value) - return args - - -_optim_args_from_interpreter_flags = getattr( - subprocess, - "_optim_args_from_interpreter_flags", - __optim_args_from_interpreter_flags, -) diff --git a/venv/Lib/site-packages/setuptools/_distutils/py38compat.py b/venv/Lib/site-packages/setuptools/_distutils/py38compat.py deleted file mode 100644 index 7dbe8ce..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/py38compat.py +++ /dev/null @@ -1,7 +0,0 @@ -def aix_platform(osname, version, release): - try: - import _aix_support - return _aix_support.aix_platform() - except ImportError: - pass - return "%s-%s.%s" % (osname, version, release) diff --git a/venv/Lib/site-packages/setuptools/_distutils/spawn.py b/venv/Lib/site-packages/setuptools/_distutils/spawn.py deleted file mode 100644 index b2d10e3..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/spawn.py +++ /dev/null @@ -1,106 +0,0 @@ -"""distutils.spawn - -Provides the 'spawn()' function, a front-end to various platform- -specific functions for launching another program in a sub-process. -Also provides the 'find_executable()' to search the path for a given -executable name. -""" - -import sys -import os -import subprocess - -from distutils.errors import DistutilsExecError -from distutils.debug import DEBUG -from distutils import log - - -def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): - """Run another program, specified as a command list 'cmd', in a new process. - - 'cmd' is just the argument list for the new process, ie. - cmd[0] is the program to run and cmd[1:] are the rest of its arguments. - There is no way to run a program with a name different from that of its - executable. - - If 'search_path' is true (the default), the system's executable - search path will be used to find the program; otherwise, cmd[0] - must be the exact path to the executable. If 'dry_run' is true, - the command will not actually be run. - - Raise DistutilsExecError if running the program fails in any way; just - return on success. 
- """ - # cmd is documented as a list, but just in case some code passes a tuple - # in, protect our %-formatting code against horrible death - cmd = list(cmd) - - log.info(subprocess.list2cmdline(cmd)) - if dry_run: - return - - if search_path: - executable = find_executable(cmd[0]) - if executable is not None: - cmd[0] = executable - - env = env if env is not None else dict(os.environ) - - if sys.platform == 'darwin': - from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver - macosx_target_ver = get_macosx_target_ver() - if macosx_target_ver: - env[MACOSX_VERSION_VAR] = macosx_target_ver - - try: - proc = subprocess.Popen(cmd, env=env) - proc.wait() - exitcode = proc.returncode - except OSError as exc: - if not DEBUG: - cmd = cmd[0] - raise DistutilsExecError( - "command %r failed: %s" % (cmd, exc.args[-1])) from exc - - if exitcode: - if not DEBUG: - cmd = cmd[0] - raise DistutilsExecError( - "command %r failed with exit code %s" % (cmd, exitcode)) - - -def find_executable(executable, path=None): - """Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. Returns the complete filename or None if not found. - """ - _, ext = os.path.splitext(executable) - if (sys.platform == 'win32') and (ext != '.exe'): - executable = executable + '.exe' - - if os.path.isfile(executable): - return executable - - if path is None: - path = os.environ.get('PATH', None) - if path is None: - try: - path = os.confstr("CS_PATH") - except (AttributeError, ValueError): - # os.confstr() or CS_PATH is not available - path = os.defpath - # bpo-35755: Don't use os.defpath if the PATH environment variable is - # set to an empty string - - # PATH='' doesn't match, whereas PATH=':' looks in the current directory - if not path: - return None - - paths = path.split(os.pathsep) - for p in paths: - f = os.path.join(p, executable) - if os.path.isfile(f): - # the file exists, we have a shot at spawn working - return f - return None diff --git a/venv/Lib/site-packages/setuptools/_distutils/sysconfig.py b/venv/Lib/site-packages/setuptools/_distutils/sysconfig.py deleted file mode 100644 index 4a77a43..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/sysconfig.py +++ /dev/null @@ -1,567 +0,0 @@ -"""Provide access to Python's configuration information. The specific -configuration variables available depend heavily on the platform and -configuration. The values may be retrieved using -get_config_var(name), and the list of variables is available via -get_config_vars().keys(). Additional convenience functions are also -available. - -Written by: Fred L. Drake, Jr. -Email: -""" - -import _imp -import os -import re -import sys -import sysconfig - -from .errors import DistutilsPlatformError - -IS_PYPY = '__pypy__' in sys.builtin_module_names - -# These are needed in a couple of spots, so just compute them once. -PREFIX = os.path.normpath(sys.prefix) -EXEC_PREFIX = os.path.normpath(sys.exec_prefix) -BASE_PREFIX = os.path.normpath(sys.base_prefix) -BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix) - -# Path to the base directory of the project. On Windows the binary may -# live in project/PCbuild/win32 or project/PCbuild/amd64. 
-# set for cross builds -if "_PYTHON_PROJECT_BASE" in os.environ: - project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"]) -else: - if sys.executable: - project_base = os.path.dirname(os.path.abspath(sys.executable)) - else: - # sys.executable can be empty if argv[0] has been changed and Python is - # unable to retrieve the real program name - project_base = os.getcwd() - - -# python_build: (Boolean) if true, we're either building Python or -# building an extension with an un-installed Python, so we use -# different (hard-wired) directories. -def _is_python_source_dir(d): - for fn in ("Setup", "Setup.local"): - if os.path.isfile(os.path.join(d, "Modules", fn)): - return True - return False - -_sys_home = getattr(sys, '_home', None) - -if os.name == 'nt': - def _fix_pcbuild(d): - if d and os.path.normcase(d).startswith( - os.path.normcase(os.path.join(PREFIX, "PCbuild"))): - return PREFIX - return d - project_base = _fix_pcbuild(project_base) - _sys_home = _fix_pcbuild(_sys_home) - -def _python_build(): - if _sys_home: - return _is_python_source_dir(_sys_home) - return _is_python_source_dir(project_base) - -python_build = _python_build() - - -# Calculate the build qualifier flags if they are defined. Adding the flags -# to the include and lib directories only makes sense for an installation, not -# an in-source build. -build_flags = '' -try: - if not python_build: - build_flags = sys.abiflags -except AttributeError: - # It's not a configure-based build, so the sys module doesn't have - # this attribute, which is fine. - pass - -def get_python_version(): - """Return a string containing the major and minor Python version, - leaving off the patchlevel. Sample return values could be '1.5' - or '2.2'. - """ - return '%d.%d' % sys.version_info[:2] - - -def get_python_inc(plat_specific=0, prefix=None): - """Return the directory containing installed Python header files. - - If 'plat_specific' is false (the default), this is the path to the - non-platform-specific header files, i.e. Python.h and so on; - otherwise, this is the path to platform-specific header files - (namely pyconfig.h). - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - if prefix is None: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - if os.name == "posix": - if IS_PYPY and sys.version_info < (3, 8): - return os.path.join(prefix, 'include') - if python_build: - # Assume the executable is in the build directory. The - # pyconfig.h file should be in the same directory. Since - # the build directory may not be the source directory, we - # must use "srcdir" from the makefile to find the "Include" - # directory. - if plat_specific: - return _sys_home or project_base - else: - incdir = os.path.join(get_config_var('srcdir'), 'Include') - return os.path.normpath(incdir) - implementation = 'pypy' if IS_PYPY else 'python' - python_dir = implementation + get_python_version() + build_flags - return os.path.join(prefix, "include", python_dir) - elif os.name == "nt": - if python_build: - # Include both the include and PC dir to ensure we can find - # pyconfig.h - return (os.path.join(prefix, "include") + os.path.pathsep + - os.path.join(prefix, "PC")) - return os.path.join(prefix, "include") - else: - raise DistutilsPlatformError( - "I don't know where Python installs its C header files " - "on platform '%s'" % os.name) - - -# allow this behavior to be monkey-patched. Ref pypa/distutils#2. 
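For reference, the get_python_inc() helper documented just above in this deleted vendored module mirrors what the standard-library sysconfig module exposes; a minimal sketch of the stdlib equivalent, where the printed paths are purely illustrative:

import sysconfig

# Directory containing Python.h for the running interpreter
# (what the vendored get_python_inc() computes by hand above).
include_dir = sysconfig.get_paths()["include"]

# Default site-packages directory for pure-Python installs.
purelib_dir = sysconfig.get_paths()["purelib"]

print(include_dir)
print(purelib_dir)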
-def _posix_lib(standard_lib, libpython, early_prefix, prefix): - if standard_lib: - return libpython - else: - return os.path.join(libpython, "site-packages") - - -def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): - """Return the directory containing the Python library (standard or - site additions). - - If 'plat_specific' is true, return the directory containing - platform-specific modules, i.e. any module from a non-pure-Python - module distribution; otherwise, return the platform-shared library - directory. If 'standard_lib' is true, return the directory - containing standard Python library modules; otherwise, return the - directory for site-specific modules. - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - - if IS_PYPY and sys.version_info < (3, 8): - # PyPy-specific schema - if prefix is None: - prefix = PREFIX - if standard_lib: - return os.path.join(prefix, "lib-python", sys.version[0]) - return os.path.join(prefix, 'site-packages') - - early_prefix = prefix - - if prefix is None: - if standard_lib: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - else: - prefix = plat_specific and EXEC_PREFIX or PREFIX - - if os.name == "posix": - if plat_specific or standard_lib: - # Platform-specific modules (any module from a non-pure-Python - # module distribution) or standard Python library modules. - libdir = getattr(sys, "platlibdir", "lib") - else: - # Pure Python - libdir = "lib" - implementation = 'pypy' if IS_PYPY else 'python' - libpython = os.path.join(prefix, libdir, - implementation + get_python_version()) - return _posix_lib(standard_lib, libpython, early_prefix, prefix) - elif os.name == "nt": - if standard_lib: - return os.path.join(prefix, "Lib") - else: - return os.path.join(prefix, "Lib", "site-packages") - else: - raise DistutilsPlatformError( - "I don't know where Python installs its library " - "on platform '%s'" % os.name) - - - -def customize_compiler(compiler): - """Do any platform-specific customization of a CCompiler instance. - - Mainly needed on Unix, so we can plug in the information that - varies across Unices and is stored in Python's Makefile. - """ - if compiler.compiler_type == "unix": - if sys.platform == "darwin": - # Perform first-time customization of compiler-related - # config vars on OS X now that we know we need a compiler. - # This is primarily to support Pythons from binary - # installers. The kind and paths to build tools on - # the user system may vary significantly from the system - # that Python itself was built on. Also the user OS - # version and build tools may not support the same set - # of CPU architectures for universal builds. - global _config_vars - # Use get_config_var() to ensure _config_vars is initialized. 
- if not get_config_var('CUSTOMIZED_OSX_COMPILER'): - import _osx_support - _osx_support.customize_compiler(_config_vars) - _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' - - (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ - get_config_vars('CC', 'CXX', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') - - if 'CC' in os.environ: - newcc = os.environ['CC'] - if('LDSHARED' not in os.environ - and ldshared.startswith(cc)): - # If CC is overridden, use that as the default - # command for LDSHARED as well - ldshared = newcc + ldshared[len(cc):] - cc = newcc - if 'CXX' in os.environ: - cxx = os.environ['CXX'] - if 'LDSHARED' in os.environ: - ldshared = os.environ['LDSHARED'] - if 'CPP' in os.environ: - cpp = os.environ['CPP'] - else: - cpp = cc + " -E" # not always - if 'LDFLAGS' in os.environ: - ldshared = ldshared + ' ' + os.environ['LDFLAGS'] - if 'CFLAGS' in os.environ: - cflags = cflags + ' ' + os.environ['CFLAGS'] - ldshared = ldshared + ' ' + os.environ['CFLAGS'] - if 'CPPFLAGS' in os.environ: - cpp = cpp + ' ' + os.environ['CPPFLAGS'] - cflags = cflags + ' ' + os.environ['CPPFLAGS'] - ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] - if 'AR' in os.environ: - ar = os.environ['AR'] - if 'ARFLAGS' in os.environ: - archiver = ar + ' ' + os.environ['ARFLAGS'] - else: - archiver = ar + ' ' + ar_flags - - cc_cmd = cc + ' ' + cflags - compiler.set_executables( - preprocessor=cpp, - compiler=cc_cmd, - compiler_so=cc_cmd + ' ' + ccshared, - compiler_cxx=cxx, - linker_so=ldshared, - linker_exe=cc, - archiver=archiver) - - if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None): - compiler.set_executables(ranlib=os.environ['RANLIB']) - - compiler.shared_lib_extension = shlib_suffix - - -def get_config_h_filename(): - """Return full pathname of installed pyconfig.h file.""" - if python_build: - if os.name == "nt": - inc_dir = os.path.join(_sys_home or project_base, "PC") - else: - inc_dir = _sys_home or project_base - return os.path.join(inc_dir, 'pyconfig.h') - else: - return sysconfig.get_config_h_filename() - - - -def get_makefile_filename(): - """Return full pathname of installed Makefile from the Python build.""" - return sysconfig.get_makefile_filename() - - -def parse_config_h(fp, g=None): - """Parse a config.h-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - return sysconfig.parse_config_h(fp, vars=g) - - -# Regexes needed for parsing Makefile (and similar syntaxes, -# like old-style Setup files). -_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") -_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") -_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") - -def parse_makefile(fn, g=None): - """Parse a Makefile-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. 
- """ - from distutils.text_file import TextFile - fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") - - if g is None: - g = {} - done = {} - notdone = {} - - while True: - line = fp.readline() - if line is None: # eof - break - m = _variable_rx.match(line) - if m: - n, v = m.group(1, 2) - v = v.strip() - # `$$' is a literal `$' in make - tmpv = v.replace('$$', '') - - if "$" in tmpv: - notdone[n] = v - else: - try: - v = int(v) - except ValueError: - # insert literal `$' - done[n] = v.replace('$$', '$') - else: - done[n] = v - - # Variables with a 'PY_' prefix in the makefile. These need to - # be made available without that prefix through sysconfig. - # Special care is needed to ensure that variable expansion works, even - # if the expansion uses the name without a prefix. - renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') - - # do variable interpolation here - while notdone: - for name in list(notdone): - value = notdone[name] - m = _findvar1_rx.search(value) or _findvar2_rx.search(value) - if m: - n = m.group(1) - found = True - if n in done: - item = str(done[n]) - elif n in notdone: - # get it on a subsequent round - found = False - elif n in os.environ: - # do it like make: fall back to environment - item = os.environ[n] - - elif n in renamed_variables: - if name.startswith('PY_') and name[3:] in renamed_variables: - item = "" - - elif 'PY_' + n in notdone: - found = False - - else: - item = str(done['PY_' + n]) - else: - done[n] = item = "" - if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: - notdone[name] = value - else: - try: value = int(value) - except ValueError: - done[name] = value.strip() - else: - done[name] = value - del notdone[name] - - if name.startswith('PY_') \ - and name[3:] in renamed_variables: - - name = name[3:] - if name not in done: - done[name] = value - else: - # bogus variable reference; just drop it since we can't deal - del notdone[name] - - fp.close() - - # strip spurious spaces - for k, v in done.items(): - if isinstance(v, str): - done[k] = v.strip() - - # save the results in the global dictionary - g.update(done) - return g - - -def expand_makefile_vars(s, vars): - """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in - 'string' according to 'vars' (a dictionary mapping variable names to - values). Variables not present in 'vars' are silently expanded to the - empty string. The variable values in 'vars' should not contain further - variable expansions; if 'vars' is the output of 'parse_makefile()', - you're fine. Returns a variable-expanded version of 's'. - """ - - # This algorithm does multiple expansion, so if vars['foo'] contains - # "${bar}", it will expand ${foo} to ${bar}, and then expand - # ${bar}... and so forth. This is fine as long as 'vars' comes from - # 'parse_makefile()', which takes care of such expansions eagerly, - # according to make's variable expansion semantics. 
- - while True: - m = _findvar1_rx.search(s) or _findvar2_rx.search(s) - if m: - (beg, end) = m.span() - s = s[0:beg] + vars.get(m.group(1)) + s[end:] - else: - break - return s - - -_config_vars = None - - -_sysconfig_name_tmpl = '_sysconfigdata_{abi}_{platform}_{multiarch}' - - -def _init_posix(): - """Initialize the module as appropriate for POSIX systems.""" - # _sysconfigdata is generated at build time, see the sysconfig module - name = os.environ.get( - '_PYTHON_SYSCONFIGDATA_NAME', - _sysconfig_name_tmpl.format( - abi=sys.abiflags, - platform=sys.platform, - multiarch=getattr(sys.implementation, '_multiarch', ''), - ), - ) - try: - _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) - except ImportError: - # Python 3.5 and pypy 7.3.1 - _temp = __import__( - '_sysconfigdata', globals(), locals(), ['build_time_vars'], 0) - build_time_vars = _temp.build_time_vars - global _config_vars - _config_vars = {} - _config_vars.update(build_time_vars) - - -def _init_nt(): - """Initialize the module as appropriate for NT""" - g = {} - # set basic install directories - g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1) - g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1) - - # XXX hmmm.. a normal install puts include files here - g['INCLUDEPY'] = get_python_inc(plat_specific=0) - - g['EXT_SUFFIX'] = _imp.extension_suffixes()[0] - g['EXE'] = ".exe" - g['VERSION'] = get_python_version().replace(".", "") - g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable)) - - global _config_vars - _config_vars = g - - -def get_config_vars(*args): - """With no arguments, return a dictionary of all configuration - variables relevant for the current platform. Generally this includes - everything needed to build extensions and install both pure modules and - extensions. On Unix, this means every variable defined in Python's - installed Makefile; on Windows it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - global _config_vars - if _config_vars is None: - func = globals().get("_init_" + os.name) - if func: - func() - else: - _config_vars = {} - - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # Distutils. - _config_vars['prefix'] = PREFIX - _config_vars['exec_prefix'] = EXEC_PREFIX - - if not IS_PYPY: - # For backward compatibility, see issue19555 - SO = _config_vars.get('EXT_SUFFIX') - if SO is not None: - _config_vars['SO'] = SO - - # Always convert srcdir to an absolute path - srcdir = _config_vars.get('srcdir', project_base) - if os.name == 'posix': - if python_build: - # If srcdir is a relative path (typically '.' or '..') - # then it should be interpreted relative to the directory - # containing Makefile. - base = os.path.dirname(get_makefile_filename()) - srcdir = os.path.join(base, srcdir) - else: - # srcdir is not meaningful since the installation is - # spread about the filesystem. We choose the - # directory containing the Makefile since we know it - # exists. - srcdir = os.path.dirname(get_makefile_filename()) - _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir)) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. 
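The get_config_vars()/get_config_var() pair documented above also exists under the same names in the standard-library sysconfig module; a minimal sketch of the stdlib calls, where the variables queried are only examples and may be None on some platforms:

import sysconfig

# Single lookup: extension-module filename suffix,
# e.g. ".cpython-311-x86_64-linux-gnu.so" on CPython/Linux.
ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")

# Bulk lookup: a dict of every known configuration variable.
all_vars = sysconfig.get_config_vars()

print(ext_suffix, all_vars.get("prefix"))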
- if python_build and os.name == "posix": - base = project_base - if (not os.path.isabs(_config_vars['srcdir']) and - base != os.getcwd()): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _config_vars['srcdir']) - _config_vars['srcdir'] = os.path.normpath(srcdir) - - # OS X platforms require special customization to handle - # multi-architecture, multi-os-version installers - if sys.platform == 'darwin': - import _osx_support - _osx_support.customize_config_vars(_config_vars) - - if args: - vals = [] - for name in args: - vals.append(_config_vars.get(name)) - return vals - else: - return _config_vars - -def get_config_var(name): - """Return the value of a single variable using the dictionary - returned by 'get_config_vars()'. Equivalent to - get_config_vars().get(name) - """ - if name == 'SO': - import warnings - warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) - return get_config_vars().get(name) diff --git a/venv/Lib/site-packages/setuptools/_distutils/text_file.py b/venv/Lib/site-packages/setuptools/_distutils/text_file.py deleted file mode 100644 index 93abad3..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/text_file.py +++ /dev/null @@ -1,286 +0,0 @@ -"""text_file - -provides the TextFile class, which gives an interface to text files -that (optionally) takes care of stripping comments, ignoring blank -lines, and joining lines with backslashes.""" - -import sys, io - - -class TextFile: - """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. - - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) 
- join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not.""" - - default_options = { 'strip_comments': 1, - 'skip_blanks': 1, - 'lstrip_ws': 0, - 'rstrip_ws': 1, - 'join_lines': 0, - 'collapse_join': 0, - 'errors': 'strict', - } - - def __init__(self, filename=None, file=None, **options): - """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'.""" - if filename is None and file is None: - raise RuntimeError("you must supply either or both of 'filename' and 'file'") - - # set values for all options -- either from client option hash - # or fallback to default_options - for opt in self.default_options.keys(): - if opt in options: - setattr(self, opt, options[opt]) - else: - setattr(self, opt, self.default_options[opt]) - - # sanity check client option hash - for opt in options.keys(): - if opt not in self.default_options: - raise KeyError("invalid TextFile option '%s'" % opt) - - if file is None: - self.open(filename) - else: - self.filename = filename - self.file = file - self.current_line = 0 # assuming that file is at BOF! - - # 'linebuf' is a stack of lines that will be emptied before we - # actually read from the file; it's only populated by an - # 'unreadline()' operation - self.linebuf = [] - - def open(self, filename): - """Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor.""" - self.filename = filename - self.file = io.open(self.filename, 'r', errors=self.errors) - self.current_line = 0 - - def close(self): - """Close the current file and forget everything we know about it - (filename, current line number).""" - file = self.file - self.file = None - self.filename = None - self.current_line = None - file.close() - - def gen_error(self, msg, line=None): - outmsg = [] - if line is None: - line = self.current_line - outmsg.append(self.filename + ", ") - if isinstance(line, (list, tuple)): - outmsg.append("lines %d-%d: " % tuple(line)) - else: - outmsg.append("line %d: " % line) - outmsg.append(str(msg)) - return "".join(outmsg) - - def error(self, msg, line=None): - raise ValueError("error: " + self.gen_error(msg, line)) - - def warn(self, msg, line=None): - """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". 
If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line.""" - sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") - - def readline(self): - """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not.""" - # If any "unread" lines waiting in 'linebuf', return the top - # one. (We don't actually buffer read-ahead data -- lines only - # get put in 'linebuf' if the client explicitly does an - # 'unreadline()'. - if self.linebuf: - line = self.linebuf[-1] - del self.linebuf[-1] - return line - - buildup_line = '' - - while True: - # read the line, make it None if EOF - line = self.file.readline() - if line == '': - line = None - - if self.strip_comments and line: - - # Look for the first "#" in the line. If none, never - # mind. If we find one and it's the first character, or - # is not preceded by "\", then it starts a comment -- - # strip the comment, strip whitespace before it, and - # carry on. Otherwise, it's just an escaped "#", so - # unescape it (and any other escaped "#"'s that might be - # lurking in there) and otherwise leave the line alone. - - pos = line.find("#") - if pos == -1: # no "#" -- no comments - pass - - # It's definitely a comment -- either "#" is the first - # character, or it's elsewhere and unescaped. - elif pos == 0 or line[pos-1] != "\\": - # Have to preserve the trailing newline, because it's - # the job of a later step (rstrip_ws) to remove it -- - # and if rstrip_ws is false, we'd better preserve it! - # (NB. this means that if the final line is all comment - # and has no trailing newline, we will think that it's - # EOF; I think that's OK.) - eol = (line[-1] == '\n') and '\n' or '' - line = line[0:pos] + eol - - # If all that's left is whitespace, then skip line - # *now*, before we try to join it to 'buildup_line' -- - # that way constructs like - # hello \\ - # # comment that should be ignored - # there - # result in "hello there". - if line.strip() == "": - continue - else: # it's an escaped "#" - line = line.replace("\\#", "#") - - # did previous line end with a backslash? then accumulate - if self.join_lines and buildup_line: - # oops: end of file - if line is None: - self.warn("continuation line immediately precedes " - "end-of-file") - return buildup_line - - if self.collapse_join: - line = line.lstrip() - line = buildup_line + line - - # careful: pay attention to line number when incrementing it - if isinstance(self.current_line, list): - self.current_line[1] = self.current_line[1] + 1 - else: - self.current_line = [self.current_line, - self.current_line + 1] - # just an ordinary line, read it as usual - else: - if line is None: # eof - return None - - # still have to be careful about incrementing the line number! 
- if isinstance(self.current_line, list): - self.current_line = self.current_line[1] + 1 - else: - self.current_line = self.current_line + 1 - - # strip whitespace however the client wants (leading and - # trailing, or one or the other, or neither) - if self.lstrip_ws and self.rstrip_ws: - line = line.strip() - elif self.lstrip_ws: - line = line.lstrip() - elif self.rstrip_ws: - line = line.rstrip() - - # blank line (whether we rstrip'ed or not)? skip to next line - # if appropriate - if (line == '' or line == '\n') and self.skip_blanks: - continue - - if self.join_lines: - if line[-1] == '\\': - buildup_line = line[:-1] - continue - - if line[-2:] == '\\\n': - buildup_line = line[0:-2] + '\n' - continue - - # well, I guess there's some actual content there: return it - return line - - def readlines(self): - """Read and return the list of all logical lines remaining in the - current file.""" - lines = [] - while True: - line = self.readline() - if line is None: - return lines - lines.append(line) - - def unreadline(self, line): - """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead.""" - self.linebuf.append(line) diff --git a/venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py b/venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py deleted file mode 100644 index a07e598..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/unixccompiler.py +++ /dev/null @@ -1,325 +0,0 @@ -"""distutils.unixccompiler - -Contains the UnixCCompiler class, a subclass of CCompiler that handles -the "typical" Unix-style command-line C compiler: - * macros defined with -Dname[=value] - * macros undefined with -Uname - * include search directories specified with -Idir - * libraries specified with -lllib - * library search directories specified with -Ldir - * compile handled by 'cc' (or similar) executable with -c option: - compiles .c to .o - * link static library handled by 'ar' command (possibly with 'ranlib') - * link shared library handled by 'cc -shared' -""" - -import os, sys, re, shlex - -from distutils import sysconfig -from distutils.dep_util import newer -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options, gen_lib_options -from distutils.errors import \ - DistutilsExecError, CompileError, LibError, LinkError -from distutils import log - -if sys.platform == 'darwin': - import _osx_support - -# XXX Things not currently handled: -# * optimization/debug/warning flags; we just use whatever's in Python's -# Makefile and live with it. Is this adequate? If not, we might -# have to have a bunch of subclasses GNUCCompiler, SGICCompiler, -# SunCCompiler, and I suspect down that road lies madness. -# * even if we don't know a warning flag from an optimization flag, -# we need some way for outsiders to feed preprocessor/compiler/linker -# flags in to us -- eg. a sysadmin might want to mandate certain flags -# via a site config file, or a user might want to set something for -# compiling this module distribution only via the setup.py command -# line, whatever. As long as these options come from something on the -# current system, they can be as system-dependent as they like, and we -# should just happily stuff them into the preprocessor/compiler/linker -# options and carry on. - - -class UnixCCompiler(CCompiler): - - compiler_type = 'unix' - - # These are used by CCompiler in two places: the constructor sets - # instance attributes 'preprocessor', 'compiler', etc. 
from them, and - # 'set_executable()' allows any of these to be set. The defaults here - # are pretty generic; they will probably have to be set by an outsider - # (eg. using information discovered by the sysconfig about building - # Python extensions). - executables = {'preprocessor' : None, - 'compiler' : ["cc"], - 'compiler_so' : ["cc"], - 'compiler_cxx' : ["cc"], - 'linker_so' : ["cc", "-shared"], - 'linker_exe' : ["cc"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : None, - } - - if sys.platform[:6] == "darwin": - executables['ranlib'] = ["ranlib"] - - # Needed for the filename generation methods provided by the base - # class, CCompiler. NB. whoever instantiates/uses a particular - # UnixCCompiler instance should set 'shared_lib_ext' -- we set a - # reasonable common default here, but it's not necessarily used on all - # Unices! - - src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".so" - dylib_lib_extension = ".dylib" - xcode_stub_lib_extension = ".tbd" - static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" - xcode_stub_lib_format = dylib_lib_format - if sys.platform == "cygwin": - exe_extension = ".exe" - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - fixed_args = self._fix_compile_args(None, macros, include_dirs) - ignore, macros, include_dirs = fixed_args - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = self.preprocessor + pp_opts - if output_file: - pp_args.extend(['-o', output_file]) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or we're - # generating output to stdout, or there's a target output file and - # the source file is newer than the target (or the target doesn't - # exist). - if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except DistutilsExecError as msg: - raise CompileError(msg) - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _osx_support.compiler_fixup(compiler_so, - cc_args + extra_postargs) - try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except DistutilsExecError as msg: - raise CompileError(msg) - - def create_static_lib(self, objects, output_libname, - output_dir=None, debug=0, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - self.mkpath(os.path.dirname(output_filename)) - self.spawn(self.archiver + - [output_filename] + - objects + self.objects) - - # Not many Unices required ranlib anymore -- SunOS 4.x is, I - # think the only major Unix that does. Maybe we need some - # platform intelligence here to skip ranlib if it's not - # needed -- or maybe Python's configure script took care of - # it for us, hence the check for leading colon. 
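For orientation, roughly the command lines that the compile, archive and link steps above end up spawning with the default 'executables' table; the file names and extra flags here are invented:

# A sketch only: the real invocations are assembled by CCompiler and run via spawn().
objects = ["foo.o", "bar.o"]

compile_cmd = ["cc", "-Iinclude", "-DNDEBUG", "-c", "foo.c", "-o", "foo.o"]   # _compile()
archive_cmd = ["ar", "-cr", "libfoo.a"] + objects                             # create_static_lib()
link_cmd    = ["cc", "-shared"] + objects + ["-lm", "-o", "libfoo.so"]        # link()

for cmd in (compile_cmd, archive_cmd, link_cmd):
    print(" ".join(cmd))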
- if self.ranlib: - try: - self.spawn(self.ranlib + [output_filename]) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, target_desc, objects, - output_filename, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) - if not isinstance(output_dir, (str, type(None))): - raise TypeError("'output_dir' must be a string or None") - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ld_args = (objects + self.objects + - lib_opts + ['-o', output_filename]) - if debug: - ld_args[:0] = ['-g'] - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - self.mkpath(os.path.dirname(output_filename)) - try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] - if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. - i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i += 1 - - if os.path.basename(linker[i]) == 'ld_so_aix': - # AIX platforms prefix the compiler with the ld_so_aix - # script, so we need to adjust our linker index - offset = 1 - else: - offset = 0 - - linker[i+offset] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _osx_support.compiler_fixup(linker, ld_args) - - self.spawn(linker + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "-L" + dir - - def _is_gcc(self, compiler_name): - return "gcc" in compiler_name or "g++" in compiler_name - - def runtime_library_dir_option(self, dir): - # XXX Hackish, at the very least. See Python bug #445902: - # http://sourceforge.net/tracker/index.php - # ?func=detail&aid=445902&group_id=5470&atid=105470 - # Linkers on different platforms need different options to - # specify that directories need to be added to the list of - # directories searched for dependencies when a dynamic library - # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to - # be told to pass the -R option through to the linker, whereas - # other compilers and gcc on other systems just know this. - # Other compilers may need something slightly different. At - # this time, there's no way to determine this information from - # the configuration data stored in the Python installation, so - # we use this hack. 
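The platform-specific results the comment above is describing, collected in one place (values taken from the branches of the method that follows; '<dir>' stands for the requested directory):

# Illustrative only; the real method inspects sysconfig and sys.platform at runtime.
RPATH_FLAGS = {
    "macOS >= 10.5":       "-Wl,-rpath,<dir>",
    "macOS < 10.5":        "-L<dir>",
    "FreeBSD":             "-Wl,-rpath=<dir>",
    "HP-UX with gcc/g++":  ["-Wl,+s", "-L<dir>"],
    "HP-UX vendor cc":     ["+s", "-L<dir>"],
    "GNU ld (GNULD=yes)":  "-Wl,--enable-new-dtags,-R<dir>",
    "other Unix linkers":  "-Wl,-R<dir>",
}
for platform_name, flags in RPATH_FLAGS.items():
    print(f"{platform_name:20} -> {flags}")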
- compiler = os.path.basename(shlex.split(sysconfig.get_config_var("CC"))[0]) - if sys.platform[:6] == "darwin": - from distutils.util import get_macosx_target_ver, split_version - macosx_target_ver = get_macosx_target_ver() - if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]: - return "-Wl,-rpath," + dir - else: # no support for -rpath on earlier macOS versions - return "-L" + dir - elif sys.platform[:7] == "freebsd": - return "-Wl,-rpath=" + dir - elif sys.platform[:5] == "hp-ux": - if self._is_gcc(compiler): - return ["-Wl,+s", "-L" + dir] - return ["+s", "-L" + dir] - - # For all compilers, `-Wl` is the presumed way to - # pass a compiler option to the linker and `-R` is - # the way to pass an RPATH. - if sysconfig.get_config_var("GNULD") == "yes": - # GNU ld needs an extra option to get a RUNPATH - # instead of just an RPATH. - return "-Wl,--enable-new-dtags,-R" + dir - else: - return "-Wl,-R" + dir - - def library_option(self, lib): - return "-l" + lib - - def find_library_file(self, dirs, lib, debug=0): - shared_f = self.library_filename(lib, lib_type='shared') - dylib_f = self.library_filename(lib, lib_type='dylib') - xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') - static_f = self.library_filename(lib, lib_type='static') - - if sys.platform == 'darwin': - # On OSX users can specify an alternate SDK using - # '-isysroot', calculate the SDK root if it is specified - # (and use it further on) - # - # Note that, as of Xcode 7, Apple SDKs may contain textual stub - # libraries with .tbd extensions rather than the normal .dylib - # shared libraries installed in /. The Apple compiler tool - # chain handles this transparently but it can cause problems - # for programs that are being built with an SDK and searching - # for specific libraries. Callers of find_library_file need to - # keep in mind that the base filename of the returned SDK library - # file might have a different extension from that of the library - # file installed on the running system, for example: - # /Applications/Xcode.app/Contents/Developer/Platforms/ - # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ - # usr/lib/libedit.tbd - # vs - # /usr/lib/libedit.dylib - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s*(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - - - - for dir in dirs: - shared = os.path.join(dir, shared_f) - dylib = os.path.join(dir, dylib_f) - static = os.path.join(dir, static_f) - xcode_stub = os.path.join(dir, xcode_stub_f) - - if sys.platform == 'darwin' and ( - dir.startswith('/System/') or ( - dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): - - shared = os.path.join(sysroot, dir[1:], shared_f) - dylib = os.path.join(sysroot, dir[1:], dylib_f) - static = os.path.join(sysroot, dir[1:], static_f) - xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) - - # We're second-guessing the linker here, with not much hard - # data to go on: GCC seems to prefer the shared library, so I'm - # assuming that *all* Unix C compilers do. And of course I'm - # ignoring even GCC's "-static" option. So sue me. 
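The preference order being applied below, restated as a small hypothetical helper (the real method additionally remaps /usr and /System directories into the macOS SDK, as shown above):

import os

def _first_existing_library(dirs, lib):
    # Same preference as find_library_file(): .dylib, then the Xcode
    # .tbd stub, then .so, then .a, checked per directory.
    for d in dirs:
        for suffix in (".dylib", ".tbd", ".so", ".a"):
            candidate = os.path.join(d, "lib%s%s" % (lib, suffix))
            if os.path.exists(candidate):
                return candidate
    return None

print(_first_existing_library(["/usr/lib", "/usr/local/lib"], "m"))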
- if os.path.exists(dylib): - return dylib - elif os.path.exists(xcode_stub): - return xcode_stub - elif os.path.exists(shared): - return shared - elif os.path.exists(static): - return static - - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/venv/Lib/site-packages/setuptools/_distutils/util.py b/venv/Lib/site-packages/setuptools/_distutils/util.py deleted file mode 100644 index 6d506d7..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/util.py +++ /dev/null @@ -1,496 +0,0 @@ -"""distutils.util - -Miscellaneous utility functions -- anything that doesn't fit into -one of the other *util.py modules. -""" - -import os -import re -import importlib.util -import string -import sys -import sysconfig -from distutils.errors import DistutilsPlatformError -from distutils.dep_util import newer -from distutils.spawn import spawn -from distutils import log -from distutils.errors import DistutilsByteCompileError -from .py35compat import _optim_args_from_interpreter_flags - - -def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. - """ - - # We initially exposed platforms as defined in Python 3.9 - # even with older Python versions when distutils was split out. - # Now that we delegate to stdlib sysconfig we need to restore this - # in case anyone has started to depend on it. - - if sys.version_info < (3, 8): - if os.name == 'nt': - if '(arm)' in sys.version.lower(): - return 'win-arm32' - if '(arm64)' in sys.version.lower(): - return 'win-arm64' - - if sys.version_info < (3, 9): - if os.name == "posix" and hasattr(os, 'uname'): - osname, host, release, version, machine = os.uname() - if osname[:3] == "aix": - from .py38compat import aix_platform - return aix_platform(osname, version, release) - - return sysconfig.get_platform() - -def get_platform(): - if os.name == 'nt': - TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', - 'arm64': 'win-arm64', - } - return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() - else: - return get_host_platform() - - -if sys.platform == 'darwin': - _syscfg_macosx_ver = None # cache the version pulled from sysconfig -MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET' - -def _clear_cached_macosx_ver(): - """For testing only. Do not call.""" - global _syscfg_macosx_ver - _syscfg_macosx_ver = None - -def get_macosx_target_ver_from_syscfg(): - """Get the version of macOS latched in the Python interpreter configuration. - Returns the version as a string or None if can't obtain one. Cached.""" - global _syscfg_macosx_ver - if _syscfg_macosx_ver is None: - from distutils import sysconfig - ver = sysconfig.get_config_var(MACOSX_VERSION_VAR) or '' - if ver: - _syscfg_macosx_ver = ver - return _syscfg_macosx_ver - -def get_macosx_target_ver(): - """Return the version of macOS for which we are building. - - The target version defaults to the version in sysconfig latched at time - the Python interpreter was built, unless overridden by an environment - variable. If neither source has a value, then None is returned""" - - syscfg_ver = get_macosx_target_ver_from_syscfg() - env_ver = os.environ.get(MACOSX_VERSION_VAR) - - if env_ver: - # Validate overridden version against sysconfig version, if have both. - # Ensure that the deployment target of the build process is not less - # than 10.3 if the interpreter was built for 10.3 or later. 
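A quick look at the platform helpers defined above (assumes the vendored module is importable; the printed values depend on the interpreter, those in the comments are only examples):

from setuptools._distutils.util import get_platform, get_host_platform

print(get_host_platform())   # e.g. 'linux-x86_64' or 'win-amd64'
print(get_platform())        # same, unless VSCMD_ARG_TGT_ARCH indicates a
                             # Windows cross-compile (e.g. 'win-arm64')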
This - # ensures extension modules are built with correct compatibility - # values, specifically LDSHARED which can use - # '-undefined dynamic_lookup' which only works on >= 10.3. - if syscfg_ver and split_version(syscfg_ver) >= [10, 3] and \ - split_version(env_ver) < [10, 3]: - my_msg = ('$' + MACOSX_VERSION_VAR + ' mismatch: ' - 'now "%s" but "%s" during configure; ' - 'must use 10.3 or later' - % (env_ver, syscfg_ver)) - raise DistutilsPlatformError(my_msg) - return env_ver - return syscfg_ver - - -def split_version(s): - """Convert a dot-separated string into a list of numbers for comparisons""" - return [int(n) for n in s.split('.')] - - -def convert_path (pathname): - """Return 'pathname' as a name that will work on the native filesystem, - i.e. split it on '/' and put it back together again using the current - directory separator. Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - if os.sep == '/': - return pathname - if not pathname: - return pathname - if pathname[0] == '/': - raise ValueError("path '%s' cannot be absolute" % pathname) - if pathname[-1] == '/': - raise ValueError("path '%s' cannot end with '/'" % pathname) - - paths = pathname.split('/') - while '.' in paths: - paths.remove('.') - if not paths: - return os.curdir - return os.path.join(*paths) - -# convert_path () - - -def change_root (new_root, pathname): - """Return 'pathname' with 'new_root' prepended. If 'pathname' is - relative, this is equivalent to "os.path.join(new_root,pathname)". - Otherwise, it requires making 'pathname' relative and then joining the - two, which is tricky on DOS/Windows and Mac OS. - """ - if os.name == 'posix': - if not os.path.isabs(pathname): - return os.path.join(new_root, pathname) - else: - return os.path.join(new_root, pathname[1:]) - - elif os.name == 'nt': - (drive, path) = os.path.splitdrive(pathname) - if path[0] == '\\': - path = path[1:] - return os.path.join(new_root, path) - - else: - raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) - - -_environ_checked = 0 -def check_environ (): - """Ensure that 'os.environ' has all the environment variables we - guarantee that users can use in config files, command-line options, - etc. Currently this includes: - HOME - user's home directory (Unix only) - PLAT - description of the current platform, including hardware - and OS (see 'get_platform()') - """ - global _environ_checked - if _environ_checked: - return - - if os.name == 'posix' and 'HOME' not in os.environ: - try: - import pwd - os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] - except (ImportError, KeyError): - # bpo-10496: if the current user identifier doesn't exist in the - # password database, do nothing - pass - - if 'PLAT' not in os.environ: - os.environ['PLAT'] = get_platform() - - _environ_checked = 1 - - -def subst_vars (s, local_vars): - """ - Perform variable substitution on 'string'. - Variables are indicated by format-style braces ("{var}"). - Variable is substituted by the value found in the 'local_vars' - dictionary or in 'os.environ' if it's not in 'local_vars'. - 'os.environ' is first checked/augmented to guarantee that it contains - certain values: see 'check_environ()'. Raise ValueError for any - variables not found in either 'local_vars' or 'os.environ'. 
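A short example of the substitution described above (the vendored module is assumed importable; the variable name and paths are invented):

from setuptools._distutils.util import check_environ, subst_vars

check_environ()   # guarantees HOME and PLAT fallbacks exist in os.environ
print(subst_vars("install into {base}/lib for {PLAT}", {"base": "/opt/app"}))
# -> e.g. "install into /opt/app/lib for linux-x86_64"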
- """ - check_environ() - lookup = dict(os.environ) - lookup.update((name, str(value)) for name, value in local_vars.items()) - try: - return _subst_compat(s).format_map(lookup) - except KeyError as var: - raise ValueError(f"invalid variable {var}") - -# subst_vars () - - -def _subst_compat(s): - """ - Replace shell/Perl-style variable substitution with - format-style. For compatibility. - """ - def _subst(match): - return f'{{{match.group(1)}}}' - repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) - if repl != s: - import warnings - warnings.warn( - "shell/Perl-style substitions are deprecated", - DeprecationWarning, - ) - return repl - - -def grok_environment_error (exc, prefix="error: "): - # Function kept for backward compatibility. - # Used to try clever things with EnvironmentErrors, - # but nowadays str(exception) produces good messages. - return prefix + str(exc) - - -# Needed by 'split_quoted()' -_wordchars_re = _squote_re = _dquote_re = None -def _init_regex(): - global _wordchars_re, _squote_re, _dquote_re - _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) - _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") - _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') - -def split_quoted (s): - """Split a string up according to Unix shell-like rules for quotes and - backslashes. In short: words are delimited by spaces, as long as those - spaces are not escaped by a backslash, or inside a quoted string. - Single and double quotes are equivalent, and the quote characters can - be backslash-escaped. The backslash is stripped from any two-character - escape sequence, leaving only the escaped character. The quote - characters are stripped from any quoted string. Returns a list of - words. - """ - - # This is a nice algorithm for splitting up a single string, since it - # doesn't require character-by-character examination. It was a little - # bit of a brain-bender to get it working right, though... - if _wordchars_re is None: _init_regex() - - s = s.strip() - words = [] - pos = 0 - - while s: - m = _wordchars_re.match(s, pos) - end = m.end() - if end == len(s): - words.append(s[:end]) - break - - if s[end] in string.whitespace: # unescaped, unquoted whitespace: now - words.append(s[:end]) # we definitely have a word delimiter - s = s[end:].lstrip() - pos = 0 - - elif s[end] == '\\': # preserve whatever is being escaped; - # will become part of the current word - s = s[:end] + s[end+1:] - pos = end+1 - - else: - if s[end] == "'": # slurp singly-quoted string - m = _squote_re.match(s, end) - elif s[end] == '"': # slurp doubly-quoted string - m = _dquote_re.match(s, end) - else: - raise RuntimeError("this can't happen (bad char '%c')" % s[end]) - - if m is None: - raise ValueError("bad string (mismatched %s quotes?)" % s[end]) - - (beg, end) = m.span() - s = s[:beg] + s[beg+1:end-1] + s[end:] - pos = m.end() - 2 - - if pos >= len(s): - words.append(s) - break - - return words - -# split_quoted () - - -def execute (func, args, msg=None, verbose=0, dry_run=0): - """Perform some action that affects the outside world (eg. by - writing to the filesystem). Such actions are special because they - are disabled by the 'dry_run' flag. This method takes care of all - that bureaucracy for you; all you have to do is supply the - function to call and an argument tuple for it (to embody the - "external action" being performed), and an optional message to - print. 
- """ - if msg is None: - msg = "%s%r" % (func.__name__, args) - if msg[-2:] == ',)': # correct for singleton tuple - msg = msg[0:-2] + ')' - - log.info(msg) - if not dry_run: - func(*args) - - -def strtobool (val): - """Convert a string representation of truth to true (1) or false (0). - - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. - """ - val = val.lower() - if val in ('y', 'yes', 't', 'true', 'on', '1'): - return 1 - elif val in ('n', 'no', 'f', 'false', 'off', '0'): - return 0 - else: - raise ValueError("invalid truth value %r" % (val,)) - - -def byte_compile (py_files, - optimize=0, force=0, - prefix=None, base_dir=None, - verbose=1, dry_run=0, - direct=None): - """Byte-compile a collection of Python source files to .pyc - files in a __pycache__ subdirectory. 'py_files' is a list - of files to compile; any files that don't end in ".py" are silently - skipped. 'optimize' must be one of the following: - 0 - don't optimize - 1 - normal optimization (like "python -O") - 2 - extra optimization (like "python -OO") - If 'force' is true, all files are recompiled regardless of - timestamps. - - The source filename encoded in each bytecode file defaults to the - filenames listed in 'py_files'; you can modify these with 'prefix' and - 'basedir'. 'prefix' is a string that will be stripped off of each - source filename, and 'base_dir' is a directory name that will be - prepended (after 'prefix' is stripped). You can supply either or both - (or neither) of 'prefix' and 'base_dir', as you wish. - - If 'dry_run' is true, doesn't actually do anything that would - affect the filesystem. - - Byte-compilation is either done directly in this interpreter process - with the standard py_compile module, or indirectly by writing a - temporary script and executing it. Normally, you should let - 'byte_compile()' figure out to use direct compilation or not (see - the source for details). The 'direct' flag is used by the script - generated in indirect mode; unless you know what you're doing, leave - it set to None. - """ - - # Late import to fix a bootstrap issue: _posixsubprocess is built by - # setup.py, but setup.py uses distutils. - import subprocess - - # nothing is done if sys.dont_write_bytecode is True - if sys.dont_write_bytecode: - raise DistutilsByteCompileError('byte-compiling is disabled.') - - # First, if the caller didn't force us into direct or indirect mode, - # figure out which mode we should be in. We take a conservative - # approach: choose direct mode *only* if the current interpreter is - # in debug mode and optimize is 0. If we're not in debug mode (-O - # or -OO), we don't know which level of optimization this - # interpreter is running with, so we can't do direct - # byte-compilation and be certain that it's the right thing. Thus, - # always compile indirectly if the current interpreter is in either - # optimize mode, or if either optimization level was requested by - # the caller. - if direct is None: - direct = (__debug__ and optimize == 0) - - # "Indirect" byte-compilation: write a temporary script and then - # run it with the appropriate flags. 
- if not direct: - try: - from tempfile import mkstemp - (script_fd, script_name) = mkstemp(".py") - except ImportError: - from tempfile import mktemp - (script_fd, script_name) = None, mktemp(".py") - log.info("writing byte-compilation script '%s'", script_name) - if not dry_run: - if script_fd is not None: - script = os.fdopen(script_fd, "w") - else: - script = open(script_name, "w") - - with script: - script.write("""\ -from distutils.util import byte_compile -files = [ -""") - - # XXX would be nice to write absolute filenames, just for - # safety's sake (script should be more robust in the face of - # chdir'ing before running it). But this requires abspath'ing - # 'prefix' as well, and that breaks the hack in build_lib's - # 'byte_compile()' method that carefully tacks on a trailing - # slash (os.sep really) to make sure the prefix here is "just - # right". This whole prefix business is rather delicate -- the - # problem is that it's really a directory, but I'm treating it - # as a dumb string, so trailing slashes and so forth matter. - - #py_files = map(os.path.abspath, py_files) - #if prefix: - # prefix = os.path.abspath(prefix) - - script.write(",\n".join(map(repr, py_files)) + "]\n") - script.write(""" -byte_compile(files, optimize=%r, force=%r, - prefix=%r, base_dir=%r, - verbose=%r, dry_run=0, - direct=1) -""" % (optimize, force, prefix, base_dir, verbose)) - - cmd = [sys.executable] - cmd.extend(_optim_args_from_interpreter_flags()) - cmd.append(script_name) - spawn(cmd, dry_run=dry_run) - execute(os.remove, (script_name,), "removing %s" % script_name, - dry_run=dry_run) - - # "Direct" byte-compilation: use the py_compile module to compile - # right here, right now. Note that the script generated in indirect - # mode simply calls 'byte_compile()' in direct mode, a weird sort of - # cross-process recursion. Hey, it works! - else: - from py_compile import compile - - for file in py_files: - if file[-3:] != ".py": - # This lets us be lazy and not filter filenames in - # the "install_lib" command. - continue - - # Terminology from the py_compile module: - # cfile - byte-compiled file - # dfile - purported source filename (same as 'file' by default) - if optimize >= 0: - opt = '' if optimize == 0 else optimize - cfile = importlib.util.cache_from_source( - file, optimization=opt) - else: - cfile = importlib.util.cache_from_source(file) - dfile = file - if prefix: - if file[:len(prefix)] != prefix: - raise ValueError("invalid prefix: filename %r doesn't start with %r" - % (file, prefix)) - dfile = dfile[len(prefix):] - if base_dir: - dfile = os.path.join(base_dir, dfile) - - cfile_base = os.path.basename(cfile) - if direct: - if force or newer(file, cfile): - log.info("byte-compiling %s to %s", file, cfile_base) - if not dry_run: - compile(file, cfile, dfile) - else: - log.debug("skipping byte-compilation of %s to %s", - file, cfile_base) - -# byte_compile () - -def rfc822_escape (header): - """Return a version of the string escaped for inclusion in an - RFC-822 header, by ensuring there are 8 spaces space after each newline. 
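What the escaping above yields for a multi-line header value (invented input):

from setuptools._distutils.util import rfc822_escape

print(rfc822_escape("First line\nsecond line\nthird line"))
# First line
#         second line
#         third line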
- """ - lines = header.split('\n') - sep = '\n' + 8 * ' ' - return sep.join(lines) diff --git a/venv/Lib/site-packages/setuptools/_distutils/version.py b/venv/Lib/site-packages/setuptools/_distutils/version.py deleted file mode 100644 index 35e181d..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/version.py +++ /dev/null @@ -1,363 +0,0 @@ -# -# distutils/version.py -# -# Implements multiple version numbering conventions for the -# Python Module Distribution Utilities. -# -# $Id$ -# - -"""Provides classes to represent module version numbers (one class for -each style of version numbering). There are currently two such classes -implemented: StrictVersion and LooseVersion. - -Every version number class implements the following interface: - * the 'parse' method takes a string and parses it to some internal - representation; if the string is an invalid version number, - 'parse' raises a ValueError exception - * the class constructor takes an optional string argument which, - if supplied, is passed to 'parse' - * __str__ reconstructs the string that was passed to 'parse' (or - an equivalent string -- ie. one that will generate an equivalent - version number instance) - * __repr__ generates Python code to recreate the version number instance - * _cmp compares the current instance with either another instance - of the same class or a string (which will be parsed to an instance - of the same class, thus must follow the same rules) -""" - -import re -import warnings -import contextlib - - -@contextlib.contextmanager -def suppress_known_deprecation(): - with warnings.catch_warnings(record=True) as ctx: - warnings.filterwarnings( - action='default', - category=DeprecationWarning, - message="distutils Version classes are deprecated.", - ) - yield ctx - - -class Version: - """Abstract base class for version numbering classes. Just provides - constructor (__init__) and reproducer (__repr__), because those - seem to be the same for all version numbering classes; and route - rich comparisons to _cmp. - """ - - def __init__ (self, vstring=None): - warnings.warn( - "distutils Version classes are deprecated. " - "Use packaging.version instead.", - DeprecationWarning, - stacklevel=2, - ) - if vstring: - self.parse(vstring) - - def __repr__ (self): - return "%s ('%s')" % (self.__class__.__name__, str(self)) - - def __eq__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c == 0 - - def __lt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c < 0 - - def __le__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c <= 0 - - def __gt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c > 0 - - def __ge__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c >= 0 - - -# Interface for version-number classes -- must be implemented -# by the following classes (the concrete ones -- Version should -# be treated as an abstract class). 
-# __init__ (string) - create and take same action as 'parse' -# (string parameter is optional) -# parse (string) - convert a string representation to whatever -# internal representation is appropriate for -# this style of version numbering -# __str__ (self) - convert back to a string; should be very similar -# (if not identical to) the string supplied to parse -# __repr__ (self) - generate Python code to recreate -# the instance -# _cmp (self, other) - compare two version numbers ('other' may -# be an unparsed version string, or another -# instance of your version class) - - -class StrictVersion (Version): - - """Version numbering for anal retentives and software idealists. - Implements the standard interface for version number classes as - described above. A version number consists of two or three - dot-separated numeric components, with an optional "pre-release" tag - on the end. The pre-release tag consists of the letter 'a' or 'b' - followed by a number. If the numeric components of two version - numbers are equal, then one with a pre-release tag will always - be deemed earlier (lesser) than one without. - - The following are valid version numbers (shown in the order that - would be obtained by sorting according to the supplied cmp function): - - 0.4 0.4.0 (these two are equivalent) - 0.4.1 - 0.5a1 - 0.5b3 - 0.5 - 0.9.6 - 1.0 - 1.0.4a3 - 1.0.4b1 - 1.0.4 - - The following are examples of invalid version numbers: - - 1 - 2.7.2.2 - 1.3.a4 - 1.3pl1 - 1.3c4 - - The rationale for this version numbering system will be explained - in the distutils documentation. - """ - - version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', - re.VERBOSE | re.ASCII) - - - def parse (self, vstring): - match = self.version_re.match(vstring) - if not match: - raise ValueError("invalid version number '%s'" % vstring) - - (major, minor, patch, prerelease, prerelease_num) = \ - match.group(1, 2, 4, 5, 6) - - if patch: - self.version = tuple(map(int, [major, minor, patch])) - else: - self.version = tuple(map(int, [major, minor])) + (0,) - - if prerelease: - self.prerelease = (prerelease[0], int(prerelease_num)) - else: - self.prerelease = None - - - def __str__ (self): - - if self.version[2] == 0: - vstring = '.'.join(map(str, self.version[0:2])) - else: - vstring = '.'.join(map(str, self.version)) - - if self.prerelease: - vstring = vstring + self.prerelease[0] + str(self.prerelease[1]) - - return vstring - - - def _cmp (self, other): - if isinstance(other, str): - with suppress_known_deprecation(): - other = StrictVersion(other) - elif not isinstance(other, StrictVersion): - return NotImplemented - - if self.version != other.version: - # numeric versions don't match - # prerelease stuff doesn't matter - if self.version < other.version: - return -1 - else: - return 1 - - # have to compare prerelease - # case 1: neither has prerelease; they're equal - # case 2: self has prerelease, other doesn't; other is greater - # case 3: self doesn't have prerelease, other does: self is greater - # case 4: both have prerelease: must compare them! 
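The ordering rules enumerated above, checked directly against the class (constructing these emits the DeprecationWarning noted in Version.__init__):

from distutils.version import StrictVersion   # or setuptools._distutils.version

assert StrictVersion("0.4") == StrictVersion("0.4.0")       # missing patch level == 0
assert StrictVersion("0.5a1") < StrictVersion("0.5")        # pre-release sorts earlier
assert StrictVersion("1.0.4a3") < StrictVersion("1.0.4b1") < StrictVersion("1.0.4")
print("ordering as documented")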
- - if (not self.prerelease and not other.prerelease): - return 0 - elif (self.prerelease and not other.prerelease): - return -1 - elif (not self.prerelease and other.prerelease): - return 1 - elif (self.prerelease and other.prerelease): - if self.prerelease == other.prerelease: - return 0 - elif self.prerelease < other.prerelease: - return -1 - else: - return 1 - else: - assert False, "never get here" - -# end class StrictVersion - - -# The rules according to Greg Stein: -# 1) a version number has 1 or more numbers separated by a period or by -# sequences of letters. If only periods, then these are compared -# left-to-right to determine an ordering. -# 2) sequences of letters are part of the tuple for comparison and are -# compared lexicographically -# 3) recognize the numeric components may have leading zeroes -# -# The LooseVersion class below implements these rules: a version number -# string is split up into a tuple of integer and string components, and -# comparison is a simple tuple comparison. This means that version -# numbers behave in a predictable and obvious way, but a way that might -# not necessarily be how people *want* version numbers to behave. There -# wouldn't be a problem if people could stick to purely numeric version -# numbers: just split on period and compare the numbers as tuples. -# However, people insist on putting letters into their version numbers; -# the most common purpose seems to be: -# - indicating a "pre-release" version -# ('alpha', 'beta', 'a', 'b', 'pre', 'p') -# - indicating a post-release patch ('p', 'pl', 'patch') -# but of course this can't cover all version number schemes, and there's -# no way to know what a programmer means without asking him. -# -# The problem is what to do with letters (and other non-numeric -# characters) in a version number. The current implementation does the -# obvious and predictable thing: keep them as strings and compare -# lexically within a tuple comparison. This has the desired effect if -# an appended letter sequence implies something "post-release": -# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". -# -# However, if letters in a version number imply a pre-release version, -# the "obvious" thing isn't correct. Eg. you would expect that -# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison -# implemented here, this just isn't so. -# -# Two possible solutions come to mind. The first is to tie the -# comparison algorithm to a particular set of semantic rules, as has -# been done in the StrictVersion class above. This works great as long -# as everyone can go along with bondage and discipline. Hopefully a -# (large) subset of Python module programmers will agree that the -# particular flavour of bondage and discipline provided by StrictVersion -# provides enough benefit to be worth using, and will submit their -# version numbering scheme to its domination. The free-thinking -# anarchists in the lot will never give in, though, and something needs -# to be done to accommodate them. -# -# Perhaps a "moderately strict" version class could be implemented that -# lets almost anything slide (syntactically), and makes some heuristic -# assumptions about non-digits in version number strings. This could -# sink into special-case-hell, though; if I was as talented and -# idiosyncratic as Larry Wall, I'd go ahead and implement a class that -# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is -# just as happy dealing with things like "2g6" and "1.13++". 
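The caveat above, made concrete with the same classes (again deprecated, but still defined in this file):

from distutils.version import LooseVersion

# Post-release suffixes sort the way one would hope:
assert LooseVersion("0.99") < LooseVersion("0.99pl14") < LooseVersion("1.0")
# ...but a pre-release suffix sorts *after* the plain release, which is
# exactly the surprise discussed above:
assert LooseVersion("1.5.2a2") > LooseVersion("1.5.2")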
I don't -# think I'm smart enough to do it right though. -# -# In any case, I've coded the test suite for this module (see -# ../test/test_version.py) specifically to fail on things like comparing -# "1.2a2" and "1.2". That's not because the *code* is doing anything -# wrong, it's because the simple, obvious design doesn't match my -# complicated, hairy expectations for real-world version numbers. It -# would be a snap to fix the test suite to say, "Yep, LooseVersion does -# the Right Thing" (ie. the code matches the conception). But I'd rather -# have a conception that matches common notions about version numbers. - -class LooseVersion (Version): - - """Version numbering for anarchists and software realists. - Implements the standard interface for version number classes as - described above. A version number consists of a series of numbers, - separated by either periods or strings of letters. When comparing - version numbers, the numeric components will be compared - numerically, and the alphabetic components lexically. The following - are all valid version numbers, in no particular order: - - 1.5.1 - 1.5.2b2 - 161 - 3.10a - 8.02 - 3.4j - 1996.07.12 - 3.2.pl0 - 3.1.1.6 - 2g6 - 11g - 0.960923 - 2.2beta29 - 1.13++ - 5.5.kw - 2.0b1pl0 - - In fact, there is no such thing as an invalid version number under - this scheme; the rules for comparison are simple and predictable, - but may not always give the results you want (for some definition - of "want"). - """ - - component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) - - def parse (self, vstring): - # I've given up on thinking I can reconstruct the version string - # from the parsed tuple -- so I just store the string here for - # use by __str__ - self.vstring = vstring - components = [x for x in self.component_re.split(vstring) - if x and x != '.'] - for i, obj in enumerate(components): - try: - components[i] = int(obj) - except ValueError: - pass - - self.version = components - - - def __str__ (self): - return self.vstring - - - def __repr__ (self): - return "LooseVersion ('%s')" % str(self) - - - def _cmp (self, other): - if isinstance(other, str): - other = LooseVersion(other) - elif not isinstance(other, LooseVersion): - return NotImplemented - - if self.version == other.version: - return 0 - if self.version < other.version: - return -1 - if self.version > other.version: - return 1 - - -# end class LooseVersion diff --git a/venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py b/venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py deleted file mode 100644 index 55f25d9..0000000 --- a/venv/Lib/site-packages/setuptools/_distutils/versionpredicate.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Module for parsing and testing package version predicate strings. -""" -import re -import distutils.version -import operator - - -re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", - re.ASCII) -# (package) (rest) - -re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses -re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") -# (comp) (version) - - -def splitUp(pred): - """Parse a single version comparison. 
- - Return (comparison string, StrictVersion) - """ - res = re_splitComparison.match(pred) - if not res: - raise ValueError("bad package restriction syntax: %r" % pred) - comp, verStr = res.groups() - with distutils.version.suppress_known_deprecation(): - other = distutils.version.StrictVersion(verStr) - return (comp, other) - -compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, - ">": operator.gt, ">=": operator.ge, "!=": operator.ne} - -class VersionPredicate: - """Parse and test package version predicates. - - >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)') - - The `name` attribute provides the full dotted name that is given:: - - >>> v.name - 'pyepat.abc' - - The str() of a `VersionPredicate` provides a normalized - human-readable version of the expression:: - - >>> print(v) - pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3) - - The `satisfied_by()` method can be used to determine with a given - version number is included in the set described by the version - restrictions:: - - >>> v.satisfied_by('1.1') - True - >>> v.satisfied_by('1.4') - True - >>> v.satisfied_by('1.0') - False - >>> v.satisfied_by('4444.4') - False - >>> v.satisfied_by('1555.1b3') - False - - `VersionPredicate` is flexible in accepting extra whitespace:: - - >>> v = VersionPredicate(' pat( == 0.1 ) ') - >>> v.name - 'pat' - >>> v.satisfied_by('0.1') - True - >>> v.satisfied_by('0.2') - False - - If any version numbers passed in do not conform to the - restrictions of `StrictVersion`, a `ValueError` is raised:: - - >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)') - Traceback (most recent call last): - ... - ValueError: invalid version number '1.2zb3' - - It the module or package name given does not conform to what's - allowed as a legal module or package name, `ValueError` is - raised:: - - >>> v = VersionPredicate('foo-bar') - Traceback (most recent call last): - ... - ValueError: expected parenthesized list: '-bar' - - >>> v = VersionPredicate('foo bar (12.21)') - Traceback (most recent call last): - ... - ValueError: expected parenthesized list: 'bar (12.21)' - - """ - - def __init__(self, versionPredicateStr): - """Parse a version predicate string. - """ - # Fields: - # name: package name - # pred: list of (comparison string, StrictVersion) - - versionPredicateStr = versionPredicateStr.strip() - if not versionPredicateStr: - raise ValueError("empty package restriction") - match = re_validPackage.match(versionPredicateStr) - if not match: - raise ValueError("bad package name in %r" % versionPredicateStr) - self.name, paren = match.groups() - paren = paren.strip() - if paren: - match = re_paren.match(paren) - if not match: - raise ValueError("expected parenthesized list: %r" % paren) - str = match.groups()[0] - self.pred = [splitUp(aPred) for aPred in str.split(",")] - if not self.pred: - raise ValueError("empty parenthesized list in %r" - % versionPredicateStr) - else: - self.pred = [] - - def __str__(self): - if self.pred: - seq = [cond + " " + str(ver) for cond, ver in self.pred] - return self.name + " (" + ", ".join(seq) + ")" - else: - return self.name - - def satisfied_by(self, version): - """True if version is compatible with all the predicates in self. - The parameter version must be acceptable to the StrictVersion - constructor. It may be either a string or StrictVersion. 
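A usage sketch mirroring the doctests above, with an invented package name:

from distutils.versionpredicate import VersionPredicate

v = VersionPredicate("mypkg (>1.0, !=1.5.1, <2.0)")
print(v.name)                   # 'mypkg'
print(str(v))                   # 'mypkg (> 1.0, != 1.5.1, < 2.0)'
print(v.satisfied_by("1.6"))    # True
print(v.satisfied_by("1.5.1"))  # False
print(v.satisfied_by("2.0"))    # False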
- """ - for cond, ver in self.pred: - if not compmap[cond](version, ver): - return False - return True - - -_provision_rx = None - -def split_provision(value): - """Return the name and optional version number of a provision. - - The version number, if given, will be returned as a `StrictVersion` - instance, otherwise it will be `None`. - - >>> split_provision('mypkg') - ('mypkg', None) - >>> split_provision(' mypkg( 1.2 ) ') - ('mypkg', StrictVersion ('1.2')) - """ - global _provision_rx - if _provision_rx is None: - _provision_rx = re.compile( - r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", - re.ASCII) - value = value.strip() - m = _provision_rx.match(value) - if not m: - raise ValueError("illegal provides specification: %r" % value) - ver = m.group(2) or None - if ver: - with distutils.version.suppress_known_deprecation(): - ver = distutils.version.StrictVersion(ver) - return m.group(1), ver diff --git a/venv/Lib/site-packages/setuptools/_entry_points.py b/venv/Lib/site-packages/setuptools/_entry_points.py deleted file mode 100644 index f087681..0000000 --- a/venv/Lib/site-packages/setuptools/_entry_points.py +++ /dev/null @@ -1,86 +0,0 @@ -import functools -import operator -import itertools - -from .extern.jaraco.text import yield_lines -from .extern.jaraco.functools import pass_none -from ._importlib import metadata -from ._itertools import ensure_unique -from .extern.more_itertools import consume - - -def ensure_valid(ep): - """ - Exercise one of the dynamic properties to trigger - the pattern match. - """ - ep.extras - - -def load_group(value, group): - """ - Given a value of an entry point or series of entry points, - return each as an EntryPoint. - """ - # normalize to a single sequence of lines - lines = yield_lines(value) - text = f'[{group}]\n' + '\n'.join(lines) - return metadata.EntryPoints._from_text(text) - - -def by_group_and_name(ep): - return ep.group, ep.name - - -def validate(eps: metadata.EntryPoints): - """ - Ensure entry points are unique by group and name and validate each. - """ - consume(map(ensure_valid, ensure_unique(eps, key=by_group_and_name))) - return eps - - -@functools.singledispatch -def load(eps): - """ - Given a Distribution.entry_points, produce EntryPoints. - """ - groups = itertools.chain.from_iterable( - load_group(value, group) - for group, value in eps.items()) - return validate(metadata.EntryPoints(groups)) - - -@load.register(str) -def _(eps): - r""" - >>> ep, = load('[console_scripts]\nfoo=bar') - >>> ep.group - 'console_scripts' - >>> ep.name - 'foo' - >>> ep.value - 'bar' - """ - return validate(metadata.EntryPoints(metadata.EntryPoints._from_text(eps))) - - -load.register(type(None), lambda x: x) - - -@pass_none -def render(eps: metadata.EntryPoints): - by_group = operator.attrgetter('group') - groups = itertools.groupby(sorted(eps, key=by_group), by_group) - - return '\n'.join( - f'[{group}]\n{render_items(items)}\n' - for group, items in groups - ) - - -def render_items(eps): - return '\n'.join( - f'{ep.name} = {ep.value}' - for ep in sorted(eps) - ) diff --git a/venv/Lib/site-packages/setuptools/_imp.py b/venv/Lib/site-packages/setuptools/_imp.py deleted file mode 100644 index 47efd79..0000000 --- a/venv/Lib/site-packages/setuptools/_imp.py +++ /dev/null @@ -1,82 +0,0 @@ -""" -Re-implementation of find_module and get_frozen_object -from the deprecated imp module. 
-""" - -import os -import importlib.util -import importlib.machinery - -from .py34compat import module_from_spec - - -PY_SOURCE = 1 -PY_COMPILED = 2 -C_EXTENSION = 3 -C_BUILTIN = 6 -PY_FROZEN = 7 - - -def find_spec(module, paths): - finder = ( - importlib.machinery.PathFinder().find_spec - if isinstance(paths, list) else - importlib.util.find_spec - ) - return finder(module, paths) - - -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - spec = find_spec(module, paths) - if spec is None: - raise ImportError("Can't find %s" % module) - if not spec.has_location and hasattr(spec, 'submodule_search_locations'): - spec = importlib.util.spec_from_loader('__init__.py', spec.loader) - - kind = -1 - file = None - static = isinstance(spec.loader, type) - if spec.origin == 'frozen' or static and issubclass( - spec.loader, importlib.machinery.FrozenImporter): - kind = PY_FROZEN - path = None # imp compabilty - suffix = mode = '' # imp compatibility - elif spec.origin == 'built-in' or static and issubclass( - spec.loader, importlib.machinery.BuiltinImporter): - kind = C_BUILTIN - path = None # imp compabilty - suffix = mode = '' # imp compatibility - elif spec.has_location: - path = spec.origin - suffix = os.path.splitext(path)[1] - mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb' - - if suffix in importlib.machinery.SOURCE_SUFFIXES: - kind = PY_SOURCE - elif suffix in importlib.machinery.BYTECODE_SUFFIXES: - kind = PY_COMPILED - elif suffix in importlib.machinery.EXTENSION_SUFFIXES: - kind = C_EXTENSION - - if kind in {PY_SOURCE, PY_COMPILED}: - file = open(path, mode) - else: - path = None - suffix = mode = '' - - return file, path, (suffix, mode, kind) - - -def get_frozen_object(module, paths=None): - spec = find_spec(module, paths) - if not spec: - raise ImportError("Can't find %s" % module) - return spec.loader.get_code(module) - - -def get_module(module, paths, info): - spec = find_spec(module, paths) - if not spec: - raise ImportError("Can't find %s" % module) - return module_from_spec(spec) diff --git a/venv/Lib/site-packages/setuptools/_importlib.py b/venv/Lib/site-packages/setuptools/_importlib.py deleted file mode 100644 index c1ac137..0000000 --- a/venv/Lib/site-packages/setuptools/_importlib.py +++ /dev/null @@ -1,36 +0,0 @@ -import sys - - -def disable_importlib_metadata_finder(metadata): - """ - Ensure importlib_metadata doesn't provide older, incompatible - Distributions. - - Workaround for #3102. 
- """ - try: - import importlib_metadata - except ImportError: - return - if importlib_metadata is metadata: - return - to_remove = [ - ob - for ob in sys.meta_path - if isinstance(ob, importlib_metadata.MetadataPathFinder) - ] - for item in to_remove: - sys.meta_path.remove(item) - - -if sys.version_info < (3, 10): - from setuptools.extern import importlib_metadata as metadata - disable_importlib_metadata_finder(metadata) -else: - import importlib.metadata as metadata # noqa: F401 - - -if sys.version_info < (3, 9): - from setuptools.extern import importlib_resources as resources -else: - import importlib.resources as resources # noqa: F401 diff --git a/venv/Lib/site-packages/setuptools/_itertools.py b/venv/Lib/site-packages/setuptools/_itertools.py deleted file mode 100644 index b8bf6d2..0000000 --- a/venv/Lib/site-packages/setuptools/_itertools.py +++ /dev/null @@ -1,23 +0,0 @@ -from setuptools.extern.more_itertools import consume # noqa: F401 - - -# copied from jaraco.itertools 6.1 -def ensure_unique(iterable, key=lambda x: x): - """ - Wrap an iterable to raise a ValueError if non-unique values are encountered. - - >>> list(ensure_unique('abc')) - ['a', 'b', 'c'] - >>> consume(ensure_unique('abca')) - Traceback (most recent call last): - ... - ValueError: Duplicate element 'a' encountered. - """ - seen = set() - seen_add = seen.add - for element in iterable: - k = key(element) - if k in seen: - raise ValueError(f"Duplicate element {element!r} encountered.") - seen_add(k) - yield element diff --git a/venv/Lib/site-packages/setuptools/_path.py b/venv/Lib/site-packages/setuptools/_path.py deleted file mode 100644 index ede9cb0..0000000 --- a/venv/Lib/site-packages/setuptools/_path.py +++ /dev/null @@ -1,7 +0,0 @@ -import os - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - os.makedirs(dirname, exist_ok=True) diff --git a/venv/Lib/site-packages/setuptools/_reqs.py b/venv/Lib/site-packages/setuptools/_reqs.py deleted file mode 100644 index ca72417..0000000 --- a/venv/Lib/site-packages/setuptools/_reqs.py +++ /dev/null @@ -1,19 +0,0 @@ -import setuptools.extern.jaraco.text as text - -from pkg_resources import Requirement - - -def parse_strings(strs): - """ - Yield requirement strings for each specification in `strs`. - - `strs` must be a string, or a (possibly-nested) iterable thereof. - """ - return text.join_continuation(map(text.drop_comment, text.yield_lines(strs))) - - -def parse(strs): - """ - Deprecated drop-in replacement for pkg_resources.parse_requirements. 
- """ - return map(Requirement, parse_strings(strs)) diff --git a/venv/Lib/site-packages/setuptools/_vendor/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index f94715e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-39.pyc deleted file mode 100644 index 84799a9..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-39.pyc deleted file mode 100644 index 42c9bba..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-39.pyc deleted file mode 100644 index f072a2d..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/zipp.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/__pycache__/zipp.cpython-39.pyc deleted file mode 100644 index 1124549..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/__pycache__/zipp.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__init__.py deleted file mode 100644 index 292e0c6..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__init__.py +++ /dev/null @@ -1,1047 +0,0 @@ -import os -import re -import abc -import csv -import sys -from .. import zipp -import email -import pathlib -import operator -import textwrap -import warnings -import functools -import itertools -import posixpath -import collections - -from . 
import _adapters, _meta -from ._collections import FreezableDefaultDict, Pair -from ._compat import ( - NullFinder, - install, - pypy_partial, -) -from ._functools import method_cache, pass_none -from ._itertools import always_iterable, unique_everseen -from ._meta import PackageMetadata, SimplePath - -from contextlib import suppress -from importlib import import_module -from importlib.abc import MetaPathFinder -from itertools import starmap -from typing import List, Mapping, Optional, Union - - -__all__ = [ - 'Distribution', - 'DistributionFinder', - 'PackageMetadata', - 'PackageNotFoundError', - 'distribution', - 'distributions', - 'entry_points', - 'files', - 'metadata', - 'packages_distributions', - 'requires', - 'version', -] - - -class PackageNotFoundError(ModuleNotFoundError): - """The package was not found.""" - - def __str__(self): - return f"No package metadata was found for {self.name}" - - @property - def name(self): - (name,) = self.args - return name - - -class Sectioned: - """ - A simple entry point config parser for performance - - >>> for item in Sectioned.read(Sectioned._sample): - ... print(item) - Pair(name='sec1', value='# comments ignored') - Pair(name='sec1', value='a = 1') - Pair(name='sec1', value='b = 2') - Pair(name='sec2', value='a = 2') - - >>> res = Sectioned.section_pairs(Sectioned._sample) - >>> item = next(res) - >>> item.name - 'sec1' - >>> item.value - Pair(name='a', value='1') - >>> item = next(res) - >>> item.value - Pair(name='b', value='2') - >>> item = next(res) - >>> item.name - 'sec2' - >>> item.value - Pair(name='a', value='2') - >>> list(res) - [] - """ - - _sample = textwrap.dedent( - """ - [sec1] - # comments ignored - a = 1 - b = 2 - - [sec2] - a = 2 - """ - ).lstrip() - - @classmethod - def section_pairs(cls, text): - return ( - section._replace(value=Pair.parse(section.value)) - for section in cls.read(text, filter_=cls.valid) - if section.name is not None - ) - - @staticmethod - def read(text, filter_=None): - lines = filter(filter_, map(str.strip, text.splitlines())) - name = None - for value in lines: - section_match = value.startswith('[') and value.endswith(']') - if section_match: - name = value.strip('[]') - continue - yield Pair(name, value) - - @staticmethod - def valid(line): - return line and not line.startswith('#') - - -class DeprecatedTuple: - """ - Provide subscript item access for backward compatibility. - - >>> recwarn = getfixture('recwarn') - >>> ep = EntryPoint(name='name', value='value', group='group') - >>> ep[:] - ('name', 'value', 'group') - >>> ep[0] - 'name' - >>> len(recwarn) - 1 - """ - - _warn = functools.partial( - warnings.warn, - "EntryPoint tuple interface is deprecated. Access members by name.", - DeprecationWarning, - stacklevel=pypy_partial(2), - ) - - def __getitem__(self, item): - self._warn() - return self._key()[item] - - -class EntryPoint(DeprecatedTuple): - """An entry point as defined by Python packaging conventions. - - See `the packaging docs on entry points - <https://packaging.python.org/specifications/entry-points/>`_ - for more information. - """ - - pattern = re.compile( - r'(?P<module>[\w.]+)\s*' - r'(:\s*(?P<attr>[\w.]+)\s*)?' - r'((?P<extras>\[.*\])\s*)?$' - ) - """ - A regular expression describing the syntax for an entry point, - which might look like: - - - module - - package.module - - package.module:attribute - - package.module:object.attribute - - package.module:attr [extra1, extra2] - - Other combinations are possible as well. - - The expression is lenient about whitespace around the ':', - following the attr, and following any extras.
- """ - - dist: Optional['Distribution'] = None - - def __init__(self, name, value, group): - vars(self).update(name=name, value=value, group=group) - - def load(self): - """Load the entry point from its definition. If only a module - is indicated by the value, return that module. Otherwise, - return the named object. - """ - match = self.pattern.match(self.value) - module = import_module(match.group('module')) - attrs = filter(None, (match.group('attr') or '').split('.')) - return functools.reduce(getattr, attrs, module) - - @property - def module(self): - match = self.pattern.match(self.value) - return match.group('module') - - @property - def attr(self): - match = self.pattern.match(self.value) - return match.group('attr') - - @property - def extras(self): - match = self.pattern.match(self.value) - return list(re.finditer(r'\w+', match.group('extras') or '')) - - def _for(self, dist): - vars(self).update(dist=dist) - return self - - def __iter__(self): - """ - Supply iter so one may construct dicts of EntryPoints by name. - """ - msg = ( - "Construction of dict of EntryPoints is deprecated in " - "favor of EntryPoints." - ) - warnings.warn(msg, DeprecationWarning) - return iter((self.name, self)) - - def matches(self, **params): - attrs = (getattr(self, param) for param in params) - return all(map(operator.eq, params.values(), attrs)) - - def _key(self): - return self.name, self.value, self.group - - def __lt__(self, other): - return self._key() < other._key() - - def __eq__(self, other): - return self._key() == other._key() - - def __setattr__(self, name, value): - raise AttributeError("EntryPoint objects are immutable.") - - def __repr__(self): - return ( - f'EntryPoint(name={self.name!r}, value={self.value!r}, ' - f'group={self.group!r})' - ) - - def __hash__(self): - return hash(self._key()) - - -class DeprecatedList(list): - """ - Allow an otherwise immutable object to implement mutability - for compatibility. - - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ - - __slots__ = () - - _warn = functools.partial( - warnings.warn, - "EntryPoints list interface is deprecated. Cast to list if needed.", - DeprecationWarning, - stacklevel=pypy_partial(2), - ) - - def _wrap_deprecated_method(method_name: str): # type: ignore - def wrapped(self, *args, **kwargs): - self._warn() - return getattr(super(), method_name)(*args, **kwargs) - - return method_name, wrapped - - locals().update( - map( - _wrap_deprecated_method, - '__setitem__ __delitem__ append reverse extend pop remove ' - '__iadd__ insert sort'.split(), - ) - ) - - def __add__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - return self.__class__(tuple(self) + other) - - def __eq__(self, other): - if not isinstance(other, tuple): - self._warn() - other = tuple(other) - - return tuple(self).__eq__(other) - - -class EntryPoints(DeprecatedList): - """ - An immutable collection of selectable EntryPoint objects. - """ - - __slots__ = () - - def __getitem__(self, name): # -> EntryPoint: - """ - Get the EntryPoint in self matching name. 
- """ - if isinstance(name, int): - warnings.warn( - "Accessing entry points by index is deprecated. " - "Cast to tuple if needed.", - DeprecationWarning, - stacklevel=2, - ) - return super().__getitem__(name) - try: - return next(iter(self.select(name=name))) - except StopIteration: - raise KeyError(name) - - def select(self, **params): - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - return EntryPoints(ep for ep in self if ep.matches(**params)) - - @property - def names(self): - """ - Return the set of all names of all entry points. - """ - return {ep.name for ep in self} - - @property - def groups(self): - """ - Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() - """ - return {ep.group for ep in self} - - @classmethod - def _from_text_for(cls, text, dist): - return cls(ep._for(dist) for ep in cls._from_text(text)) - - @staticmethod - def _from_text(text): - return ( - EntryPoint(name=item.value.name, value=item.value.value, group=item.name) - for item in Sectioned.section_pairs(text or '') - ) - - -class Deprecated: - """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. - - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ - - _warn = functools.partial( - warnings.warn, - "SelectableGroups dict interface is deprecated. Use select.", - DeprecationWarning, - stacklevel=pypy_partial(2), - ) - - def __getitem__(self, name): - self._warn() - return super().__getitem__(name) - - def get(self, name, default=None): - self._warn() - return super().get(name, default) - - def __iter__(self): - self._warn() - return super().__iter__() - - def __contains__(self, *args): - self._warn() - return super().__contains__(*args) - - def keys(self): - self._warn() - return super().keys() - - def values(self): - self._warn() - return super().values() - - -class SelectableGroups(Deprecated, dict): - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - - @classmethod - def load(cls, eps): - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return cls((group, EntryPoints(eps)) for group, eps in grouped) - - @property - def _all(self): - """ - Reconstruct a list of all entrypoints from the groups. 
- """ - groups = super(Deprecated, self).values() - return EntryPoints(itertools.chain.from_iterable(groups)) - - @property - def groups(self): - return self._all.groups - - @property - def names(self): - """ - for coverage: - >>> SelectableGroups().names - set() - """ - return self._all.names - - def select(self, **params): - if not params: - return self - return self._all.select(**params) - - -class PackagePath(pathlib.PurePosixPath): - """A reference to a path in a package""" - - def read_text(self, encoding='utf-8'): - with self.locate().open(encoding=encoding) as stream: - return stream.read() - - def read_binary(self): - with self.locate().open('rb') as stream: - return stream.read() - - def locate(self): - """Return a path-like object for this path""" - return self.dist.locate_file(self) - - -class FileHash: - def __init__(self, spec): - self.mode, _, self.value = spec.partition('=') - - def __repr__(self): - return f'' - - -class Distribution: - """A Python distribution package.""" - - @abc.abstractmethod - def read_text(self, filename): - """Attempt to load metadata file given by the name. - - :param filename: The name of the file in the distribution info. - :return: The text if found, otherwise None. - """ - - @abc.abstractmethod - def locate_file(self, path): - """ - Given a path to a file in this distribution, return a path - to it. - """ - - @classmethod - def from_name(cls, name): - """Return the Distribution for the given package name. - - :param name: The name of the distribution package to search for. - :return: The Distribution instance (or subclass thereof) for the named - package, if found. - :raises PackageNotFoundError: When the named package's distribution - metadata cannot be found. - """ - for resolver in cls._discover_resolvers(): - dists = resolver(DistributionFinder.Context(name=name)) - dist = next(iter(dists), None) - if dist is not None: - return dist - else: - raise PackageNotFoundError(name) - - @classmethod - def discover(cls, **kwargs): - """Return an iterable of Distribution objects for all packages. - - Pass a ``context`` or pass keyword arguments for constructing - a context. - - :context: A ``DistributionFinder.Context`` object. - :return: Iterable of Distribution objects for all packages. - """ - context = kwargs.pop('context', None) - if context and kwargs: - raise ValueError("cannot accept context and kwargs") - context = context or DistributionFinder.Context(**kwargs) - return itertools.chain.from_iterable( - resolver(context) for resolver in cls._discover_resolvers() - ) - - @staticmethod - def at(path): - """Return a Distribution for the indicated metadata path - - :param path: a string or path-like object - :return: a concrete Distribution instance for the path - """ - return PathDistribution(pathlib.Path(path)) - - @staticmethod - def _discover_resolvers(): - """Search the meta_path for resolvers.""" - declared = ( - getattr(finder, 'find_distributions', None) for finder in sys.meta_path - ) - return filter(None, declared) - - @property - def metadata(self) -> _meta.PackageMetadata: - """Return the parsed metadata for this Distribution. - - The returned object will have keys that name the various bits of - metadata. See PEP 566 for details. - """ - text = ( - self.read_text('METADATA') - or self.read_text('PKG-INFO') - # This last clause is here to support old egg-info files. Its - # effect is to just end up using the PathDistribution's self._path - # (which points to the egg-info file) attribute unchanged. 
- or self.read_text('') - ) - return _adapters.Message(email.message_from_string(text)) - - @property - def name(self): - """Return the 'Name' metadata for the distribution package.""" - return self.metadata['Name'] - - @property - def _normalized_name(self): - """Return a normalized version of the name.""" - return Prepared.normalize(self.name) - - @property - def version(self): - """Return the 'Version' metadata for the distribution package.""" - return self.metadata['Version'] - - @property - def entry_points(self): - return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) - - @property - def files(self): - """Files in this distribution. - - :return: List of PackagePath for this distribution or None - - Result is `None` if the metadata file that enumerates files - (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is - missing. - Result may be empty if the metadata exists but is empty. - """ - - def make_file(name, hash=None, size_str=None): - result = PackagePath(name) - result.hash = FileHash(hash) if hash else None - result.size = int(size_str) if size_str else None - result.dist = self - return result - - @pass_none - def make_files(lines): - return list(starmap(make_file, csv.reader(lines))) - - return make_files(self._read_files_distinfo() or self._read_files_egginfo()) - - def _read_files_distinfo(self): - """ - Read the lines of RECORD - """ - text = self.read_text('RECORD') - return text and text.splitlines() - - def _read_files_egginfo(self): - """ - SOURCES.txt might contain literal commas, so wrap each line - in quotes. - """ - text = self.read_text('SOURCES.txt') - return text and map('"{}"'.format, text.splitlines()) - - @property - def requires(self): - """Generated requirements specified for this Distribution""" - reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() - return reqs and list(reqs) - - def _read_dist_info_reqs(self): - return self.metadata.get_all('Requires-Dist') - - def _read_egg_info_reqs(self): - source = self.read_text('requires.txt') - return pass_none(self._deps_from_requires_text)(source) - - @classmethod - def _deps_from_requires_text(cls, source): - return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) - - @staticmethod - def _convert_egg_info_reqs_to_simple_reqs(sections): - """ - Historically, setuptools would solicit and store 'extra' - requirements, including those with environment markers, - in separate sections. More modern tools expect each - dependency to be defined separately, with any relevant - extras and environment markers attached directly to that - requirement. This method converts the former to the - latter. See _test_deps_from_requires_text for an example. - """ - - def make_condition(name): - return name and f'extra == "{name}"' - - def quoted_marker(section): - section = section or '' - extra, sep, markers = section.partition(':') - if extra and markers: - markers = f'({markers})' - conditions = list(filter(None, [markers, make_condition(extra)])) - return '; ' + ' and '.join(conditions) if conditions else '' - - def url_req_space(req): - """ - PEP 508 requires a space between the url_spec and the quoted_marker. - Ref python/importlib_metadata#357. - """ - # '@' is uniquely indicative of a url_req. - return ' ' * ('@' in req) - - for section in sections: - space = url_req_space(section.value) - yield section.value + space + quoted_marker(section.name) - - -class DistributionFinder(MetaPathFinder): - """ - A MetaPathFinder capable of discovering installed distributions. 
- """ - - class Context: - """ - Keyword arguments presented by the caller to - ``distributions()`` or ``Distribution.discover()`` - to narrow the scope of a search for distributions - in all DistributionFinders. - - Each DistributionFinder may expect any parameters - and should attempt to honor the canonical - parameters defined below when appropriate. - """ - - name = None - """ - Specific name for which a distribution finder should match. - A name of ``None`` matches all distributions. - """ - - def __init__(self, **kwargs): - vars(self).update(kwargs) - - @property - def path(self): - """ - The sequence of directory path that a distribution finder - should search. - - Typically refers to Python installed package paths such as - "site-packages" directories and defaults to ``sys.path``. - """ - return vars(self).get('path', sys.path) - - @abc.abstractmethod - def find_distributions(self, context=Context()): - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching the ``context``, - a DistributionFinder.Context instance. - """ - - -class FastPath: - """ - Micro-optimized class for searching a path for - children. - - >>> FastPath('').children() - ['...'] - """ - - @functools.lru_cache() # type: ignore - def __new__(cls, root): - return super().__new__(cls) - - def __init__(self, root): - self.root = str(root) - - def joinpath(self, child): - return pathlib.Path(self.root, child) - - def children(self): - with suppress(Exception): - return os.listdir(self.root or '.') - with suppress(Exception): - return self.zip_children() - return [] - - def zip_children(self): - zip_path = zipp.Path(self.root) - names = zip_path.root.namelist() - self.joinpath = zip_path.joinpath - - return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) - - def search(self, name): - return self.lookup(self.mtime).search(name) - - @property - def mtime(self): - with suppress(OSError): - return os.stat(self.root).st_mtime - self.lookup.cache_clear() - - @method_cache - def lookup(self, mtime): - return Lookup(self) - - -class Lookup: - def __init__(self, path: FastPath): - base = os.path.basename(path.root).lower() - base_is_egg = base.endswith(".egg") - self.infos = FreezableDefaultDict(list) - self.eggs = FreezableDefaultDict(list) - - for child in path.children(): - low = child.lower() - if low.endswith((".dist-info", ".egg-info")): - # rpartition is faster than splitext and suitable for this purpose. - name = low.rpartition(".")[0].partition("-")[0] - normalized = Prepared.normalize(name) - self.infos[normalized].append(path.joinpath(child)) - elif base_is_egg and low == "egg-info": - name = base.rpartition(".")[0].partition("-")[0] - legacy_normalized = Prepared.legacy_normalize(name) - self.eggs[legacy_normalized].append(path.joinpath(child)) - - self.infos.freeze() - self.eggs.freeze() - - def search(self, prepared): - infos = ( - self.infos[prepared.normalized] - if prepared - else itertools.chain.from_iterable(self.infos.values()) - ) - eggs = ( - self.eggs[prepared.legacy_normalized] - if prepared - else itertools.chain.from_iterable(self.eggs.values()) - ) - return itertools.chain(infos, eggs) - - -class Prepared: - """ - A prepared search for metadata on a possibly-named package. 
- """ - - normalized = None - legacy_normalized = None - - def __init__(self, name): - self.name = name - if name is None: - return - self.normalized = self.normalize(name) - self.legacy_normalized = self.legacy_normalize(name) - - @staticmethod - def normalize(name): - """ - PEP 503 normalization plus dashes as underscores. - """ - return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') - - @staticmethod - def legacy_normalize(name): - """ - Normalize the package name as found in the convention in - older packaging tools versions and specs. - """ - return name.lower().replace('-', '_') - - def __bool__(self): - return bool(self.name) - - -@install -class MetadataPathFinder(NullFinder, DistributionFinder): - """A degenerate finder for distribution packages on the file system. - - This finder supplies only a find_distributions() method for versions - of Python that do not have a PathFinder find_distributions(). - """ - - def find_distributions(self, context=DistributionFinder.Context()): - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching ``context.name`` - (or all names if ``None`` indicated) along the paths in the list - of directories ``context.path``. - """ - found = self._search_paths(context.name, context.path) - return map(PathDistribution, found) - - @classmethod - def _search_paths(cls, name, paths): - """Find metadata directories in paths heuristically.""" - prepared = Prepared(name) - return itertools.chain.from_iterable( - path.search(prepared) for path in map(FastPath, paths) - ) - - def invalidate_caches(cls): - FastPath.__new__.cache_clear() - - -class PathDistribution(Distribution): - def __init__(self, path: SimplePath): - """Construct a distribution. - - :param path: SimplePath indicating the metadata directory. - """ - self._path = path - - def read_text(self, filename): - with suppress( - FileNotFoundError, - IsADirectoryError, - KeyError, - NotADirectoryError, - PermissionError, - ): - return self._path.joinpath(filename).read_text(encoding='utf-8') - - read_text.__doc__ = Distribution.read_text.__doc__ - - def locate_file(self, path): - return self._path.parent / path - - @property - def _normalized_name(self): - """ - Performance optimization: where possible, resolve the - normalized name from the file system path. - """ - stem = os.path.basename(str(self._path)) - return self._name_from_stem(stem) or super()._normalized_name - - def _name_from_stem(self, stem): - name, ext = os.path.splitext(stem) - if ext not in ('.dist-info', '.egg-info'): - return - name, sep, rest = stem.partition('-') - return name - - -def distribution(distribution_name): - """Get the ``Distribution`` instance for the named package. - - :param distribution_name: The name of the distribution package as a string. - :return: A ``Distribution`` instance (or subclass thereof). - """ - return Distribution.from_name(distribution_name) - - -def distributions(**kwargs): - """Get all ``Distribution`` instances in the current environment. - - :return: An iterable of ``Distribution`` instances. - """ - return Distribution.discover(**kwargs) - - -def metadata(distribution_name) -> _meta.PackageMetadata: - """Get the metadata for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: A PackageMetadata containing the parsed metadata. 
- """ - return Distribution.from_name(distribution_name).metadata - - -def version(distribution_name): - """Get the version string for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: The version string for the package as defined in the package's - "Version" metadata key. - """ - return distribution(distribution_name).version - - -def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: - """Return EntryPoint objects for all installed packages. - - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. - """ - norm_name = operator.attrgetter('_normalized_name') - unique = functools.partial(unique_everseen, key=norm_name) - eps = itertools.chain.from_iterable( - dist.entry_points for dist in unique(distributions()) - ) - return SelectableGroups.load(eps).select(**params) - - -def files(distribution_name): - """Return a list of files for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: List of files composing the distribution. - """ - return distribution(distribution_name).files - - -def requires(distribution_name): - """ - Return a list of requirements for the named package. - - :return: An iterator of requirements, suitable for - packaging.requirement.Requirement. - """ - return distribution(distribution_name).requires - - -def packages_distributions() -> Mapping[str, List[str]]: - """ - Return a mapping of top-level packages to their - distributions. 
- - >>> import collections.abc - >>> pkgs = packages_distributions() - >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) - True - """ - pkg_to_dist = collections.defaultdict(list) - for dist in distributions(): - for pkg in _top_level_declared(dist) or _top_level_inferred(dist): - pkg_to_dist[pkg].append(dist.metadata['Name']) - return dict(pkg_to_dist) - - -def _top_level_declared(dist): - return (dist.read_text('top_level.txt') or '').split() - - -def _top_level_inferred(dist): - return { - f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name - for f in always_iterable(dist.files) - if f.suffix == ".py" - } diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index c740bf6..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_adapters.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_adapters.cpython-39.pyc deleted file mode 100644 index 192bec2..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_adapters.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_collections.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_collections.cpython-39.pyc deleted file mode 100644 index 30c799f..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_collections.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-39.pyc deleted file mode 100644 index 0fffec6..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_functools.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_functools.cpython-39.pyc deleted file mode 100644 index ee2ef2e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_functools.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_itertools.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_itertools.cpython-39.pyc deleted file mode 100644 index 8c99a2c..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_itertools.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_meta.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_meta.cpython-39.pyc deleted file mode 100644 index 7fdff50..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_meta.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_text.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_text.cpython-39.pyc deleted file mode 100644 index 7e9bf60..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_text.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py deleted file mode 100644 index aa460d3..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py +++ /dev/null @@ -1,68 +0,0 @@ -import re -import textwrap -import email.message - -from ._text import FoldedCase - - -class Message(email.message.Message): - multiple_use_keys = set( - map( - FoldedCase, - [ - 'Classifier', - 'Obsoletes-Dist', - 'Platform', - 'Project-URL', - 'Provides-Dist', - 'Provides-Extra', - 'Requires-Dist', - 'Requires-External', - 'Supported-Platform', - 'Dynamic', - ], - ) - ) - """ - Keys that may be indicated multiple times per PEP 566. - """ - - def __new__(cls, orig: email.message.Message): - res = super().__new__(cls) - vars(res).update(vars(orig)) - return res - - def __init__(self, *args, **kwargs): - self._headers = self._repair_headers() - - # suppress spurious error from mypy - def __iter__(self): - return super().__iter__() - - def _repair_headers(self): - def redent(value): - "Correct for RFC822 indentation" - if not value or '\n' not in value: - return value - return textwrap.dedent(' ' * 8 + value) - - headers = [(key, redent(value)) for key, value in vars(self)['_headers']] - if self._payload: - headers.append(('Description', self.get_payload())) - return headers - - @property - def json(self): - """ - Convert PackageMetadata to a JSON-compatible format - per PEP 0566. - """ - - def transform(key): - value = self.get_all(key) if key in self.multiple_use_keys else self[key] - if key == 'Keywords': - value = re.split(r'\s+', value) - tk = key.lower().replace('-', '_') - return tk, value - - return dict(map(transform, map(FoldedCase, self))) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_collections.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_collections.py deleted file mode 100644 index cf0954e..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_collections.py +++ /dev/null @@ -1,30 +0,0 @@ -import collections - - -# from jaraco.collections 3.3 -class FreezableDefaultDict(collections.defaultdict): - """ - Often it is desirable to prevent the mutation of - a default dict after its initial construction, such - as to prevent mutation during iteration. 
- - >>> dd = FreezableDefaultDict(list) - >>> dd[0].append('1') - >>> dd.freeze() - >>> dd[1] - [] - >>> len(dd) - 1 - """ - - def __missing__(self, key): - return getattr(self, '_frozen', super().__missing__)(key) - - def freeze(self): - self._frozen = lambda key: self.default_factory() - - -class Pair(collections.namedtuple('Pair', 'name value')): - @classmethod - def parse(cls, text): - return cls(*map(str.strip, text.split("=", 1))) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_compat.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_compat.py deleted file mode 100644 index ef3136f..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_compat.py +++ /dev/null @@ -1,71 +0,0 @@ -import sys -import platform - - -__all__ = ['install', 'NullFinder', 'Protocol'] - - -try: - from typing import Protocol -except ImportError: # pragma: no cover - from ..typing_extensions import Protocol # type: ignore - - -def install(cls): - """ - Class decorator for installation on sys.meta_path. - - Adds the backport DistributionFinder to sys.meta_path and - attempts to disable the finder functionality of the stdlib - DistributionFinder. - """ - sys.meta_path.append(cls()) - disable_stdlib_finder() - return cls - - -def disable_stdlib_finder(): - """ - Give the backport primacy for discovering path-based distributions - by monkey-patching the stdlib O_O. - - See #91 for more background for rationale on this sketchy - behavior. - """ - - def matches(finder): - return getattr( - finder, '__module__', None - ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') - - for finder in filter(matches, sys.meta_path): # pragma: nocover - del finder.find_distributions - - -class NullFinder: - """ - A "Finder" (aka "MetaClassFinder") that never finds any modules, - but may find distributions. - """ - - @staticmethod - def find_spec(*args, **kwargs): - return None - - # In Python 2, the import system requires finders - # to have a find_module() method, but this usage - # is deprecated in Python 3 in favor of find_spec(). - # For the purposes of this finder (i.e. being present - # on sys.meta_path but having no other import - # system functionality), the two methods are identical. - find_module = find_spec - - -def pypy_partial(val): - """ - Adjust for variable stacklevel on partial under PyPy. - - Workaround for #327. - """ - is_pypy = platform.python_implementation() == 'PyPy' - return val + is_pypy diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_functools.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_functools.py deleted file mode 100644 index 71f66bd..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_functools.py +++ /dev/null @@ -1,104 +0,0 @@ -import types -import functools - - -# from jaraco.functools 3.3 -def method_cache(method, cache_wrapper=None): - """ - Wrap lru_cache to support storing the cache data in the object instances. - - Abstracts the common paradigm where the method explicitly saves an - underscore-prefixed protected property on first call and returns that - subsequently. - - >>> class MyClass: - ... calls = 0 - ... - ... @method_cache - ... def method(self, value): - ... self.calls += 1 - ... return value - - >>> a = MyClass() - >>> a.method(3) - 3 - >>> for x in range(75): - ... 
res = a.method(x) - >>> a.calls - 75 - - Note that the apparent behavior will be exactly like that of lru_cache - except that the cache is stored on each instance, so values in one - instance will not flush values from another, and when an instance is - deleted, so are the cached values for that instance. - - >>> b = MyClass() - >>> for x in range(35): - ... res = b.method(x) - >>> b.calls - 35 - >>> a.method(0) - 0 - >>> a.calls - 75 - - Note that if method had been decorated with ``functools.lru_cache()``, - a.calls would have been 76 (due to the cached value of 0 having been - flushed by the 'b' instance). - - Clear the cache with ``.cache_clear()`` - - >>> a.method.cache_clear() - - Same for a method that hasn't yet been called. - - >>> c = MyClass() - >>> c.method.cache_clear() - - Another cache wrapper may be supplied: - - >>> cache = functools.lru_cache(maxsize=2) - >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) - >>> a = MyClass() - >>> a.method2() - 3 - - Caution - do not subsequently wrap the method with another decorator, such - as ``@property``, which changes the semantics of the function. - - See also - http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ - for another implementation and additional justification. - """ - cache_wrapper = cache_wrapper or functools.lru_cache() - - def wrapper(self, *args, **kwargs): - # it's the first call, replace the method with a cached, bound method - bound_method = types.MethodType(method, self) - cached_method = cache_wrapper(bound_method) - setattr(self, method.__name__, cached_method) - return cached_method(*args, **kwargs) - - # Support cache clear even before cache has been created. - wrapper.cache_clear = lambda: None - - return wrapper - - -# From jaraco.functools 3.3 -def pass_none(func): - """ - Wrap func so it's not called if its first param is None - - >>> print_text = pass_none(print) - >>> print_text('text') - text - >>> print_text(None) - """ - - @functools.wraps(func) - def wrapper(param, *args, **kwargs): - if param is not None: - return func(param, *args, **kwargs) - - return wrapper diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py deleted file mode 100644 index d4ca9b9..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py +++ /dev/null @@ -1,73 +0,0 @@ -from itertools import filterfalse - - -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." 
- # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - - -# copied from more_itertools 8.8 -def always_iterable(obj, base_type=(str, bytes)): - """If *obj* is iterable, return an iterator over its items:: - - >>> obj = (1, 2, 3) - >>> list(always_iterable(obj)) - [1, 2, 3] - - If *obj* is not iterable, return a one-item iterable containing *obj*:: - - >>> obj = 1 - >>> list(always_iterable(obj)) - [1] - - If *obj* is ``None``, return an empty iterable: - - >>> obj = None - >>> list(always_iterable(None)) - [] - - By default, binary and text strings are not considered iterable:: - - >>> obj = 'foo' - >>> list(always_iterable(obj)) - ['foo'] - - If *base_type* is set, objects for which ``isinstance(obj, base_type)`` - returns ``True`` won't be considered iterable. - - >>> obj = {'a': 1} - >>> list(always_iterable(obj)) # Iterate over the dict's keys - ['a'] - >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit - [{'a': 1}] - - Set *base_type* to ``None`` to avoid any special handling and treat objects - Python considers iterable as iterable: - - >>> obj = 'foo' - >>> list(always_iterable(obj, base_type=None)) - ['f', 'o', 'o'] - """ - if obj is None: - return iter(()) - - if (base_type is not None) and isinstance(obj, base_type): - return iter((obj,)) - - try: - return iter(obj) - except TypeError: - return iter((obj,)) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_meta.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_meta.py deleted file mode 100644 index 37ee43e..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_meta.py +++ /dev/null @@ -1,48 +0,0 @@ -from ._compat import Protocol -from typing import Any, Dict, Iterator, List, TypeVar, Union - - -_T = TypeVar("_T") - - -class PackageMetadata(Protocol): - def __len__(self) -> int: - ... # pragma: no cover - - def __contains__(self, item: str) -> bool: - ... # pragma: no cover - - def __getitem__(self, key: str) -> str: - ... # pragma: no cover - - def __iter__(self) -> Iterator[str]: - ... # pragma: no cover - - def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: - """ - Return all values associated with a possibly multi-valued key. - """ - - @property - def json(self) -> Dict[str, Union[str, List[str]]]: - """ - A JSON-compatible form of the metadata. - """ - - -class SimplePath(Protocol): - """ - A minimal subset of pathlib.Path required by PathDistribution. - """ - - def joinpath(self) -> 'SimplePath': - ... # pragma: no cover - - def __truediv__(self) -> 'SimplePath': - ... # pragma: no cover - - def parent(self) -> 'SimplePath': - ... # pragma: no cover - - def read_text(self) -> str: - ... 
# pragma: no cover diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_text.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_text.py deleted file mode 100644 index c88cfbb..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_metadata/_text.py +++ /dev/null @@ -1,99 +0,0 @@ -import re - -from ._functools import method_cache - - -# from jaraco.text 3.5 -class FoldedCase(str): - """ - A case insensitive string class; behaves just like str - except compares equal when the only variation is case. - - >>> s = FoldedCase('hello world') - - >>> s == 'Hello World' - True - - >>> 'Hello World' == s - True - - >>> s != 'Hello World' - False - - >>> s.index('O') - 4 - - >>> s.split('O') - ['hell', ' w', 'rld'] - - >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) - ['alpha', 'Beta', 'GAMMA'] - - Sequence membership is straightforward. - - >>> "Hello World" in [s] - True - >>> s in ["Hello World"] - True - - You may test for set inclusion, but candidate and elements - must both be folded. - - >>> FoldedCase("Hello World") in {s} - True - >>> s in {FoldedCase("Hello World")} - True - - String inclusion works as long as the FoldedCase object - is on the right. - - >>> "hello" in FoldedCase("Hello World") - True - - But not if the FoldedCase object is on the left: - - >>> FoldedCase('hello') in 'Hello World' - False - - In that case, use in_: - - >>> FoldedCase('hello').in_('Hello World') - True - - >>> FoldedCase('hello') > FoldedCase('Hello') - False - """ - - def __lt__(self, other): - return self.lower() < other.lower() - - def __gt__(self, other): - return self.lower() > other.lower() - - def __eq__(self, other): - return self.lower() == other.lower() - - def __ne__(self, other): - return self.lower() != other.lower() - - def __hash__(self): - return hash(self.lower()) - - def __contains__(self, other): - return super().lower().__contains__(other.lower()) - - def in_(self, other): - "Does self appear in other?" - return self in FoldedCase(other) - - # cache lower since it's likely to be called frequently. 
- @method_cache - def lower(self): - return super().lower() - - def index(self, sub): - return self.lower().index(sub.lower()) - - def split(self, splitter=' ', maxsplit=0): - pattern = re.compile(re.escape(splitter), re.I) - return pattern.split(self, maxsplit) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__init__.py deleted file mode 100644 index 34e3a99..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Read resources contained within a package.""" - -from ._common import ( - as_file, - files, - Package, -) - -from ._legacy import ( - contents, - open_binary, - read_binary, - open_text, - read_text, - is_resource, - path, - Resource, -) - -from .abc import ResourceReader - - -__all__ = [ - 'Package', - 'Resource', - 'ResourceReader', - 'as_file', - 'contents', - 'files', - 'is_resource', - 'open_binary', - 'open_text', - 'path', - 'read_binary', - 'read_text', -] diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 92e631f..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_adapters.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_adapters.cpython-39.pyc deleted file mode 100644 index 8804afc..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_adapters.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_common.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_common.cpython-39.pyc deleted file mode 100644 index 05168f7..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_common.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_compat.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_compat.cpython-39.pyc deleted file mode 100644 index b91eed4..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_itertools.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_itertools.cpython-39.pyc deleted file mode 100644 index 6ba28e1..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_itertools.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_legacy.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_legacy.cpython-39.pyc deleted file mode 100644 index 3365a5e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/_legacy.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/abc.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/abc.cpython-39.pyc deleted file mode 100644 index 7e15f3f..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/abc.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/readers.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/readers.cpython-39.pyc deleted file mode 100644 index 11edfb4..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/readers.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/simple.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/simple.cpython-39.pyc deleted file mode 100644 index 6b33e66..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/__pycache__/simple.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_adapters.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_adapters.py deleted file mode 100644 index ea363d8..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_adapters.py +++ /dev/null @@ -1,170 +0,0 @@ -from contextlib import suppress -from io import TextIOWrapper - -from . import abc - - -class SpecLoaderAdapter: - """ - Adapt a package spec to adapt the underlying loader. - """ - - def __init__(self, spec, adapter=lambda spec: spec.loader): - self.spec = spec - self.loader = adapter(spec) - - def __getattr__(self, name): - return getattr(self.spec, name) - - -class TraversableResourcesLoader: - """ - Adapt a loader to provide TraversableResources. - """ - - def __init__(self, spec): - self.spec = spec - - def get_resource_reader(self, name): - return CompatibilityFiles(self.spec)._native() - - -def _io_wrapper(file, mode='r', *args, **kwargs): - if mode == 'r': - return TextIOWrapper(file, *args, **kwargs) - elif mode == 'rb': - return file - raise ValueError( - "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) - ) - - -class CompatibilityFiles: - """ - Adapter for an existing or non-existent resource reader - to provide a compatibility .files(). - """ - - class SpecPath(abc.Traversable): - """ - Path tied to a module spec. - Can be read and exposes the resource reader children. - """ - - def __init__(self, spec, reader): - self._spec = spec - self._reader = reader - - def iterdir(self): - if not self._reader: - return iter(()) - return iter( - CompatibilityFiles.ChildPath(self._reader, path) - for path in self._reader.contents() - ) - - def is_file(self): - return False - - is_dir = is_file - - def joinpath(self, other): - if not self._reader: - return CompatibilityFiles.OrphanPath(other) - return CompatibilityFiles.ChildPath(self._reader, other) - - @property - def name(self): - return self._spec.name - - def open(self, mode='r', *args, **kwargs): - return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs) - - class ChildPath(abc.Traversable): - """ - Path tied to a resource reader child. - Can be read but doesn't expose any meaningful children. 
- """ - - def __init__(self, reader, name): - self._reader = reader - self._name = name - - def iterdir(self): - return iter(()) - - def is_file(self): - return self._reader.is_resource(self.name) - - def is_dir(self): - return not self.is_file() - - def joinpath(self, other): - return CompatibilityFiles.OrphanPath(self.name, other) - - @property - def name(self): - return self._name - - def open(self, mode='r', *args, **kwargs): - return _io_wrapper( - self._reader.open_resource(self.name), mode, *args, **kwargs - ) - - class OrphanPath(abc.Traversable): - """ - Orphan path, not tied to a module spec or resource reader. - Can't be read and doesn't expose any meaningful children. - """ - - def __init__(self, *path_parts): - if len(path_parts) < 1: - raise ValueError('Need at least one path part to construct a path') - self._path = path_parts - - def iterdir(self): - return iter(()) - - def is_file(self): - return False - - is_dir = is_file - - def joinpath(self, other): - return CompatibilityFiles.OrphanPath(*self._path, other) - - @property - def name(self): - return self._path[-1] - - def open(self, mode='r', *args, **kwargs): - raise FileNotFoundError("Can't open orphan path") - - def __init__(self, spec): - self.spec = spec - - @property - def _reader(self): - with suppress(AttributeError): - return self.spec.loader.get_resource_reader(self.spec.name) - - def _native(self): - """ - Return the native reader if it supports files(). - """ - reader = self._reader - return reader if hasattr(reader, 'files') else self - - def __getattr__(self, attr): - return getattr(self._reader, attr) - - def files(self): - return CompatibilityFiles.SpecPath(self.spec, self._reader) - - -def wrap_spec(package): - """ - Construct a package spec with traversable compatibility - on the spec/loader/reader. - """ - return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_common.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_common.py deleted file mode 100644 index a12e2c7..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_common.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import pathlib -import tempfile -import functools -import contextlib -import types -import importlib - -from typing import Union, Optional -from .abc import ResourceReader, Traversable - -from ._compat import wrap_spec - -Package = Union[types.ModuleType, str] - - -def files(package): - # type: (Package) -> Traversable - """ - Get a Traversable resource from a package - """ - return from_package(get_package(package)) - - -def get_resource_reader(package): - # type: (types.ModuleType) -> Optional[ResourceReader] - """ - Return the package's loader if it's a ResourceReader. - """ - # We can't use - # a issubclass() check here because apparently abc.'s __subclasscheck__() - # hook wants to create a weak reference to the object, but - # zipimport.zipimporter does not support weak references, resulting in a - # TypeError. That seems terrible. - spec = package.__spec__ - reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore - if reader is None: - return None - return reader(spec.name) # type: ignore - - -def resolve(cand): - # type: (Package) -> types.ModuleType - return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) - - -def get_package(package): - # type: (Package) -> types.ModuleType - """Take a package name or module object and return the module. 
- - Raise an exception if the resolved module is not a package. - """ - resolved = resolve(package) - if wrap_spec(resolved).submodule_search_locations is None: - raise TypeError(f'{package!r} is not a package') - return resolved - - -def from_package(package): - """ - Return a Traversable object for the given package. - - """ - spec = wrap_spec(package) - reader = spec.loader.get_resource_reader(spec.name) - return reader.files() - - -@contextlib.contextmanager -def _tempfile(reader, suffix=''): - # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' - # blocks due to the need to close the temporary file to work on Windows - # properly. - fd, raw_path = tempfile.mkstemp(suffix=suffix) - try: - try: - os.write(fd, reader()) - finally: - os.close(fd) - del reader - yield pathlib.Path(raw_path) - finally: - try: - os.remove(raw_path) - except FileNotFoundError: - pass - - -@functools.singledispatch -def as_file(path): - """ - Given a Traversable object, return that object as a - path on the local file system in a context manager. - """ - return _tempfile(path.read_bytes, suffix=path.name) - - -@as_file.register(pathlib.Path) -@contextlib.contextmanager -def _(path): - """ - Degenerate behavior for pathlib.Path objects. - """ - yield path diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_compat.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_compat.py deleted file mode 100644 index cb9fc82..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_compat.py +++ /dev/null @@ -1,98 +0,0 @@ -# flake8: noqa - -import abc -import sys -import pathlib -from contextlib import suppress - -if sys.version_info >= (3, 10): - from zipfile import Path as ZipPath # type: ignore -else: - from ..zipp import Path as ZipPath # type: ignore - - -try: - from typing import runtime_checkable # type: ignore -except ImportError: - - def runtime_checkable(cls): # type: ignore - return cls - - -try: - from typing import Protocol # type: ignore -except ImportError: - Protocol = abc.ABC # type: ignore - - -class TraversableResourcesLoader: - """ - Adapt loaders to provide TraversableResources and other - compatibility. - - Used primarily for Python 3.9 and earlier where the native - loaders do not yet implement TraversableResources. - """ - - def __init__(self, spec): - self.spec = spec - - @property - def path(self): - return self.spec.origin - - def get_resource_reader(self, name): - from . 
import readers, _adapters - - def _zip_reader(spec): - with suppress(AttributeError): - return readers.ZipReader(spec.loader, spec.name) - - def _namespace_reader(spec): - with suppress(AttributeError, ValueError): - return readers.NamespaceReader(spec.submodule_search_locations) - - def _available_reader(spec): - with suppress(AttributeError): - return spec.loader.get_resource_reader(spec.name) - - def _native_reader(spec): - reader = _available_reader(spec) - return reader if hasattr(reader, 'files') else None - - def _file_reader(spec): - try: - path = pathlib.Path(self.path) - except TypeError: - return None - if path.exists(): - return readers.FileReader(self) - - return ( - # native reader if it supplies 'files' - _native_reader(self.spec) - or - # local ZipReader if a zip module - _zip_reader(self.spec) - or - # local NamespaceReader if a namespace module - _namespace_reader(self.spec) - or - # local FileReader - _file_reader(self.spec) - # fallback - adapt the spec ResourceReader to TraversableReader - or _adapters.CompatibilityFiles(self.spec) - ) - - -def wrap_spec(package): - """ - Construct a package spec with traversable compatibility - on the spec/loader/reader. - - Supersedes _adapters.wrap_spec to use TraversableResourcesLoader - from above for older Python compatibility (<3.10). - """ - from . import _adapters - - return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_itertools.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_itertools.py deleted file mode 100644 index cce0558..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_itertools.py +++ /dev/null @@ -1,35 +0,0 @@ -from itertools import filterfalse - -from typing import ( - Callable, - Iterable, - Iterator, - Optional, - Set, - TypeVar, - Union, -) - -# Type and type variable definitions -_T = TypeVar('_T') -_U = TypeVar('_U') - - -def unique_everseen( - iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None -) -> Iterator[_T]: - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen: Set[Union[_T, _U]] = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_legacy.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_legacy.py deleted file mode 100644 index 1d5d3f1..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/_legacy.py +++ /dev/null @@ -1,121 +0,0 @@ -import functools -import os -import pathlib -import types -import warnings - -from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any - -from . import _common - -Package = Union[types.ModuleType, str] -Resource = str - - -def deprecated(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - warnings.warn( - f"{func.__name__} is deprecated. Use files() instead. 
" - "Refer to https://importlib-resources.readthedocs.io" - "/en/latest/using.html#migrating-from-legacy for migration advice.", - DeprecationWarning, - stacklevel=2, - ) - return func(*args, **kwargs) - - return wrapper - - -def normalize_path(path): - # type: (Any) -> str - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - str_path = str(path) - parent, file_name = os.path.split(str_path) - if parent: - raise ValueError(f'{path!r} must be only a file name') - return file_name - - -@deprecated -def open_binary(package: Package, resource: Resource) -> BinaryIO: - """Return a file-like object opened for binary reading of the resource.""" - return (_common.files(package) / normalize_path(resource)).open('rb') - - -@deprecated -def read_binary(package: Package, resource: Resource) -> bytes: - """Return the binary contents of the resource.""" - return (_common.files(package) / normalize_path(resource)).read_bytes() - - -@deprecated -def open_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict', -) -> TextIO: - """Return a file-like object opened for text reading of the resource.""" - return (_common.files(package) / normalize_path(resource)).open( - 'r', encoding=encoding, errors=errors - ) - - -@deprecated -def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict', -) -> str: - """Return the decoded string of the resource. - - The decoding-related arguments have the same semantics as those of - bytes.decode(). - """ - with open_text(package, resource, encoding, errors) as fp: - return fp.read() - - -@deprecated -def contents(package: Package) -> Iterable[str]: - """Return an iterable of entries in `package`. - - Note that not all entries are resources. Specifically, directories are - not considered resources. Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ - return [path.name for path in _common.files(package).iterdir()] - - -@deprecated -def is_resource(package: Package, name: str) -> bool: - """True if `name` is a resource inside `package`. - - Directories are *not* resources. - """ - resource = normalize_path(name) - return any( - traversable.name == resource and traversable.is_file() - for traversable in _common.files(package).iterdir() - ) - - -@deprecated -def path( - package: Package, - resource: Resource, -) -> ContextManager[pathlib.Path]: - """A context manager providing a file path object to the resource. - - If the resource does not already exist on its own on the file system, - a temporary file will be created. If the file was created, the file - will be deleted upon exiting the context manager (no exception is - raised if the file was deleted prior to the context manager - exiting). 
- """ - return _common.as_file(_common.files(package) / normalize_path(resource)) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/abc.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/abc.py deleted file mode 100644 index d39dc1a..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/abc.py +++ /dev/null @@ -1,137 +0,0 @@ -import abc -from typing import BinaryIO, Iterable, Text - -from ._compat import runtime_checkable, Protocol - - -class ResourceReader(metaclass=abc.ABCMeta): - """Abstract base class for loaders to provide resource reading support.""" - - @abc.abstractmethod - def open_resource(self, resource: Text) -> BinaryIO: - """Return an opened, file-like object for binary reading. - - The 'resource' argument is expected to represent only a file name. - If the resource cannot be found, FileNotFoundError is raised. - """ - # This deliberately raises FileNotFoundError instead of - # NotImplementedError so that if this method is accidentally called, - # it'll still do the right thing. - raise FileNotFoundError - - @abc.abstractmethod - def resource_path(self, resource: Text) -> Text: - """Return the file system path to the specified resource. - - The 'resource' argument is expected to represent only a file name. - If the resource does not exist on the file system, raise - FileNotFoundError. - """ - # This deliberately raises FileNotFoundError instead of - # NotImplementedError so that if this method is accidentally called, - # it'll still do the right thing. - raise FileNotFoundError - - @abc.abstractmethod - def is_resource(self, path: Text) -> bool: - """Return True if the named 'path' is a resource. - - Files are resources, directories are not. - """ - raise FileNotFoundError - - @abc.abstractmethod - def contents(self) -> Iterable[str]: - """Return an iterable of entries in `package`.""" - raise FileNotFoundError - - -@runtime_checkable -class Traversable(Protocol): - """ - An object with a subset of pathlib.Path methods suitable for - traversing directories and opening files. - """ - - @abc.abstractmethod - def iterdir(self): - """ - Yield Traversable objects in self - """ - - def read_bytes(self): - """ - Read contents of self as bytes - """ - with self.open('rb') as strm: - return strm.read() - - def read_text(self, encoding=None): - """ - Read contents of self as text - """ - with self.open(encoding=encoding) as strm: - return strm.read() - - @abc.abstractmethod - def is_dir(self) -> bool: - """ - Return True if self is a directory - """ - - @abc.abstractmethod - def is_file(self) -> bool: - """ - Return True if self is a file - """ - - @abc.abstractmethod - def joinpath(self, child): - """ - Return Traversable child in self - """ - - def __truediv__(self, child): - """ - Return Traversable child in self - """ - return self.joinpath(child) - - @abc.abstractmethod - def open(self, mode='r', *args, **kwargs): - """ - mode may be 'r' or 'rb' to open as text or binary. Return a handle - suitable for reading (same as pathlib.Path.open). - - When opening as text, accepts encoding parameters such as those - accepted by io.TextIOWrapper. - """ - - @abc.abstractproperty - def name(self) -> str: - """ - The base name of this object without any parent references. - """ - - -class TraversableResources(ResourceReader): - """ - The required interface for providing traversable - resources. 
- """ - - @abc.abstractmethod - def files(self): - """Return a Traversable object for the loaded package.""" - - def open_resource(self, resource): - return self.files().joinpath(resource).open('rb') - - def resource_path(self, resource): - raise FileNotFoundError(resource) - - def is_resource(self, path): - return self.files().joinpath(path).is_file() - - def contents(self): - return (item.name for item in self.files().iterdir()) diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/readers.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/readers.py deleted file mode 100644 index f1190ca..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/readers.py +++ /dev/null @@ -1,122 +0,0 @@ -import collections -import pathlib -import operator - -from . import abc - -from ._itertools import unique_everseen -from ._compat import ZipPath - - -def remove_duplicates(items): - return iter(collections.OrderedDict.fromkeys(items)) - - -class FileReader(abc.TraversableResources): - def __init__(self, loader): - self.path = pathlib.Path(loader.path).parent - - def resource_path(self, resource): - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - return str(self.path.joinpath(resource)) - - def files(self): - return self.path - - -class ZipReader(abc.TraversableResources): - def __init__(self, loader, module): - _, _, name = module.rpartition('.') - self.prefix = loader.prefix.replace('\\', '/') + name + '/' - self.archive = loader.archive - - def open_resource(self, resource): - try: - return super().open_resource(resource) - except KeyError as exc: - raise FileNotFoundError(exc.args[0]) - - def is_resource(self, path): - # workaround for `zipfile.Path.is_file` returning true - # for non-existent paths. - target = self.files().joinpath(path) - return target.is_file() and target.exists() - - def files(self): - return ZipPath(self.archive, self.prefix) - - -class MultiplexedPath(abc.Traversable): - """ - Given a series of Traversable objects, implement a merged - version of the interface across all objects. Useful for - namespace packages which may be multihomed at a single - name. 
- """ - - def __init__(self, *paths): - self._paths = list(map(pathlib.Path, remove_duplicates(paths))) - if not self._paths: - message = 'MultiplexedPath must contain at least one path' - raise FileNotFoundError(message) - if not all(path.is_dir() for path in self._paths): - raise NotADirectoryError('MultiplexedPath only supports directories') - - def iterdir(self): - files = (file for path in self._paths for file in path.iterdir()) - return unique_everseen(files, key=operator.attrgetter('name')) - - def read_bytes(self): - raise FileNotFoundError(f'{self} is not a file') - - def read_text(self, *args, **kwargs): - raise FileNotFoundError(f'{self} is not a file') - - def is_dir(self): - return True - - def is_file(self): - return False - - def joinpath(self, child): - # first try to find child in current paths - for file in self.iterdir(): - if file.name == child: - return file - # if it does not exist, construct it with the first path - return self._paths[0] / child - - __truediv__ = joinpath - - def open(self, *args, **kwargs): - raise FileNotFoundError(f'{self} is not a file') - - @property - def name(self): - return self._paths[0].name - - def __repr__(self): - paths = ', '.join(f"'{path}'" for path in self._paths) - return f'MultiplexedPath({paths})' - - -class NamespaceReader(abc.TraversableResources): - def __init__(self, namespace_path): - if 'NamespacePath' not in str(namespace_path): - raise ValueError('Invalid path') - self.path = MultiplexedPath(*list(namespace_path)) - - def resource_path(self, resource): - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - return str(self.path.joinpath(resource)) - - def files(self): - return self.path diff --git a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/simple.py b/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/simple.py deleted file mode 100644 index da073cb..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/importlib_resources/simple.py +++ /dev/null @@ -1,116 +0,0 @@ -""" -Interface adapters for low-level readers. -""" - -import abc -import io -import itertools -from typing import BinaryIO, List - -from .abc import Traversable, TraversableResources - - -class SimpleReader(abc.ABC): - """ - The minimum, low-level interface required from a resource - provider. - """ - - @abc.abstractproperty - def package(self): - # type: () -> str - """ - The name of the package for which this reader loads resources. - """ - - @abc.abstractmethod - def children(self): - # type: () -> List['SimpleReader'] - """ - Obtain an iterable of SimpleReader for available - child containers (e.g. directories). - """ - - @abc.abstractmethod - def resources(self): - # type: () -> List[str] - """ - Obtain available named resources for this virtual package. - """ - - @abc.abstractmethod - def open_binary(self, resource): - # type: (str) -> BinaryIO - """ - Obtain a File-like for a named resource. - """ - - @property - def name(self): - return self.package.split('.')[-1] - - -class ResourceHandle(Traversable): - """ - Handle to a named resource in a ResourceReader. 
- """ - - def __init__(self, parent, name): - # type: (ResourceContainer, str) -> None - self.parent = parent - self.name = name # type: ignore - - def is_file(self): - return True - - def is_dir(self): - return False - - def open(self, mode='r', *args, **kwargs): - stream = self.parent.reader.open_binary(self.name) - if 'b' not in mode: - stream = io.TextIOWrapper(*args, **kwargs) - return stream - - def joinpath(self, name): - raise RuntimeError("Cannot traverse into a resource") - - -class ResourceContainer(Traversable): - """ - Traversable container for a package's resources via its reader. - """ - - def __init__(self, reader): - # type: (SimpleReader) -> None - self.reader = reader - - def is_dir(self): - return True - - def is_file(self): - return False - - def iterdir(self): - files = (ResourceHandle(self, name) for name in self.reader.resources) - dirs = map(ResourceContainer, self.reader.children()) - return itertools.chain(files, dirs) - - def open(self, *args, **kwargs): - raise IsADirectoryError() - - def joinpath(self, name): - return next( - traversable for traversable in self.iterdir() if traversable.name == name - ) - - -class TraversableReader(TraversableResources, SimpleReader): - """ - A TraversableResources based on SimpleReader. Resource providers - may derive from this class to provide the TraversableResources - interface by supplying the SimpleReader interface. - """ - - def files(self): - return ResourceContainer(self) diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/jaraco/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index dfb4dbe..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-39.pyc deleted file mode 100644 index 4544f57..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/context.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/functools.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/functools.cpython-39.pyc deleted file mode 100644 index f667439..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/jaraco/__pycache__/functools.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/context.py b/venv/Lib/site-packages/setuptools/_vendor/jaraco/context.py deleted file mode 100644 index 87a4e3d..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/jaraco/context.py +++ /dev/null @@ -1,213 +0,0 @@ -import os -import subprocess -import contextlib -import functools -import tempfile -import shutil -import operator - - -@contextlib.contextmanager -def pushd(dir): - orig = os.getcwd() - os.chdir(dir) - try: - yield dir - finally: - os.chdir(orig) - - -@contextlib.contextmanager -def tarball_context(url, target_dir=None, runner=None, pushd=pushd): - """ - Get a tarball, extract it, change to that directory, yield, then - clean up. - `runner` is the function to invoke commands. - `pushd` is a context manager for changing the directory. 
- """ - if target_dir is None: - target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') - if runner is None: - runner = functools.partial(subprocess.check_call, shell=True) - # In the tar command, use --strip-components=1 to strip the first path and - # then - # use -C to cause the files to be extracted to {target_dir}. This ensures - # that we always know where the files were extracted. - runner('mkdir {target_dir}'.format(**vars())) - try: - getter = 'wget {url} -O -' - extract = 'tar x{compression} --strip-components=1 -C {target_dir}' - cmd = ' | '.join((getter, extract)) - runner(cmd.format(compression=infer_compression(url), **vars())) - with pushd(target_dir): - yield target_dir - finally: - runner('rm -Rf {target_dir}'.format(**vars())) - - -def infer_compression(url): - """ - Given a URL or filename, infer the compression code for tar. - """ - # cheat and just assume it's the last two characters - compression_indicator = url[-2:] - mapping = dict(gz='z', bz='j', xz='J') - # Assume 'z' (gzip) if no match - return mapping.get(compression_indicator, 'z') - - -@contextlib.contextmanager -def temp_dir(remover=shutil.rmtree): - """ - Create a temporary directory context. Pass a custom remover - to override the removal behavior. - """ - temp_dir = tempfile.mkdtemp() - try: - yield temp_dir - finally: - remover(temp_dir) - - -@contextlib.contextmanager -def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): - """ - Check out the repo indicated by url. - - If dest_ctx is supplied, it should be a context manager - to yield the target directory for the check out. - """ - exe = 'git' if 'git' in url else 'hg' - with dest_ctx() as repo_dir: - cmd = [exe, 'clone', url, repo_dir] - if branch: - cmd.extend(['--branch', branch]) - devnull = open(os.path.devnull, 'w') - stdout = devnull if quiet else None - subprocess.check_call(cmd, stdout=stdout) - yield repo_dir - - -@contextlib.contextmanager -def null(): - yield - - -class ExceptionTrap: - """ - A context manager that will catch certain exceptions and provide an - indication they occurred. - - >>> with ExceptionTrap() as trap: - ... raise Exception() - >>> bool(trap) - True - - >>> with ExceptionTrap() as trap: - ... pass - >>> bool(trap) - False - - >>> with ExceptionTrap(ValueError) as trap: - ... raise ValueError("1 + 1 is not 3") - >>> bool(trap) - True - - >>> with ExceptionTrap(ValueError) as trap: - ... raise Exception() - Traceback (most recent call last): - ... - Exception - - >>> bool(trap) - False - """ - - exc_info = None, None, None - - def __init__(self, exceptions=(Exception,)): - self.exceptions = exceptions - - def __enter__(self): - return self - - @property - def type(self): - return self.exc_info[0] - - @property - def value(self): - return self.exc_info[1] - - @property - def tb(self): - return self.exc_info[2] - - def __exit__(self, *exc_info): - type = exc_info[0] - matches = type and issubclass(type, self.exceptions) - if matches: - self.exc_info = exc_info - return matches - - def __bool__(self): - return bool(self.type) - - def raises(self, func, *, _test=bool): - """ - Wrap func and replace the result with the truth - value of the trap (True if an exception occurred). - - First, give the decorator an alias to support Python 3.8 - Syntax. - - >>> raises = ExceptionTrap(ValueError).raises - - Now decorate a function that always fails. - - >>> @raises - ... def fail(): - ... 
raise ValueError('failed') - >>> fail() - True - """ - - @functools.wraps(func) - def wrapper(*args, **kwargs): - with ExceptionTrap(self.exceptions) as trap: - func(*args, **kwargs) - return _test(trap) - - return wrapper - - def passes(self, func): - """ - Wrap func and replace the result with the truth - value of the trap (True if no exception). - - First, give the decorator an alias to support Python 3.8 - Syntax. - - >>> passes = ExceptionTrap(ValueError).passes - - Now decorate a function that always fails. - - >>> @passes - ... def fail(): - ... raise ValueError('failed') - - >>> fail() - False - """ - return self.raises(func, _test=operator.not_) - - -class suppress(contextlib.suppress, contextlib.ContextDecorator): - """ - A version of contextlib.suppress with decorator support. - - >>> @suppress(KeyError) - ... def key_error(): - ... {}[''] - >>> key_error() - """ diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/functools.py b/venv/Lib/site-packages/setuptools/_vendor/jaraco/functools.py deleted file mode 100644 index bbd8b29..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/jaraco/functools.py +++ /dev/null @@ -1,525 +0,0 @@ -import functools -import time -import inspect -import collections -import types -import itertools - -import setuptools.extern.more_itertools - -from typing import Callable, TypeVar - - -CallableT = TypeVar("CallableT", bound=Callable[..., object]) - - -def compose(*funcs): - """ - Compose any number of unary functions into a single unary function. - - >>> import textwrap - >>> expected = str.strip(textwrap.dedent(compose.__doc__)) - >>> strip_and_dedent = compose(str.strip, textwrap.dedent) - >>> strip_and_dedent(compose.__doc__) == expected - True - - Compose also allows the innermost function to take arbitrary arguments. - - >>> round_three = lambda x: round(x, ndigits=3) - >>> f = compose(round_three, int.__truediv__) - >>> [f(3*x, x+1) for x in range(1,10)] - [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7] - """ - - def compose_two(f1, f2): - return lambda *args, **kwargs: f1(f2(*args, **kwargs)) - - return functools.reduce(compose_two, funcs) - - -def method_caller(method_name, *args, **kwargs): - """ - Return a function that will call a named method on the - target object with optional positional and keyword - arguments. - - >>> lower = method_caller('lower') - >>> lower('MyString') - 'mystring' - """ - - def call_method(target): - func = getattr(target, method_name) - return func(*args, **kwargs) - - return call_method - - -def once(func): - """ - Decorate func so it's only ever called the first time. - - This decorator can ensure that an expensive or non-idempotent function - will not be expensive on subsequent calls and is idempotent. - - >>> add_three = once(lambda a: a+3) - >>> add_three(3) - 6 - >>> add_three(9) - 6 - >>> add_three('12') - 6 - - To reset the stored value, simply clear the property ``saved_result``. - - >>> del add_three.saved_result - >>> add_three(9) - 12 - >>> add_three(8) - 12 - - Or invoke 'reset()' on it. 
- - >>> add_three.reset() - >>> add_three(-3) - 0 - >>> add_three(0) - 0 - """ - - @functools.wraps(func) - def wrapper(*args, **kwargs): - if not hasattr(wrapper, 'saved_result'): - wrapper.saved_result = func(*args, **kwargs) - return wrapper.saved_result - - wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result') - return wrapper - - -def method_cache( - method: CallableT, - cache_wrapper: Callable[ - [CallableT], CallableT - ] = functools.lru_cache(), # type: ignore[assignment] -) -> CallableT: - """ - Wrap lru_cache to support storing the cache data in the object instances. - - Abstracts the common paradigm where the method explicitly saves an - underscore-prefixed protected property on first call and returns that - subsequently. - - >>> class MyClass: - ... calls = 0 - ... - ... @method_cache - ... def method(self, value): - ... self.calls += 1 - ... return value - - >>> a = MyClass() - >>> a.method(3) - 3 - >>> for x in range(75): - ... res = a.method(x) - >>> a.calls - 75 - - Note that the apparent behavior will be exactly like that of lru_cache - except that the cache is stored on each instance, so values in one - instance will not flush values from another, and when an instance is - deleted, so are the cached values for that instance. - - >>> b = MyClass() - >>> for x in range(35): - ... res = b.method(x) - >>> b.calls - 35 - >>> a.method(0) - 0 - >>> a.calls - 75 - - Note that if method had been decorated with ``functools.lru_cache()``, - a.calls would have been 76 (due to the cached value of 0 having been - flushed by the 'b' instance). - - Clear the cache with ``.cache_clear()`` - - >>> a.method.cache_clear() - - Same for a method that hasn't yet been called. - - >>> c = MyClass() - >>> c.method.cache_clear() - - Another cache wrapper may be supplied: - - >>> cache = functools.lru_cache(maxsize=2) - >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) - >>> a = MyClass() - >>> a.method2() - 3 - - Caution - do not subsequently wrap the method with another decorator, such - as ``@property``, which changes the semantics of the function. - - See also - http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ - for another implementation and additional justification. - """ - - def wrapper(self: object, *args: object, **kwargs: object) -> object: - # it's the first call, replace the method with a cached, bound method - bound_method: CallableT = types.MethodType( # type: ignore[assignment] - method, self - ) - cached_method = cache_wrapper(bound_method) - setattr(self, method.__name__, cached_method) - return cached_method(*args, **kwargs) - - # Support cache clear even before cache has been created. - wrapper.cache_clear = lambda: None # type: ignore[attr-defined] - - return ( # type: ignore[return-value] - _special_method_cache(method, cache_wrapper) or wrapper - ) - - -def _special_method_cache(method, cache_wrapper): - """ - Because Python treats special methods differently, it's not - possible to use instance attributes to implement the cached - methods. - - Instead, install the wrapper method under a different name - and return a simple proxy to that wrapper. 
- - https://github.com/jaraco/jaraco.functools/issues/5 - """ - name = method.__name__ - special_names = '__getattr__', '__getitem__' - if name not in special_names: - return - - wrapper_name = '__cached' + name - - def proxy(self, *args, **kwargs): - if wrapper_name not in vars(self): - bound = types.MethodType(method, self) - cache = cache_wrapper(bound) - setattr(self, wrapper_name, cache) - else: - cache = getattr(self, wrapper_name) - return cache(*args, **kwargs) - - return proxy - - -def apply(transform): - """ - Decorate a function with a transform function that is - invoked on results returned from the decorated function. - - >>> @apply(reversed) - ... def get_numbers(start): - ... "doc for get_numbers" - ... return range(start, start+3) - >>> list(get_numbers(4)) - [6, 5, 4] - >>> get_numbers.__doc__ - 'doc for get_numbers' - """ - - def wrap(func): - return functools.wraps(func)(compose(transform, func)) - - return wrap - - -def result_invoke(action): - r""" - Decorate a function with an action function that is - invoked on the results returned from the decorated - function (for its side-effect), then return the original - result. - - >>> @result_invoke(print) - ... def add_two(a, b): - ... return a + b - >>> x = add_two(2, 3) - 5 - >>> x - 5 - """ - - def wrap(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - result = func(*args, **kwargs) - action(result) - return result - - return wrapper - - return wrap - - -def call_aside(f, *args, **kwargs): - """ - Call a function for its side effect after initialization. - - >>> @call_aside - ... def func(): print("called") - called - >>> func() - called - - Use functools.partial to pass parameters to the initial call - - >>> @functools.partial(call_aside, name='bingo') - ... def func(name): print("called with", name) - called with bingo - """ - f(*args, **kwargs) - return f - - -class Throttler: - """ - Rate-limit a function (or other callable) - """ - - def __init__(self, func, max_rate=float('Inf')): - if isinstance(func, Throttler): - func = func.func - self.func = func - self.max_rate = max_rate - self.reset() - - def reset(self): - self.last_called = 0 - - def __call__(self, *args, **kwargs): - self._wait() - return self.func(*args, **kwargs) - - def _wait(self): - "ensure at least 1/max_rate seconds from last call" - elapsed = time.time() - self.last_called - must_wait = 1 / self.max_rate - elapsed - time.sleep(max(0, must_wait)) - self.last_called = time.time() - - def __get__(self, obj, type=None): - return first_invoke(self._wait, functools.partial(self.func, obj)) - - -def first_invoke(func1, func2): - """ - Return a function that when invoked will invoke func1 without - any parameters (for its side-effect) and then invoke func2 - with whatever parameters were passed, returning its result. - """ - - def wrapper(*args, **kwargs): - func1() - return func2(*args, **kwargs) - - return wrapper - - -def retry_call(func, cleanup=lambda: None, retries=0, trap=()): - """ - Given a callable func, trap the indicated exceptions - for up to 'retries' times, invoking cleanup on the - exception. On the final attempt, allow any exceptions - to propagate. - """ - attempts = itertools.count() if retries == float('inf') else range(retries) - for attempt in attempts: - try: - return func() - except trap: - cleanup() - - return func() - - -def retry(*r_args, **r_kwargs): - """ - Decorator wrapper for retry_call. Accepts arguments to retry_call - except func and then returns a decorator for the decorated function. 
- - Ex: - - >>> @retry(retries=3) - ... def my_func(a, b): - ... "this is my funk" - ... print(a, b) - >>> my_func.__doc__ - 'this is my funk' - """ - - def decorate(func): - @functools.wraps(func) - def wrapper(*f_args, **f_kwargs): - bound = functools.partial(func, *f_args, **f_kwargs) - return retry_call(bound, *r_args, **r_kwargs) - - return wrapper - - return decorate - - -def print_yielded(func): - """ - Convert a generator into a function that prints all yielded elements - - >>> @print_yielded - ... def x(): - ... yield 3; yield None - >>> x() - 3 - None - """ - print_all = functools.partial(map, print) - print_results = compose(more_itertools.consume, print_all, func) - return functools.wraps(func)(print_results) - - -def pass_none(func): - """ - Wrap func so it's not called if its first param is None - - >>> print_text = pass_none(print) - >>> print_text('text') - text - >>> print_text(None) - """ - - @functools.wraps(func) - def wrapper(param, *args, **kwargs): - if param is not None: - return func(param, *args, **kwargs) - - return wrapper - - -def assign_params(func, namespace): - """ - Assign parameters from namespace where func solicits. - - >>> def func(x, y=3): - ... print(x, y) - >>> assigned = assign_params(func, dict(x=2, z=4)) - >>> assigned() - 2 3 - - The usual errors are raised if a function doesn't receive - its required parameters: - - >>> assigned = assign_params(func, dict(y=3, z=4)) - >>> assigned() - Traceback (most recent call last): - TypeError: func() ...argument... - - It even works on methods: - - >>> class Handler: - ... def meth(self, arg): - ... print(arg) - >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))() - crystal - """ - sig = inspect.signature(func) - params = sig.parameters.keys() - call_ns = {k: namespace[k] for k in params if k in namespace} - return functools.partial(func, **call_ns) - - -def save_method_args(method): - """ - Wrap a method such that when it is called, the args and kwargs are - saved on the method. - - >>> class MyClass: - ... @save_method_args - ... def method(self, a, b): - ... print(a, b) - >>> my_ob = MyClass() - >>> my_ob.method(1, 2) - 1 2 - >>> my_ob._saved_method.args - (1, 2) - >>> my_ob._saved_method.kwargs - {} - >>> my_ob.method(a=3, b='foo') - 3 foo - >>> my_ob._saved_method.args - () - >>> my_ob._saved_method.kwargs == dict(a=3, b='foo') - True - - The arguments are stored on the instance, allowing for - different instance to save different args. - - >>> your_ob = MyClass() - >>> your_ob.method({str('x'): 3}, b=[4]) - {'x': 3} [4] - >>> your_ob._saved_method.args - ({'x': 3},) - >>> my_ob._saved_method.args - () - """ - args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs') - - @functools.wraps(method) - def wrapper(self, *args, **kwargs): - attr_name = '_saved_' + method.__name__ - attr = args_and_kwargs(args, kwargs) - setattr(self, attr_name, attr) - return method(self, *args, **kwargs) - - return wrapper - - -def except_(*exceptions, replace=None, use=None): - """ - Replace the indicated exceptions, if raised, with the indicated - literal replacement or evaluated expression (if present). - - >>> safe_int = except_(ValueError)(int) - >>> safe_int('five') - >>> safe_int('5') - 5 - - Specify a literal replacement with ``replace``. - - >>> safe_int_r = except_(ValueError, replace=0)(int) - >>> safe_int_r('five') - 0 - - Provide an expression to ``use`` to pass through particular parameters. 
- - >>> safe_int_pt = except_(ValueError, use='args[0]')(int) - >>> safe_int_pt('five') - 'five' - - """ - - def decorate(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - try: - return func(*args, **kwargs) - except exceptions: - try: - return eval(use) - except TypeError: - return replace - - return wrapper - - return decorate diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__init__.py deleted file mode 100644 index a0306d5..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__init__.py +++ /dev/null @@ -1,599 +0,0 @@ -import re -import itertools -import textwrap -import functools - -try: - from importlib.resources import files # type: ignore -except ImportError: # pragma: nocover - from setuptools.extern.importlib_resources import files # type: ignore - -from setuptools.extern.jaraco.functools import compose, method_cache -from setuptools.extern.jaraco.context import ExceptionTrap - - -def substitution(old, new): - """ - Return a function that will perform a substitution on a string - """ - return lambda s: s.replace(old, new) - - -def multi_substitution(*substitutions): - """ - Take a sequence of pairs specifying substitutions, and create - a function that performs those substitutions. - - >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') - 'baz' - """ - substitutions = itertools.starmap(substitution, substitutions) - # compose function applies last function first, so reverse the - # substitutions to get the expected order. - substitutions = reversed(tuple(substitutions)) - return compose(*substitutions) - - -class FoldedCase(str): - """ - A case insensitive string class; behaves just like str - except compares equal when the only variation is case. - - >>> s = FoldedCase('hello world') - - >>> s == 'Hello World' - True - - >>> 'Hello World' == s - True - - >>> s != 'Hello World' - False - - >>> s.index('O') - 4 - - >>> s.split('O') - ['hell', ' w', 'rld'] - - >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) - ['alpha', 'Beta', 'GAMMA'] - - Sequence membership is straightforward. - - >>> "Hello World" in [s] - True - >>> s in ["Hello World"] - True - - You may test for set inclusion, but candidate and elements - must both be folded. - - >>> FoldedCase("Hello World") in {s} - True - >>> s in {FoldedCase("Hello World")} - True - - String inclusion works as long as the FoldedCase object - is on the right. - - >>> "hello" in FoldedCase("Hello World") - True - - But not if the FoldedCase object is on the left: - - >>> FoldedCase('hello') in 'Hello World' - False - - In that case, use ``in_``: - - >>> FoldedCase('hello').in_('Hello World') - True - - >>> FoldedCase('hello') > FoldedCase('Hello') - False - """ - - def __lt__(self, other): - return self.lower() < other.lower() - - def __gt__(self, other): - return self.lower() > other.lower() - - def __eq__(self, other): - return self.lower() == other.lower() - - def __ne__(self, other): - return self.lower() != other.lower() - - def __hash__(self): - return hash(self.lower()) - - def __contains__(self, other): - return super().lower().__contains__(other.lower()) - - def in_(self, other): - "Does self appear in other?" - return self in FoldedCase(other) - - # cache lower since it's likely to be called frequently. 
- @method_cache - def lower(self): - return super().lower() - - def index(self, sub): - return self.lower().index(sub.lower()) - - def split(self, splitter=' ', maxsplit=0): - pattern = re.compile(re.escape(splitter), re.I) - return pattern.split(self, maxsplit) - - -# Python 3.8 compatibility -_unicode_trap = ExceptionTrap(UnicodeDecodeError) - - -@_unicode_trap.passes -def is_decodable(value): - r""" - Return True if the supplied value is decodable (using the default - encoding). - - >>> is_decodable(b'\xff') - False - >>> is_decodable(b'\x32') - True - """ - value.decode() - - -def is_binary(value): - r""" - Return True if the value appears to be binary (that is, it's a byte - string and isn't decodable). - - >>> is_binary(b'\xff') - True - >>> is_binary('\xff') - False - """ - return isinstance(value, bytes) and not is_decodable(value) - - -def trim(s): - r""" - Trim something like a docstring to remove the whitespace that - is common due to indentation and formatting. - - >>> trim("\n\tfoo = bar\n\t\tbar = baz\n") - 'foo = bar\n\tbar = baz' - """ - return textwrap.dedent(s).strip() - - -def wrap(s): - """ - Wrap lines of text, retaining existing newlines as - paragraph markers. - - >>> print(wrap(lorem_ipsum)) - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do - eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad - minim veniam, quis nostrud exercitation ullamco laboris nisi ut - aliquip ex ea commodo consequat. Duis aute irure dolor in - reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla - pariatur. Excepteur sint occaecat cupidatat non proident, sunt in - culpa qui officia deserunt mollit anim id est laborum. - - Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam - varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus - magna felis sollicitudin mauris. Integer in mauris eu nibh euismod - gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis - risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, - eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas - fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla - a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, - neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing - sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque - nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus - quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, - molestie eu, feugiat in, orci. In hac habitasse platea dictumst. - """ - paragraphs = s.splitlines() - wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs) - return '\n\n'.join(wrapped) - - -def unwrap(s): - r""" - Given a multi-line string, return an unwrapped version. - - >>> wrapped = wrap(lorem_ipsum) - >>> wrapped.count('\n') - 20 - >>> unwrapped = unwrap(wrapped) - >>> unwrapped.count('\n') - 1 - >>> print(unwrapped) - Lorem ipsum dolor sit amet, consectetur adipiscing ... - Curabitur pretium tincidunt lacus. Nulla gravida orci ... 
- - """ - paragraphs = re.split(r'\n\n+', s) - cleaned = (para.replace('\n', ' ') for para in paragraphs) - return '\n'.join(cleaned) - - - - -class Splitter(object): - """object that will split a string with the given arguments for each call - - >>> s = Splitter(',') - >>> s('hello, world, this is your, master calling') - ['hello', ' world', ' this is your', ' master calling'] - """ - - def __init__(self, *args): - self.args = args - - def __call__(self, s): - return s.split(*self.args) - - -def indent(string, prefix=' ' * 4): - """ - >>> indent('foo') - ' foo' - """ - return prefix + string - - -class WordSet(tuple): - """ - Given an identifier, return the words that identifier represents, - whether in camel case, underscore-separated, etc. - - >>> WordSet.parse("camelCase") - ('camel', 'Case') - - >>> WordSet.parse("under_sep") - ('under', 'sep') - - Acronyms should be retained - - >>> WordSet.parse("firstSNL") - ('first', 'SNL') - - >>> WordSet.parse("you_and_I") - ('you', 'and', 'I') - - >>> WordSet.parse("A simple test") - ('A', 'simple', 'test') - - Multiple caps should not interfere with the first cap of another word. - - >>> WordSet.parse("myABCClass") - ('my', 'ABC', 'Class') - - The result is a WordSet, so you can get the form you need. - - >>> WordSet.parse("myABCClass").underscore_separated() - 'my_ABC_Class' - - >>> WordSet.parse('a-command').camel_case() - 'ACommand' - - >>> WordSet.parse('someIdentifier').lowered().space_separated() - 'some identifier' - - Slices of the result should return another WordSet. - - >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated() - 'out_of_context' - - >>> WordSet.from_class_name(WordSet()).lowered().space_separated() - 'word set' - - >>> example = WordSet.parse('figured it out') - >>> example.headless_camel_case() - 'figuredItOut' - >>> example.dash_separated() - 'figured-it-out' - - """ - - _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))') - - def capitalized(self): - return WordSet(word.capitalize() for word in self) - - def lowered(self): - return WordSet(word.lower() for word in self) - - def camel_case(self): - return ''.join(self.capitalized()) - - def headless_camel_case(self): - words = iter(self) - first = next(words).lower() - new_words = itertools.chain((first,), WordSet(words).camel_case()) - return ''.join(new_words) - - def underscore_separated(self): - return '_'.join(self) - - def dash_separated(self): - return '-'.join(self) - - def space_separated(self): - return ' '.join(self) - - def trim_right(self, item): - """ - Remove the item from the end of the set. - - >>> WordSet.parse('foo bar').trim_right('foo') - ('foo', 'bar') - >>> WordSet.parse('foo bar').trim_right('bar') - ('foo',) - >>> WordSet.parse('').trim_right('bar') - () - """ - return self[:-1] if self and self[-1] == item else self - - def trim_left(self, item): - """ - Remove the item from the beginning of the set. 
- - >>> WordSet.parse('foo bar').trim_left('foo') - ('bar',) - >>> WordSet.parse('foo bar').trim_left('bar') - ('foo', 'bar') - >>> WordSet.parse('').trim_left('bar') - () - """ - return self[1:] if self and self[0] == item else self - - def trim(self, item): - """ - >>> WordSet.parse('foo bar').trim('foo') - ('bar',) - """ - return self.trim_left(item).trim_right(item) - - def __getitem__(self, item): - result = super(WordSet, self).__getitem__(item) - if isinstance(item, slice): - result = WordSet(result) - return result - - @classmethod - def parse(cls, identifier): - matches = cls._pattern.finditer(identifier) - return WordSet(match.group(0) for match in matches) - - @classmethod - def from_class_name(cls, subject): - return cls.parse(subject.__class__.__name__) - - -# for backward compatibility -words = WordSet.parse - - -def simple_html_strip(s): - r""" - Remove HTML from the string `s`. - - >>> str(simple_html_strip('')) - '' - - >>> print(simple_html_strip('A stormy day in paradise')) - A stormy day in paradise - - >>> print(simple_html_strip('Somebody tell the truth.')) - Somebody tell the truth. - - >>> print(simple_html_strip('What about
\nmultiple lines?')) - What about - multiple lines? - """ - html_stripper = re.compile('()|(<[^>]*>)|([^<]+)', re.DOTALL) - texts = (match.group(3) or '' for match in html_stripper.finditer(s)) - return ''.join(texts) - - -class SeparatedValues(str): - """ - A string separated by a separator. Overrides __iter__ for getting - the values. - - >>> list(SeparatedValues('a,b,c')) - ['a', 'b', 'c'] - - Whitespace is stripped and empty values are discarded. - - >>> list(SeparatedValues(' a, b , c, ')) - ['a', 'b', 'c'] - """ - - separator = ',' - - def __iter__(self): - parts = self.split(self.separator) - return filter(None, (part.strip() for part in parts)) - - -class Stripper: - r""" - Given a series of lines, find the common prefix and strip it from them. - - >>> lines = [ - ... 'abcdefg\n', - ... 'abc\n', - ... 'abcde\n', - ... ] - >>> res = Stripper.strip_prefix(lines) - >>> res.prefix - 'abc' - >>> list(res.lines) - ['defg\n', '\n', 'de\n'] - - If no prefix is common, nothing should be stripped. - - >>> lines = [ - ... 'abcd\n', - ... '1234\n', - ... ] - >>> res = Stripper.strip_prefix(lines) - >>> res.prefix = '' - >>> list(res.lines) - ['abcd\n', '1234\n'] - """ - - def __init__(self, prefix, lines): - self.prefix = prefix - self.lines = map(self, lines) - - @classmethod - def strip_prefix(cls, lines): - prefix_lines, lines = itertools.tee(lines) - prefix = functools.reduce(cls.common_prefix, prefix_lines) - return cls(prefix, lines) - - def __call__(self, line): - if not self.prefix: - return line - null, prefix, rest = line.partition(self.prefix) - return rest - - @staticmethod - def common_prefix(s1, s2): - """ - Return the common prefix of two lines. - """ - index = min(len(s1), len(s2)) - while s1[:index] != s2[:index]: - index -= 1 - return s1[:index] - - -def remove_prefix(text, prefix): - """ - Remove the prefix from the text if it exists. - - >>> remove_prefix('underwhelming performance', 'underwhelming ') - 'performance' - - >>> remove_prefix('something special', 'sample') - 'something special' - """ - null, prefix, rest = text.rpartition(prefix) - return rest - - -def remove_suffix(text, suffix): - """ - Remove the suffix from the text if it exists. - - >>> remove_suffix('name.git', '.git') - 'name' - - >>> remove_suffix('something special', 'sample') - 'something special' - """ - rest, suffix, null = text.partition(suffix) - return rest - - -def normalize_newlines(text): - r""" - Replace alternate newlines with the canonical newline. - - >>> normalize_newlines('Lorem Ipsum\u2029') - 'Lorem Ipsum\n' - >>> normalize_newlines('Lorem Ipsum\r\n') - 'Lorem Ipsum\n' - >>> normalize_newlines('Lorem Ipsum\x85') - 'Lorem Ipsum\n' - """ - newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029'] - pattern = '|'.join(newlines) - return re.sub(pattern, '\n', text) - - -def _nonblank(str): - return str and not str.startswith('#') - - -@functools.singledispatch -def yield_lines(iterable): - r""" - Yield valid lines of a string or iterable. - - >>> list(yield_lines('')) - [] - >>> list(yield_lines(['foo', 'bar'])) - ['foo', 'bar'] - >>> list(yield_lines('foo\nbar')) - ['foo', 'bar'] - >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) - ['foo', 'baz #comment'] - >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) - ['foo', 'bar', 'baz', 'bing'] - """ - return itertools.chain.from_iterable(map(yield_lines, iterable)) - - -@yield_lines.register(str) -def _(text): - return filter(_nonblank, map(str.strip, text.splitlines())) - - -def drop_comment(line): - """ - Drop comments. 
- - >>> drop_comment('foo # bar') - 'foo' - - A hash without a space may be in a URL. - - >>> drop_comment('http://example.com/foo#bar') - 'http://example.com/foo#bar' - """ - return line.partition(' #')[0] - - -def join_continuation(lines): - r""" - Join lines continued by a trailing backslash. - - >>> list(join_continuation(['foo \\', 'bar', 'baz'])) - ['foobar', 'baz'] - >>> list(join_continuation(['foo \\', 'bar', 'baz'])) - ['foobar', 'baz'] - >>> list(join_continuation(['foo \\', 'bar \\', 'baz'])) - ['foobarbaz'] - - Not sure why, but... - The character preceeding the backslash is also elided. - - >>> list(join_continuation(['goo\\', 'dly'])) - ['godly'] - - A terrible idea, but... - If no line is available to continue, suppress the lines. - - >>> list(join_continuation(['foo', 'bar\\', 'baz\\'])) - ['foo'] - """ - lines = iter(lines) - for item in lines: - while item.endswith('\\'): - try: - item = item[:-2].strip() + next(lines) - except StopIteration: - return - yield item diff --git a/venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index b51b056..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/jaraco/text/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py deleted file mode 100644 index 19a169f..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .more import * # noqa -from .recipes import * # noqa - -__version__ = '8.8.0' diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 9acca37..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-39.pyc deleted file mode 100644 index e29ce31..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-39.pyc deleted file mode 100644 index 0fdc0ba..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py deleted file mode 100644 index e6fca4d..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/more.py +++ /dev/null @@ -1,3824 +0,0 @@ -import warnings - -from collections import Counter, defaultdict, deque, abc -from collections.abc import Sequence -from functools import partial, reduce, wraps -from heapq import merge, heapify, heapreplace, heappop -from itertools import ( - chain, - compress, - count, - cycle, - dropwhile, - groupby, - islice, - repeat, - starmap, - takewhile, - tee, - zip_longest, -) -from 
math import exp, factorial, floor, log -from queue import Empty, Queue -from random import random, randrange, uniform -from operator import itemgetter, mul, sub, gt, lt -from sys import hexversion, maxsize -from time import monotonic - -from .recipes import ( - consume, - flatten, - pairwise, - powerset, - take, - unique_everseen, -) - -__all__ = [ - 'AbortThread', - 'adjacent', - 'always_iterable', - 'always_reversible', - 'bucket', - 'callback_iter', - 'chunked', - 'circular_shifts', - 'collapse', - 'collate', - 'consecutive_groups', - 'consumer', - 'countable', - 'count_cycle', - 'mark_ends', - 'difference', - 'distinct_combinations', - 'distinct_permutations', - 'distribute', - 'divide', - 'exactly_n', - 'filter_except', - 'first', - 'groupby_transform', - 'ilen', - 'interleave_longest', - 'interleave', - 'intersperse', - 'islice_extended', - 'iterate', - 'ichunked', - 'is_sorted', - 'last', - 'locate', - 'lstrip', - 'make_decorator', - 'map_except', - 'map_reduce', - 'nth_or_last', - 'nth_permutation', - 'nth_product', - 'numeric_range', - 'one', - 'only', - 'padded', - 'partitions', - 'set_partitions', - 'peekable', - 'repeat_last', - 'replace', - 'rlocate', - 'rstrip', - 'run_length', - 'sample', - 'seekable', - 'SequenceView', - 'side_effect', - 'sliced', - 'sort_together', - 'split_at', - 'split_after', - 'split_before', - 'split_when', - 'split_into', - 'spy', - 'stagger', - 'strip', - 'substrings', - 'substrings_indexes', - 'time_limited', - 'unique_to_each', - 'unzip', - 'windowed', - 'with_iter', - 'UnequalIterablesError', - 'zip_equal', - 'zip_offset', - 'windowed_complete', - 'all_unique', - 'value_chain', - 'product_index', - 'combination_index', - 'permutation_index', -] - -_marker = object() - - -def chunked(iterable, n, strict=False): - """Break *iterable* into lists of length *n*: - - >>> list(chunked([1, 2, 3, 4, 5, 6], 3)) - [[1, 2, 3], [4, 5, 6]] - - By the default, the last yielded list will have fewer than *n* elements - if the length of *iterable* is not divisible by *n*: - - >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3)) - [[1, 2, 3], [4, 5, 6], [7, 8]] - - To use a fill-in value instead, see the :func:`grouper` recipe. - - If the length of *iterable* is not divisible by *n* and *strict* is - ``True``, then ``ValueError`` will be raised before the last - list is yielded. - - """ - iterator = iter(partial(take, n, iter(iterable)), []) - if strict: - - def ret(): - for chunk in iterator: - if len(chunk) != n: - raise ValueError('iterable is not divisible by n.') - yield chunk - - return iter(ret()) - else: - return iterator - - -def first(iterable, default=_marker): - """Return the first item of *iterable*, or *default* if *iterable* is - empty. - - >>> first([0, 1, 2, 3]) - 0 - >>> first([], 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - - :func:`first` is useful when you have a generator of expensive-to-retrieve - values and want any arbitrary one. It is marginally shorter than - ``next(iter(iterable), default)``. - - """ - try: - return next(iter(iterable)) - except StopIteration as e: - if default is _marker: - raise ValueError( - 'first() was called on an empty iterable, and no ' - 'default value was provided.' - ) from e - return default - - -def last(iterable, default=_marker): - """Return the last item of *iterable*, or *default* if *iterable* is - empty. 
- - >>> last([0, 1, 2, 3]) - 3 - >>> last([], 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - """ - try: - if isinstance(iterable, Sequence): - return iterable[-1] - # Work around https://bugs.python.org/issue38525 - elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0): - return next(reversed(iterable)) - else: - return deque(iterable, maxlen=1)[-1] - except (IndexError, TypeError, StopIteration): - if default is _marker: - raise ValueError( - 'last() was called on an empty iterable, and no default was ' - 'provided.' - ) - return default - - -def nth_or_last(iterable, n, default=_marker): - """Return the nth or the last item of *iterable*, - or *default* if *iterable* is empty. - - >>> nth_or_last([0, 1, 2, 3], 2) - 2 - >>> nth_or_last([0, 1], 2) - 1 - >>> nth_or_last([], 0, 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - """ - return last(islice(iterable, n + 1), default=default) - - -class peekable: - """Wrap an iterator to allow lookahead and prepending elements. - - Call :meth:`peek` on the result to get the value that will be returned - by :func:`next`. This won't advance the iterator: - - >>> p = peekable(['a', 'b']) - >>> p.peek() - 'a' - >>> next(p) - 'a' - - Pass :meth:`peek` a default value to return that instead of raising - ``StopIteration`` when the iterator is exhausted. - - >>> p = peekable([]) - >>> p.peek('hi') - 'hi' - - peekables also offer a :meth:`prepend` method, which "inserts" items - at the head of the iterable: - - >>> p = peekable([1, 2, 3]) - >>> p.prepend(10, 11, 12) - >>> next(p) - 10 - >>> p.peek() - 11 - >>> list(p) - [11, 12, 1, 2, 3] - - peekables can be indexed. Index 0 is the item that will be returned by - :func:`next`, index 1 is the item after that, and so on: - The values up to the given index will be cached. - - >>> p = peekable(['a', 'b', 'c', 'd']) - >>> p[0] - 'a' - >>> p[1] - 'b' - >>> next(p) - 'a' - - Negative indexes are supported, but be aware that they will cache the - remaining items in the source iterator, which may require significant - storage. - - To check whether a peekable is exhausted, check its truth value: - - >>> p = peekable(['a', 'b']) - >>> if p: # peekable has items - ... list(p) - ['a', 'b'] - >>> if not p: # peekable is exhausted - ... list(p) - [] - - """ - - def __init__(self, iterable): - self._it = iter(iterable) - self._cache = deque() - - def __iter__(self): - return self - - def __bool__(self): - try: - self.peek() - except StopIteration: - return False - return True - - def peek(self, default=_marker): - """Return the item that will be next returned from ``next()``. - - Return ``default`` if there are no items left. If ``default`` is not - provided, raise ``StopIteration``. - - """ - if not self._cache: - try: - self._cache.append(next(self._it)) - except StopIteration: - if default is _marker: - raise - return default - return self._cache[0] - - def prepend(self, *items): - """Stack up items to be the next ones returned from ``next()`` or - ``self.peek()``. The items will be returned in - first in, first out order:: - - >>> p = peekable([1, 2, 3]) - >>> p.prepend(10, 11, 12) - >>> next(p) - 10 - >>> list(p) - [11, 12, 1, 2, 3] - - It is possible, by prepending items, to "resurrect" a peekable that - previously raised ``StopIteration``. - - >>> p = peekable([]) - >>> next(p) - Traceback (most recent call last): - ... 
- StopIteration - >>> p.prepend(1) - >>> next(p) - 1 - >>> next(p) - Traceback (most recent call last): - ... - StopIteration - - """ - self._cache.extendleft(reversed(items)) - - def __next__(self): - if self._cache: - return self._cache.popleft() - - return next(self._it) - - def _get_slice(self, index): - # Normalize the slice's arguments - step = 1 if (index.step is None) else index.step - if step > 0: - start = 0 if (index.start is None) else index.start - stop = maxsize if (index.stop is None) else index.stop - elif step < 0: - start = -1 if (index.start is None) else index.start - stop = (-maxsize - 1) if (index.stop is None) else index.stop - else: - raise ValueError('slice step cannot be zero') - - # If either the start or stop index is negative, we'll need to cache - # the rest of the iterable in order to slice from the right side. - if (start < 0) or (stop < 0): - self._cache.extend(self._it) - # Otherwise we'll need to find the rightmost index and cache to that - # point. - else: - n = min(max(start, stop) + 1, maxsize) - cache_len = len(self._cache) - if n >= cache_len: - self._cache.extend(islice(self._it, n - cache_len)) - - return list(self._cache)[index] - - def __getitem__(self, index): - if isinstance(index, slice): - return self._get_slice(index) - - cache_len = len(self._cache) - if index < 0: - self._cache.extend(self._it) - elif index >= cache_len: - self._cache.extend(islice(self._it, index + 1 - cache_len)) - - return self._cache[index] - - -def collate(*iterables, **kwargs): - """Return a sorted merge of the items from each of several already-sorted - *iterables*. - - >>> list(collate('ACDZ', 'AZ', 'JKL')) - ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z'] - - Works lazily, keeping only the next value from each iterable in memory. Use - :func:`collate` to, for example, perform a n-way mergesort of items that - don't fit in memory. - - If a *key* function is specified, the iterables will be sorted according - to its result: - - >>> key = lambda s: int(s) # Sort by numeric value, not by string - >>> list(collate(['1', '10'], ['2', '11'], key=key)) - ['1', '2', '10', '11'] - - - If the *iterables* are sorted in descending order, set *reverse* to - ``True``: - - >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True)) - [5, 4, 3, 2, 1, 0] - - If the elements of the passed-in iterables are out of order, you might get - unexpected results. - - On Python 3.5+, this function is an alias for :func:`heapq.merge`. - - """ - warnings.warn( - "collate is no longer part of more_itertools, use heapq.merge", - DeprecationWarning, - ) - return merge(*iterables, **kwargs) - - -def consumer(func): - """Decorator that automatically advances a PEP-342-style "reverse iterator" - to its first yield point so you don't have to call ``next()`` on it - manually. - - >>> @consumer - ... def tally(): - ... i = 0 - ... while True: - ... print('Thing number %s is %s.' % (i, (yield))) - ... i += 1 - ... - >>> t = tally() - >>> t.send('red') - Thing number 0 is red. - >>> t.send('fish') - Thing number 1 is fish. - - Without the decorator, you would have to call ``next(t)`` before - ``t.send()`` could be used. - - """ - - @wraps(func) - def wrapper(*args, **kwargs): - gen = func(*args, **kwargs) - next(gen) - return gen - - return wrapper - - -def ilen(iterable): - """Return the number of items in *iterable*. - - >>> ilen(x for x in range(1000000) if x % 3 == 0) - 333334 - - This consumes the iterable, so handle with care. 
- - """ - # This approach was selected because benchmarks showed it's likely the - # fastest of the known implementations at the time of writing. - # See GitHub tracker: #236, #230. - counter = count() - deque(zip(iterable, counter), maxlen=0) - return next(counter) - - -def iterate(func, start): - """Return ``start``, ``func(start)``, ``func(func(start))``, ... - - >>> from itertools import islice - >>> list(islice(iterate(lambda x: 2*x, 1), 10)) - [1, 2, 4, 8, 16, 32, 64, 128, 256, 512] - - """ - while True: - yield start - start = func(start) - - -def with_iter(context_manager): - """Wrap an iterable in a ``with`` statement, so it closes once exhausted. - - For example, this will close the file when the iterator is exhausted:: - - upper_lines = (line.upper() for line in with_iter(open('foo'))) - - Any context manager which returns an iterable is a candidate for - ``with_iter``. - - """ - with context_manager as iterable: - yield from iterable - - -def one(iterable, too_short=None, too_long=None): - """Return the first item from *iterable*, which is expected to contain only - that item. Raise an exception if *iterable* is empty or has more than one - item. - - :func:`one` is useful for ensuring that an iterable contains only one item. - For example, it can be used to retrieve the result of a database query - that is expected to return a single row. - - If *iterable* is empty, ``ValueError`` will be raised. You may specify a - different exception with the *too_short* keyword: - - >>> it = [] - >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: too many items in iterable (expected 1)' - >>> too_short = IndexError('too few items') - >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - IndexError: too few items - - Similarly, if *iterable* contains more than one item, ``ValueError`` will - be raised. You may specify a different exception with the *too_long* - keyword: - - >>> it = ['too', 'many'] - >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: Expected exactly one item in iterable, but got 'too', - 'many', and perhaps more. - >>> too_long = RuntimeError - >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - RuntimeError - - Note that :func:`one` attempts to advance *iterable* twice to ensure there - is only one item. See :func:`spy` or :func:`peekable` to check iterable - contents less destructively. - - """ - it = iter(iterable) - - try: - first_value = next(it) - except StopIteration as e: - raise ( - too_short or ValueError('too few items in iterable (expected 1)') - ) from e - - try: - second_value = next(it) - except StopIteration: - pass - else: - msg = ( - 'Expected exactly one item in iterable, but got {!r}, {!r}, ' - 'and perhaps more.'.format(first_value, second_value) - ) - raise too_long or ValueError(msg) - - return first_value - - -def distinct_permutations(iterable, r=None): - """Yield successive distinct permutations of the elements in *iterable*. - - >>> sorted(distinct_permutations([1, 0, 1])) - [(0, 1, 1), (1, 0, 1), (1, 1, 0)] - - Equivalent to ``set(permutations(iterable))``, except duplicates are not - generated and thrown away. For larger input sequences this is much more - efficient. - - Duplicate permutations arise when there are duplicated elements in the - input iterable. The number of items returned is - `n! / (x_1! * x_2! * ... 
* x_n!)`, where `n` is the total number of - items input, and each `x_i` is the count of a distinct item in the input - sequence. - - If *r* is given, only the *r*-length permutations are yielded. - - >>> sorted(distinct_permutations([1, 0, 1], r=2)) - [(0, 1), (1, 0), (1, 1)] - >>> sorted(distinct_permutations(range(3), r=2)) - [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] - - """ - # Algorithm: https://w.wiki/Qai - def _full(A): - while True: - # Yield the permutation we have - yield tuple(A) - - # Find the largest index i such that A[i] < A[i + 1] - for i in range(size - 2, -1, -1): - if A[i] < A[i + 1]: - break - # If no such index exists, this permutation is the last one - else: - return - - # Find the largest index j greater than j such that A[i] < A[j] - for j in range(size - 1, i, -1): - if A[i] < A[j]: - break - - # Swap the value of A[i] with that of A[j], then reverse the - # sequence from A[i + 1] to form the new permutation - A[i], A[j] = A[j], A[i] - A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1] - - # Algorithm: modified from the above - def _partial(A, r): - # Split A into the first r items and the last r items - head, tail = A[:r], A[r:] - right_head_indexes = range(r - 1, -1, -1) - left_tail_indexes = range(len(tail)) - - while True: - # Yield the permutation we have - yield tuple(head) - - # Starting from the right, find the first index of the head with - # value smaller than the maximum value of the tail - call it i. - pivot = tail[-1] - for i in right_head_indexes: - if head[i] < pivot: - break - pivot = head[i] - else: - return - - # Starting from the left, find the first value of the tail - # with a value greater than head[i] and swap. - for j in left_tail_indexes: - if tail[j] > head[i]: - head[i], tail[j] = tail[j], head[i] - break - # If we didn't find one, start from the right and find the first - # index of the head with a value greater than head[i] and swap. - else: - for j in right_head_indexes: - if head[j] > head[i]: - head[i], head[j] = head[j], head[i] - break - - # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)] - tail += head[: i - r : -1] # head[i + 1:][::-1] - i += 1 - head[i:], tail[:] = tail[: r - i], tail[r - i :] - - items = sorted(iterable) - - size = len(items) - if r is None: - r = size - - if 0 < r <= size: - return _full(items) if (r == size) else _partial(items, r) - - return iter(() if r else ((),)) - - -def intersperse(e, iterable, n=1): - """Intersperse filler element *e* among the items in *iterable*, leaving - *n* items between each filler element. - - >>> list(intersperse('!', [1, 2, 3, 4, 5])) - [1, '!', 2, '!', 3, '!', 4, '!', 5] - - >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2)) - [1, 2, None, 3, 4, None, 5] - - """ - if n == 0: - raise ValueError('n must be > 0') - elif n == 1: - # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2... - # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2... - return islice(interleave(repeat(e), iterable), 1, None) - else: - # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]... - # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]... - # flatten(...) -> x_0, x_1, e, x_2, x_3... - filler = repeat([e]) - chunks = chunked(iterable, n) - return flatten(islice(interleave(filler, chunks), 1, None)) - - -def unique_to_each(*iterables): - """Return the elements from each of the input iterables that aren't in the - other input iterables. 
- - For example, suppose you have a set of packages, each with a set of - dependencies:: - - {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} - - If you remove one package, which dependencies can also be removed? - - If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not - associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for - ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: - - >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) - [['A'], ['C'], ['D']] - - If there are duplicates in one input iterable that aren't in the others - they will be duplicated in the output. Input order is preserved:: - - >>> unique_to_each("mississippi", "missouri") - [['p', 'p'], ['o', 'u', 'r']] - - It is assumed that the elements of each iterable are hashable. - - """ - pool = [list(it) for it in iterables] - counts = Counter(chain.from_iterable(map(set, pool))) - uniques = {element for element in counts if counts[element] == 1} - return [list(filter(uniques.__contains__, it)) for it in pool] - - -def windowed(seq, n, fillvalue=None, step=1): - """Return a sliding window of width *n* over the given iterable. - - >>> all_windows = windowed([1, 2, 3, 4, 5], 3) - >>> list(all_windows) - [(1, 2, 3), (2, 3, 4), (3, 4, 5)] - - When the window is larger than the iterable, *fillvalue* is used in place - of missing values: - - >>> list(windowed([1, 2, 3], 4)) - [(1, 2, 3, None)] - - Each window will advance in increments of *step*: - - >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) - [(1, 2, 3), (3, 4, 5), (5, 6, '!')] - - To slide into the iterable's items, use :func:`chain` to add filler items - to the left: - - >>> iterable = [1, 2, 3, 4] - >>> n = 3 - >>> padding = [None] * (n - 1) - >>> list(windowed(chain(padding, iterable), 3)) - [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)] - """ - if n < 0: - raise ValueError('n must be >= 0') - if n == 0: - yield tuple() - return - if step < 1: - raise ValueError('step must be >= 1') - - window = deque(maxlen=n) - i = n - for _ in map(window.append, seq): - i -= 1 - if not i: - i = step - yield tuple(window) - - size = len(window) - if size < n: - yield tuple(chain(window, repeat(fillvalue, n - size))) - elif 0 < i < min(step, n): - window += (fillvalue,) * i - yield tuple(window) - - -def substrings(iterable): - """Yield all of the substrings of *iterable*. - - >>> [''.join(s) for s in substrings('more')] - ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more'] - - Note that non-string iterables can also be subdivided. - - >>> list(substrings([0, 1, 2])) - [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)] - - """ - # The length-1 substrings - seq = [] - for item in iter(iterable): - seq.append(item) - yield (item,) - seq = tuple(seq) - item_count = len(seq) - - # And the rest - for n in range(2, item_count + 1): - for i in range(item_count - n + 1): - yield seq[i : i + n] - - -def substrings_indexes(seq, reverse=False): - """Yield all substrings and their positions in *seq* - - The items yielded will be a tuple of the form ``(substr, i, j)``, where - ``substr == seq[i:j]``. - - This function only works for iterables that support slicing, such as - ``str`` objects. - - >>> for item in substrings_indexes('more'): - ... print(item) - ('m', 0, 1) - ('o', 1, 2) - ('r', 2, 3) - ('e', 3, 4) - ('mo', 0, 2) - ('or', 1, 3) - ('re', 2, 4) - ('mor', 0, 3) - ('ore', 1, 4) - ('more', 0, 4) - - Set *reverse* to ``True`` to yield the same items in the opposite order. 
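# Editor's sketch (not part of the removed file; assumes the more_itertools
# package is installed): a 3-point moving average built on windowed(), using
# the sliding-window behaviour documented above.
from more_itertools import windowed

data = [3, 5, 2, 8, 7]
moving_avg = [sum(w) / len(w) for w in windowed(data, 3)]
print(moving_avg)  # [3.333..., 5.0, 5.666...]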
- - - """ - r = range(1, len(seq) + 1) - if reverse: - r = reversed(r) - return ( - (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1) - ) - - -class bucket: - """Wrap *iterable* and return an object that buckets it iterable into - child iterables based on a *key* function. - - >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] - >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character - >>> sorted(list(s)) # Get the keys - ['a', 'b', 'c'] - >>> a_iterable = s['a'] - >>> next(a_iterable) - 'a1' - >>> next(a_iterable) - 'a2' - >>> list(s['b']) - ['b1', 'b2', 'b3'] - - The original iterable will be advanced and its items will be cached until - they are used by the child iterables. This may require significant storage. - - By default, attempting to select a bucket to which no items belong will - exhaust the iterable and cache all values. - If you specify a *validator* function, selected buckets will instead be - checked against it. - - >>> from itertools import count - >>> it = count(1, 2) # Infinite sequence of odd numbers - >>> key = lambda x: x % 10 # Bucket by last digit - >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only - >>> s = bucket(it, key=key, validator=validator) - >>> 2 in s - False - >>> list(s[2]) - [] - - """ - - def __init__(self, iterable, key, validator=None): - self._it = iter(iterable) - self._key = key - self._cache = defaultdict(deque) - self._validator = validator or (lambda x: True) - - def __contains__(self, value): - if not self._validator(value): - return False - - try: - item = next(self[value]) - except StopIteration: - return False - else: - self._cache[value].appendleft(item) - - return True - - def _get_values(self, value): - """ - Helper to yield items from the parent iterator that match *value*. - Items that don't match are stored in the local cache as they - are encountered. - """ - while True: - # If we've cached some items that match the target value, emit - # the first one and evict it from the cache. - if self._cache[value]: - yield self._cache[value].popleft() - # Otherwise we need to advance the parent iterator to search for - # a matching item, caching the rest. - else: - while True: - try: - item = next(self._it) - except StopIteration: - return - item_value = self._key(item) - if item_value == value: - yield item - break - elif self._validator(item_value): - self._cache[item_value].append(item) - - def __iter__(self): - for item in self._it: - item_value = self._key(item) - if self._validator(item_value): - self._cache[item_value].append(item) - - yield from self._cache.keys() - - def __getitem__(self, value): - if not self._validator(value): - return iter(()) - - return self._get_values(value) - - -def spy(iterable, n=1): - """Return a 2-tuple with a list containing the first *n* elements of - *iterable*, and an iterator with the same items as *iterable*. - This allows you to "look ahead" at the items in the iterable without - advancing it. 
- - There is one item in the list by default: - - >>> iterable = 'abcdefg' - >>> head, iterable = spy(iterable) - >>> head - ['a'] - >>> list(iterable) - ['a', 'b', 'c', 'd', 'e', 'f', 'g'] - - You may use unpacking to retrieve items instead of lists: - - >>> (head,), iterable = spy('abcdefg') - >>> head - 'a' - >>> (first, second), iterable = spy('abcdefg', 2) - >>> first - 'a' - >>> second - 'b' - - The number of items requested can be larger than the number of items in - the iterable: - - >>> iterable = [1, 2, 3, 4, 5] - >>> head, iterable = spy(iterable, 10) - >>> head - [1, 2, 3, 4, 5] - >>> list(iterable) - [1, 2, 3, 4, 5] - - """ - it = iter(iterable) - head = take(n, it) - - return head.copy(), chain(head, it) - - -def interleave(*iterables): - """Return a new iterable yielding from each iterable in turn, - until the shortest is exhausted. - - >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8])) - [1, 4, 6, 2, 5, 7] - - For a version that doesn't terminate after the shortest iterable is - exhausted, see :func:`interleave_longest`. - - """ - return chain.from_iterable(zip(*iterables)) - - -def interleave_longest(*iterables): - """Return a new iterable yielding from each iterable in turn, - skipping any that are exhausted. - - >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) - [1, 4, 6, 2, 5, 7, 3, 8] - - This function produces the same output as :func:`roundrobin`, but may - perform better for some inputs (in particular when the number of iterables - is large). - - """ - i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker)) - return (x for x in i if x is not _marker) - - -def collapse(iterable, base_type=None, levels=None): - """Flatten an iterable with multiple levels of nesting (e.g., a list of - lists of tuples) into non-iterable types. - - >>> iterable = [(1, 2), ([3, 4], [[5], [6]])] - >>> list(collapse(iterable)) - [1, 2, 3, 4, 5, 6] - - Binary and text strings are not considered iterable and - will not be collapsed. - - To avoid collapsing other types, specify *base_type*: - - >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']] - >>> list(collapse(iterable, base_type=tuple)) - ['ab', ('cd', 'ef'), 'gh', 'ij'] - - Specify *levels* to stop flattening after a certain level: - - >>> iterable = [('a', ['b']), ('c', ['d'])] - >>> list(collapse(iterable)) # Fully flattened - ['a', 'b', 'c', 'd'] - >>> list(collapse(iterable, levels=1)) # Only one level flattened - ['a', ['b'], 'c', ['d']] - - """ - - def walk(node, level): - if ( - ((levels is not None) and (level > levels)) - or isinstance(node, (str, bytes)) - or ((base_type is not None) and isinstance(node, base_type)) - ): - yield node - return - - try: - tree = iter(node) - except TypeError: - yield node - return - else: - for child in tree: - yield from walk(child, level + 1) - - yield from walk(iterable, 0) - - -def side_effect(func, iterable, chunk_size=None, before=None, after=None): - """Invoke *func* on each item in *iterable* (or on each *chunk_size* group - of items) before yielding the item. - - `func` must be a function that takes a single argument. Its return value - will be discarded. - - *before* and *after* are optional functions that take no arguments. They - will be executed before iteration starts and after it ends, respectively. - - `side_effect` can be used for logging, updating progress bars, or anything - that is not functionally "pure." 
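# Editor's sketch (not part of the removed file; assumes the more_itertools
# package is installed): collapse() flattens arbitrarily nested containers
# while leaving strings whole, and *levels* caps how deep the flattening goes.
from more_itertools import collapse

nested = [1, [2, (3, 4)], 'ab', [[5]]]
print(list(collapse(nested)))            # [1, 2, 3, 4, 'ab', 5]
print(list(collapse(nested, levels=1)))  # [1, 2, (3, 4), 'ab', [5]]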
- - Emitting a status message: - - >>> from more_itertools import consume - >>> func = lambda item: print('Received {}'.format(item)) - >>> consume(side_effect(func, range(2))) - Received 0 - Received 1 - - Operating on chunks of items: - - >>> pair_sums = [] - >>> func = lambda chunk: pair_sums.append(sum(chunk)) - >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2)) - [0, 1, 2, 3, 4, 5] - >>> list(pair_sums) - [1, 5, 9] - - Writing to a file-like object: - - >>> from io import StringIO - >>> from more_itertools import consume - >>> f = StringIO() - >>> func = lambda x: print(x, file=f) - >>> before = lambda: print(u'HEADER', file=f) - >>> after = f.close - >>> it = [u'a', u'b', u'c'] - >>> consume(side_effect(func, it, before=before, after=after)) - >>> f.closed - True - - """ - try: - if before is not None: - before() - - if chunk_size is None: - for item in iterable: - func(item) - yield item - else: - for chunk in chunked(iterable, chunk_size): - func(chunk) - yield from chunk - finally: - if after is not None: - after() - - -def sliced(seq, n, strict=False): - """Yield slices of length *n* from the sequence *seq*. - - >>> list(sliced((1, 2, 3, 4, 5, 6), 3)) - [(1, 2, 3), (4, 5, 6)] - - By the default, the last yielded slice will have fewer than *n* elements - if the length of *seq* is not divisible by *n*: - - >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3)) - [(1, 2, 3), (4, 5, 6), (7, 8)] - - If the length of *seq* is not divisible by *n* and *strict* is - ``True``, then ``ValueError`` will be raised before the last - slice is yielded. - - This function will only work for iterables that support slicing. - For non-sliceable iterables, see :func:`chunked`. - - """ - iterator = takewhile(len, (seq[i : i + n] for i in count(0, n))) - if strict: - - def ret(): - for _slice in iterator: - if len(_slice) != n: - raise ValueError("seq is not divisible by n.") - yield _slice - - return iter(ret()) - else: - return iterator - - -def split_at(iterable, pred, maxsplit=-1, keep_separator=False): - """Yield lists of items from *iterable*, where each list is delimited by - an item where callable *pred* returns ``True``. - - >>> list(split_at('abcdcba', lambda x: x == 'b')) - [['a'], ['c', 'd', 'c'], ['a']] - - >>> list(split_at(range(10), lambda n: n % 2 == 1)) - [[0], [2], [4], [6], [8], []] - - At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2)) - [[0], [2], [4, 5, 6, 7, 8, 9]] - - By default, the delimiting items are not included in the output. - The include them, set *keep_separator* to ``True``. - - >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True)) - [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']] - - """ - if maxsplit == 0: - yield list(iterable) - return - - buf = [] - it = iter(iterable) - for item in it: - if pred(item): - yield buf - if keep_separator: - yield [item] - if maxsplit == 1: - yield list(it) - return - buf = [] - maxsplit -= 1 - else: - buf.append(item) - yield buf - - -def split_before(iterable, pred, maxsplit=-1): - """Yield lists of items from *iterable*, where each list ends just before - an item for which callable *pred* returns ``True``: - - >>> list(split_before('OneTwo', lambda s: s.isupper())) - [['O', 'n', 'e'], ['T', 'w', 'o']] - - >>> list(split_before(range(10), lambda n: n % 3 == 0)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] - - At most *maxsplit* splits are done. 
If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]] - """ - if maxsplit == 0: - yield list(iterable) - return - - buf = [] - it = iter(iterable) - for item in it: - if pred(item) and buf: - yield buf - if maxsplit == 1: - yield [item] + list(it) - return - buf = [] - maxsplit -= 1 - buf.append(item) - if buf: - yield buf - - -def split_after(iterable, pred, maxsplit=-1): - """Yield lists of items from *iterable*, where each list ends with an - item where callable *pred* returns ``True``: - - >>> list(split_after('one1two2', lambda s: s.isdigit())) - [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']] - - >>> list(split_after(range(10), lambda n: n % 3 == 0)) - [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]] - - At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2)) - [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]] - - """ - if maxsplit == 0: - yield list(iterable) - return - - buf = [] - it = iter(iterable) - for item in it: - buf.append(item) - if pred(item) and buf: - yield buf - if maxsplit == 1: - yield list(it) - return - buf = [] - maxsplit -= 1 - if buf: - yield buf - - -def split_when(iterable, pred, maxsplit=-1): - """Split *iterable* into pieces based on the output of *pred*. - *pred* should be a function that takes successive pairs of items and - returns ``True`` if the iterable should be split in between them. - - For example, to find runs of increasing numbers, split the iterable when - element ``i`` is larger than element ``i + 1``: - - >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y)) - [[1, 2, 3, 3], [2, 5], [2, 4], [2]] - - At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], - ... lambda x, y: x > y, maxsplit=2)) - [[1, 2, 3, 3], [2, 5], [2, 4, 2]] - - """ - if maxsplit == 0: - yield list(iterable) - return - - it = iter(iterable) - try: - cur_item = next(it) - except StopIteration: - return - - buf = [cur_item] - for next_item in it: - if pred(cur_item, next_item): - yield buf - if maxsplit == 1: - yield [next_item] + list(it) - return - buf = [] - maxsplit -= 1 - - buf.append(next_item) - cur_item = next_item - - yield buf - - -def split_into(iterable, sizes): - """Yield a list of sequential items from *iterable* of length 'n' for each - integer 'n' in *sizes*. - - >>> list(split_into([1,2,3,4,5,6], [1,2,3])) - [[1], [2, 3], [4, 5, 6]] - - If the sum of *sizes* is smaller than the length of *iterable*, then the - remaining items of *iterable* will not be returned. - - >>> list(split_into([1,2,3,4,5,6], [2,3])) - [[1, 2], [3, 4, 5]] - - If the sum of *sizes* is larger than the length of *iterable*, fewer items - will be returned in the iteration that overruns *iterable* and further - lists will be empty: - - >>> list(split_into([1,2,3,4], [1,2,3,4])) - [[1], [2, 3], [4], []] - - When a ``None`` object is encountered in *sizes*, the returned list will - contain items up to the end of *iterable* the same way that itertools.slice - does: - - >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None])) - [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]] - - :func:`split_into` can be useful for grouping a series of items where the - sizes of the groups are not uniform. 
An example would be where in a row - from a table, multiple columns represent elements of the same feature - (e.g. a point represented by x,y,z) but, the format is not the same for - all columns. - """ - # convert the iterable argument into an iterator so its contents can - # be consumed by islice in case it is a generator - it = iter(iterable) - - for size in sizes: - if size is None: - yield list(it) - return - else: - yield list(islice(it, size)) - - -def padded(iterable, fillvalue=None, n=None, next_multiple=False): - """Yield the elements from *iterable*, followed by *fillvalue*, such that - at least *n* items are emitted. - - >>> list(padded([1, 2, 3], '?', 5)) - [1, 2, 3, '?', '?'] - - If *next_multiple* is ``True``, *fillvalue* will be emitted until the - number of items emitted is a multiple of *n*:: - - >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True)) - [1, 2, 3, 4, None, None] - - If *n* is ``None``, *fillvalue* will be emitted indefinitely. - - """ - it = iter(iterable) - if n is None: - yield from chain(it, repeat(fillvalue)) - elif n < 1: - raise ValueError('n must be at least 1') - else: - item_count = 0 - for item in it: - yield item - item_count += 1 - - remaining = (n - item_count) % n if next_multiple else n - item_count - for _ in range(remaining): - yield fillvalue - - -def repeat_last(iterable, default=None): - """After the *iterable* is exhausted, keep yielding its last element. - - >>> list(islice(repeat_last(range(3)), 5)) - [0, 1, 2, 2, 2] - - If the iterable is empty, yield *default* forever:: - - >>> list(islice(repeat_last(range(0), 42), 5)) - [42, 42, 42, 42, 42] - - """ - item = _marker - for item in iterable: - yield item - final = default if item is _marker else item - yield from repeat(final) - - -def distribute(n, iterable): - """Distribute the items from *iterable* among *n* smaller iterables. - - >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6]) - >>> list(group_1) - [1, 3, 5] - >>> list(group_2) - [2, 4, 6] - - If the length of *iterable* is not evenly divisible by *n*, then the - length of the returned iterables will not be identical: - - >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7]) - >>> [list(c) for c in children] - [[1, 4, 7], [2, 5], [3, 6]] - - If the length of *iterable* is smaller than *n*, then the last returned - iterables will be empty: - - >>> children = distribute(5, [1, 2, 3]) - >>> [list(c) for c in children] - [[1], [2], [3], [], []] - - This function uses :func:`itertools.tee` and may require significant - storage. If you need the order items in the smaller iterables to match the - original iterable, see :func:`divide`. - - """ - if n < 1: - raise ValueError('n must be at least 1') - - children = tee(iterable, n) - return [islice(it, index, None, n) for index, it in enumerate(children)] - - -def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): - """Yield tuples whose elements are offset from *iterable*. - The amount by which the `i`-th item in each tuple is offset is given by - the `i`-th item in *offsets*. - - >>> list(stagger([0, 1, 2, 3])) - [(None, 0, 1), (0, 1, 2), (1, 2, 3)] - >>> list(stagger(range(8), offsets=(0, 2, 4))) - [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)] - - By default, the sequence will end when the final element of a tuple is the - last item in the iterable. 
To continue until the first element of a tuple - is the last item in the iterable, set *longest* to ``True``:: - - >>> list(stagger([0, 1, 2, 3], longest=True)) - [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)] - - By default, ``None`` will be used to replace offsets beyond the end of the - sequence. Specify *fillvalue* to use some other value. - - """ - children = tee(iterable, len(offsets)) - - return zip_offset( - *children, offsets=offsets, longest=longest, fillvalue=fillvalue - ) - - -class UnequalIterablesError(ValueError): - def __init__(self, details=None): - msg = 'Iterables have different lengths' - if details is not None: - msg += (': index 0 has length {}; index {} has length {}').format( - *details - ) - - super().__init__(msg) - - -def _zip_equal_generator(iterables): - for combo in zip_longest(*iterables, fillvalue=_marker): - for val in combo: - if val is _marker: - raise UnequalIterablesError() - yield combo - - -def zip_equal(*iterables): - """``zip`` the input *iterables* together, but raise - ``UnequalIterablesError`` if they aren't all the same length. - - >>> it_1 = range(3) - >>> it_2 = iter('abc') - >>> list(zip_equal(it_1, it_2)) - [(0, 'a'), (1, 'b'), (2, 'c')] - - >>> it_1 = range(3) - >>> it_2 = iter('abcd') - >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - more_itertools.more.UnequalIterablesError: Iterables have different - lengths - - """ - if hexversion >= 0x30A00A6: - warnings.warn( - ( - 'zip_equal will be removed in a future version of ' - 'more-itertools. Use the builtin zip function with ' - 'strict=True instead.' - ), - DeprecationWarning, - ) - # Check whether the iterables are all the same size. - try: - first_size = len(iterables[0]) - for i, it in enumerate(iterables[1:], 1): - size = len(it) - if size != first_size: - break - else: - # If we didn't break out, we can use the built-in zip. - return zip(*iterables) - - # If we did break out, there was a mismatch. - raise UnequalIterablesError(details=(first_size, i, size)) - # If any one of the iterables didn't have a length, start reading - # them until one runs out. - except TypeError: - return _zip_equal_generator(iterables) - - -def zip_offset(*iterables, offsets, longest=False, fillvalue=None): - """``zip`` the input *iterables* together, but offset the `i`-th iterable - by the `i`-th item in *offsets*. - - >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1))) - [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')] - - This can be used as a lightweight alternative to SciPy or pandas to analyze - data sets in which some series have a lead or lag relationship. - - By default, the sequence will end when the shortest iterable is exhausted. - To continue until the longest iterable is exhausted, set *longest* to - ``True``. - - >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True)) - [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')] - - By default, ``None`` will be used to replace offsets beyond the end of the - sequence. Specify *fillvalue* to use some other value. 
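# Editor's sketch (not part of the removed file; assumes the more_itertools
# package is installed): a lag-1 comparison with zip_offset(), pairing each
# value with its successor as described in the docstring above.
from more_itertools import zip_offset

prices = [10, 12, 11, 15]
for today, tomorrow in zip_offset(prices, prices, offsets=(0, 1)):
    print(today, '->', tomorrow)
# 10 -> 12
# 12 -> 11
# 11 -> 15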
- - """ - if len(iterables) != len(offsets): - raise ValueError("Number of iterables and offsets didn't match") - - staggered = [] - for it, n in zip(iterables, offsets): - if n < 0: - staggered.append(chain(repeat(fillvalue, -n), it)) - elif n > 0: - staggered.append(islice(it, n, None)) - else: - staggered.append(it) - - if longest: - return zip_longest(*staggered, fillvalue=fillvalue) - - return zip(*staggered) - - -def sort_together(iterables, key_list=(0,), key=None, reverse=False): - """Return the input iterables sorted together, with *key_list* as the - priority for sorting. All iterables are trimmed to the length of the - shortest one. - - This can be used like the sorting function in a spreadsheet. If each - iterable represents a column of data, the key list determines which - columns are used for sorting. - - By default, all iterables are sorted using the ``0``-th iterable:: - - >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')] - >>> sort_together(iterables) - [(1, 2, 3, 4), ('d', 'c', 'b', 'a')] - - Set a different key list to sort according to another iterable. - Specifying multiple keys dictates how ties are broken:: - - >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')] - >>> sort_together(iterables, key_list=(1, 2)) - [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')] - - To sort by a function of the elements of the iterable, pass a *key* - function. Its arguments are the elements of the iterables corresponding to - the key list:: - - >>> names = ('a', 'b', 'c') - >>> lengths = (1, 2, 3) - >>> widths = (5, 2, 1) - >>> def area(length, width): - ... return length * width - >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area) - [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)] - - Set *reverse* to ``True`` to sort in descending order. - - >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True) - [(3, 2, 1), ('a', 'b', 'c')] - - """ - if key is None: - # if there is no key function, the key argument to sorted is an - # itemgetter - key_argument = itemgetter(*key_list) - else: - # if there is a key function, call it with the items at the offsets - # specified by the key function as arguments - key_list = list(key_list) - if len(key_list) == 1: - # if key_list contains a single item, pass the item at that offset - # as the only argument to the key function - key_offset = key_list[0] - key_argument = lambda zipped_items: key(zipped_items[key_offset]) - else: - # if key_list contains multiple items, use itemgetter to return a - # tuple of items, which we pass as *args to the key function - get_key_items = itemgetter(*key_list) - key_argument = lambda zipped_items: key( - *get_key_items(zipped_items) - ) - - return list( - zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse)) - ) - - -def unzip(iterable): - """The inverse of :func:`zip`, this function disaggregates the elements - of the zipped *iterable*. - - The ``i``-th iterable contains the ``i``-th element from each element - of the zipped iterable. The first element is used to to determine the - length of the remaining elements. - - >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - >>> letters, numbers = unzip(iterable) - >>> list(letters) - ['a', 'b', 'c', 'd'] - >>> list(numbers) - [1, 2, 3, 4] - - This is similar to using ``zip(*iterable)``, but it avoids reading - *iterable* into memory. Note, however, that this function uses - :func:`itertools.tee` and thus may require significant storage. - - """ - head, iterable = spy(iter(iterable)) - if not head: - # empty iterable, e.g. 
zip([], [], []) - return () - # spy returns a one-length iterable as head - head = head[0] - iterables = tee(iterable, len(head)) - - def itemgetter(i): - def getter(obj): - try: - return obj[i] - except IndexError: - # basically if we have an iterable like - # iter([(1, 2, 3), (4, 5), (6,)]) - # the second unzipped iterable would fail at the third tuple - # since it would try to access tup[1] - # same with the third unzipped iterable and the second tuple - # to support these "improperly zipped" iterables, - # we create a custom itemgetter - # which just stops the unzipped iterables - # at first length mismatch - raise StopIteration - - return getter - - return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables)) - - -def divide(n, iterable): - """Divide the elements from *iterable* into *n* parts, maintaining - order. - - >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6]) - >>> list(group_1) - [1, 2, 3] - >>> list(group_2) - [4, 5, 6] - - If the length of *iterable* is not evenly divisible by *n*, then the - length of the returned iterables will not be identical: - - >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7]) - >>> [list(c) for c in children] - [[1, 2, 3], [4, 5], [6, 7]] - - If the length of the iterable is smaller than n, then the last returned - iterables will be empty: - - >>> children = divide(5, [1, 2, 3]) - >>> [list(c) for c in children] - [[1], [2], [3], [], []] - - This function will exhaust the iterable before returning and may require - significant storage. If order is not important, see :func:`distribute`, - which does not first pull the iterable into memory. - - """ - if n < 1: - raise ValueError('n must be at least 1') - - try: - iterable[:0] - except TypeError: - seq = tuple(iterable) - else: - seq = iterable - - q, r = divmod(len(seq), n) - - ret = [] - stop = 0 - for i in range(1, n + 1): - start = stop - stop += q + 1 if i <= r else q - ret.append(iter(seq[start:stop])) - - return ret - - -def always_iterable(obj, base_type=(str, bytes)): - """If *obj* is iterable, return an iterator over its items:: - - >>> obj = (1, 2, 3) - >>> list(always_iterable(obj)) - [1, 2, 3] - - If *obj* is not iterable, return a one-item iterable containing *obj*:: - - >>> obj = 1 - >>> list(always_iterable(obj)) - [1] - - If *obj* is ``None``, return an empty iterable: - - >>> obj = None - >>> list(always_iterable(None)) - [] - - By default, binary and text strings are not considered iterable:: - - >>> obj = 'foo' - >>> list(always_iterable(obj)) - ['foo'] - - If *base_type* is set, objects for which ``isinstance(obj, base_type)`` - returns ``True`` won't be considered iterable. - - >>> obj = {'a': 1} - >>> list(always_iterable(obj)) # Iterate over the dict's keys - ['a'] - >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit - [{'a': 1}] - - Set *base_type* to ``None`` to avoid any special handling and treat objects - Python considers iterable as iterable: - - >>> obj = 'foo' - >>> list(always_iterable(obj, base_type=None)) - ['f', 'o', 'o'] - """ - if obj is None: - return iter(()) - - if (base_type is not None) and isinstance(obj, base_type): - return iter((obj,)) - - try: - return iter(obj) - except TypeError: - return iter((obj,)) - - -def adjacent(predicate, iterable, distance=1): - """Return an iterable over `(bool, item)` tuples where the `item` is - drawn from *iterable* and the `bool` indicates whether - that item satisfies the *predicate* or is adjacent to an item that does. 
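# Editor's sketch (not part of the removed file; assumes the more_itertools
# package is installed): always_iterable() lets a function accept either a
# single string or an iterable of strings, as described in its docstring above.
from more_itertools import always_iterable

def tag_all(items):
    return ['#' + s for s in always_iterable(items)]

print(tag_all('python'))           # ['#python']
print(tag_all(['python', 'git']))  # ['#python', '#git']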
- - For example, to find whether items are adjacent to a ``3``:: - - >>> list(adjacent(lambda x: x == 3, range(6))) - [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)] - - Set *distance* to change what counts as adjacent. For example, to find - whether items are two places away from a ``3``: - - >>> list(adjacent(lambda x: x == 3, range(6), distance=2)) - [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)] - - This is useful for contextualizing the results of a search function. - For example, a code comparison tool might want to identify lines that - have changed, but also surrounding lines to give the viewer of the diff - context. - - The predicate function will only be called once for each item in the - iterable. - - See also :func:`groupby_transform`, which can be used with this function - to group ranges of items with the same `bool` value. - - """ - # Allow distance=0 mainly for testing that it reproduces results with map() - if distance < 0: - raise ValueError('distance must be at least 0') - - i1, i2 = tee(iterable) - padding = [False] * distance - selected = chain(padding, map(predicate, i1), padding) - adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1)) - return zip(adjacent_to_selected, i2) - - -def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None): - """An extension of :func:`itertools.groupby` that can apply transformations - to the grouped data. - - * *keyfunc* is a function computing a key value for each item in *iterable* - * *valuefunc* is a function that transforms the individual items from - *iterable* after grouping - * *reducefunc* is a function that transforms each group of items - - >>> iterable = 'aAAbBBcCC' - >>> keyfunc = lambda k: k.upper() - >>> valuefunc = lambda v: v.lower() - >>> reducefunc = lambda g: ''.join(g) - >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc)) - [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')] - - Each optional argument defaults to an identity function if not specified. - - :func:`groupby_transform` is useful when grouping elements of an iterable - using a separate iterable as the key. To do this, :func:`zip` the iterables - and pass a *keyfunc* that extracts the first element and a *valuefunc* - that extracts the second element:: - - >>> from operator import itemgetter - >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3] - >>> values = 'abcdefghi' - >>> iterable = zip(keys, values) - >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1)) - >>> [(k, ''.join(g)) for k, g in grouper] - [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')] - - Note that the order of items in the iterable is significant. - Only adjacent items are grouped together, so if you don't want any - duplicate groups, you should sort the iterable by the key function. - - """ - ret = groupby(iterable, keyfunc) - if valuefunc: - ret = ((k, map(valuefunc, g)) for k, g in ret) - if reducefunc: - ret = ((k, reducefunc(g)) for k, g in ret) - - return ret - - -class numeric_range(abc.Sequence, abc.Hashable): - """An extension of the built-in ``range()`` function whose arguments can - be any orderable numeric type. - - With only *stop* specified, *start* defaults to ``0`` and *step* - defaults to ``1``. The output items will match the type of *stop*: - - >>> list(numeric_range(3.5)) - [0.0, 1.0, 2.0, 3.0] - - With only *start* and *stop* specified, *step* defaults to ``1``. 
The - output items will match the type of *start*: - - >>> from decimal import Decimal - >>> start = Decimal('2.1') - >>> stop = Decimal('5.1') - >>> list(numeric_range(start, stop)) - [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')] - - With *start*, *stop*, and *step* specified the output items will match - the type of ``start + step``: - - >>> from fractions import Fraction - >>> start = Fraction(1, 2) # Start at 1/2 - >>> stop = Fraction(5, 2) # End at 5/2 - >>> step = Fraction(1, 2) # Count by 1/2 - >>> list(numeric_range(start, stop, step)) - [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)] - - If *step* is zero, ``ValueError`` is raised. Negative steps are supported: - - >>> list(numeric_range(3, -1, -1.0)) - [3.0, 2.0, 1.0, 0.0] - - Be aware of the limitations of floating point numbers; the representation - of the yielded numbers may be surprising. - - ``datetime.datetime`` objects can be used for *start* and *stop*, if *step* - is a ``datetime.timedelta`` object: - - >>> import datetime - >>> start = datetime.datetime(2019, 1, 1) - >>> stop = datetime.datetime(2019, 1, 3) - >>> step = datetime.timedelta(days=1) - >>> items = iter(numeric_range(start, stop, step)) - >>> next(items) - datetime.datetime(2019, 1, 1, 0, 0) - >>> next(items) - datetime.datetime(2019, 1, 2, 0, 0) - - """ - - _EMPTY_HASH = hash(range(0, 0)) - - def __init__(self, *args): - argc = len(args) - if argc == 1: - (self._stop,) = args - self._start = type(self._stop)(0) - self._step = type(self._stop - self._start)(1) - elif argc == 2: - self._start, self._stop = args - self._step = type(self._stop - self._start)(1) - elif argc == 3: - self._start, self._stop, self._step = args - elif argc == 0: - raise TypeError( - 'numeric_range expected at least ' - '1 argument, got {}'.format(argc) - ) - else: - raise TypeError( - 'numeric_range expected at most ' - '3 arguments, got {}'.format(argc) - ) - - self._zero = type(self._step)(0) - if self._step == self._zero: - raise ValueError('numeric_range() arg 3 must not be zero') - self._growing = self._step > self._zero - self._init_len() - - def __bool__(self): - if self._growing: - return self._start < self._stop - else: - return self._start > self._stop - - def __contains__(self, elem): - if self._growing: - if self._start <= elem < self._stop: - return (elem - self._start) % self._step == self._zero - else: - if self._start >= elem > self._stop: - return (self._start - elem) % (-self._step) == self._zero - - return False - - def __eq__(self, other): - if isinstance(other, numeric_range): - empty_self = not bool(self) - empty_other = not bool(other) - if empty_self or empty_other: - return empty_self and empty_other # True if both empty - else: - return ( - self._start == other._start - and self._step == other._step - and self._get_by_index(-1) == other._get_by_index(-1) - ) - else: - return False - - def __getitem__(self, key): - if isinstance(key, int): - return self._get_by_index(key) - elif isinstance(key, slice): - step = self._step if key.step is None else key.step * self._step - - if key.start is None or key.start <= -self._len: - start = self._start - elif key.start >= self._len: - start = self._stop - else: # -self._len < key.start < self._len - start = self._get_by_index(key.start) - - if key.stop is None or key.stop >= self._len: - stop = self._stop - elif key.stop <= -self._len: - stop = self._start - else: # -self._len < key.stop < self._len - stop = self._get_by_index(key.stop) - - return numeric_range(start, stop, step) - else: - raise 
TypeError( - 'numeric range indices must be ' - 'integers or slices, not {}'.format(type(key).__name__) - ) - - def __hash__(self): - if self: - return hash((self._start, self._get_by_index(-1), self._step)) - else: - return self._EMPTY_HASH - - def __iter__(self): - values = (self._start + (n * self._step) for n in count()) - if self._growing: - return takewhile(partial(gt, self._stop), values) - else: - return takewhile(partial(lt, self._stop), values) - - def __len__(self): - return self._len - - def _init_len(self): - if self._growing: - start = self._start - stop = self._stop - step = self._step - else: - start = self._stop - stop = self._start - step = -self._step - distance = stop - start - if distance <= self._zero: - self._len = 0 - else: # distance > 0 and step > 0: regular euclidean division - q, r = divmod(distance, step) - self._len = int(q) + int(r != self._zero) - - def __reduce__(self): - return numeric_range, (self._start, self._stop, self._step) - - def __repr__(self): - if self._step == 1: - return "numeric_range({}, {})".format( - repr(self._start), repr(self._stop) - ) - else: - return "numeric_range({}, {}, {})".format( - repr(self._start), repr(self._stop), repr(self._step) - ) - - def __reversed__(self): - return iter( - numeric_range( - self._get_by_index(-1), self._start - self._step, -self._step - ) - ) - - def count(self, value): - return int(value in self) - - def index(self, value): - if self._growing: - if self._start <= value < self._stop: - q, r = divmod(value - self._start, self._step) - if r == self._zero: - return int(q) - else: - if self._start >= value > self._stop: - q, r = divmod(self._start - value, -self._step) - if r == self._zero: - return int(q) - - raise ValueError("{} is not in numeric range".format(value)) - - def _get_by_index(self, i): - if i < 0: - i += self._len - if i < 0 or i >= self._len: - raise IndexError("numeric range object index out of range") - return self._start + i * self._step - - -def count_cycle(iterable, n=None): - """Cycle through the items from *iterable* up to *n* times, yielding - the number of completed cycles along with each item. If *n* is omitted the - process repeats indefinitely. - - >>> list(count_cycle('AB', 3)) - [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')] - - """ - iterable = tuple(iterable) - if not iterable: - return iter(()) - counter = count() if n is None else range(n) - return ((i, item) for i in counter for item in iterable) - - -def mark_ends(iterable): - """Yield 3-tuples of the form ``(is_first, is_last, item)``. - - >>> list(mark_ends('ABC')) - [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')] - - Use this when looping over an iterable to take special action on its first - and/or last items: - - >>> iterable = ['Header', 100, 200, 'Footer'] - >>> total = 0 - >>> for is_first, is_last, item in mark_ends(iterable): - ... if is_first: - ... continue # Skip the header - ... if is_last: - ... continue # Skip the footer - ... total += item - >>> print(total) - 300 - """ - it = iter(iterable) - - try: - b = next(it) - except StopIteration: - return - - try: - for i in count(): - a = b - b = next(it) - yield i == 0, False, a - - except StopIteration: - yield i == 0, True, a - - -def locate(iterable, pred=bool, window_size=None): - """Yield the index of each item in *iterable* for which *pred* returns - ``True``. 
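# Editor's sketch (not part of the removed file; assumes the more_itertools
# package is installed): mark_ends() flags the first and last items, which is
# handy for building "a, b and c" style listings.
from more_itertools import mark_ends

parts = []
for is_first, is_last, word in mark_ends(['red', 'green', 'blue']):
    if not is_first:
        parts.append(' and ' if is_last else ', ')
    parts.append(word)
print(''.join(parts))  # red, green and blue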
- - *pred* defaults to :func:`bool`, which will select truthy items: - - >>> list(locate([0, 1, 1, 0, 1, 0, 0])) - [1, 2, 4] - - Set *pred* to a custom function to, e.g., find the indexes for a particular - item. - - >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b')) - [1, 3] - - If *window_size* is given, then the *pred* function will be called with - that many items. This enables searching for sub-sequences: - - >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] - >>> pred = lambda *args: args == (1, 2, 3) - >>> list(locate(iterable, pred=pred, window_size=3)) - [1, 5, 9] - - Use with :func:`seekable` to find indexes and then retrieve the associated - items: - - >>> from itertools import count - >>> from more_itertools import seekable - >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count()) - >>> it = seekable(source) - >>> pred = lambda x: x > 100 - >>> indexes = locate(it, pred=pred) - >>> i = next(indexes) - >>> it.seek(i) - >>> next(it) - 106 - - """ - if window_size is None: - return compress(count(), map(pred, iterable)) - - if window_size < 1: - raise ValueError('window size must be at least 1') - - it = windowed(iterable, window_size, fillvalue=_marker) - return compress(count(), starmap(pred, it)) - - -def lstrip(iterable, pred): - """Yield the items from *iterable*, but strip any from the beginning - for which *pred* returns ``True``. - - For example, to remove a set of items from the start of an iterable: - - >>> iterable = (None, False, None, 1, 2, None, 3, False, None) - >>> pred = lambda x: x in {None, False, ''} - >>> list(lstrip(iterable, pred)) - [1, 2, None, 3, False, None] - - This function is analogous to to :func:`str.lstrip`, and is essentially - an wrapper for :func:`itertools.dropwhile`. - - """ - return dropwhile(pred, iterable) - - -def rstrip(iterable, pred): - """Yield the items from *iterable*, but strip any from the end - for which *pred* returns ``True``. - - For example, to remove a set of items from the end of an iterable: - - >>> iterable = (None, False, None, 1, 2, None, 3, False, None) - >>> pred = lambda x: x in {None, False, ''} - >>> list(rstrip(iterable, pred)) - [None, False, None, 1, 2, None, 3] - - This function is analogous to :func:`str.rstrip`. - - """ - cache = [] - cache_append = cache.append - cache_clear = cache.clear - for x in iterable: - if pred(x): - cache_append(x) - else: - yield from cache - cache_clear() - yield x - - -def strip(iterable, pred): - """Yield the items from *iterable*, but strip any from the - beginning and end for which *pred* returns ``True``. - - For example, to remove a set of items from both ends of an iterable: - - >>> iterable = (None, False, None, 1, 2, None, 3, False, None) - >>> pred = lambda x: x in {None, False, ''} - >>> list(strip(iterable, pred)) - [1, 2, None, 3] - - This function is analogous to :func:`str.strip`. - - """ - return rstrip(lstrip(iterable, pred), pred) - - -class islice_extended: - """An extension of :func:`itertools.islice` that supports negative values - for *stop*, *start*, and *step*. - - >>> iterable = iter('abcdefgh') - >>> list(islice_extended(iterable, -4, -1)) - ['e', 'f', 'g'] - - Slices with negative values require some caching of *iterable*, but this - function takes care to minimize the amount of memory required. 
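# Editor's sketch (not part of the removed file; assumes the more_itertools
# package is installed): strip() trims unwanted items from both ends only,
# here blank lines surrounding a block of text, per the docstrings above.
from more_itertools import strip

lines = ['', '', 'first line', '', 'last line', '', '']
print(list(strip(lines, lambda line: line == '')))
# ['first line', '', 'last line']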
- - For example, you can use a negative step with an infinite iterator: - - >>> from itertools import count - >>> list(islice_extended(count(), 110, 99, -2)) - [110, 108, 106, 104, 102, 100] - - You can also use slice notation directly: - - >>> iterable = map(str, count()) - >>> it = islice_extended(iterable)[10:20:2] - >>> list(it) - ['10', '12', '14', '16', '18'] - - """ - - def __init__(self, iterable, *args): - it = iter(iterable) - if args: - self._iterable = _islice_helper(it, slice(*args)) - else: - self._iterable = it - - def __iter__(self): - return self - - def __next__(self): - return next(self._iterable) - - def __getitem__(self, key): - if isinstance(key, slice): - return islice_extended(_islice_helper(self._iterable, key)) - - raise TypeError('islice_extended.__getitem__ argument must be a slice') - - -def _islice_helper(it, s): - start = s.start - stop = s.stop - if s.step == 0: - raise ValueError('step argument must be a non-zero integer or None.') - step = s.step or 1 - - if step > 0: - start = 0 if (start is None) else start - - if start < 0: - # Consume all but the last -start items - cache = deque(enumerate(it, 1), maxlen=-start) - len_iter = cache[-1][0] if cache else 0 - - # Adjust start to be positive - i = max(len_iter + start, 0) - - # Adjust stop to be positive - if stop is None: - j = len_iter - elif stop >= 0: - j = min(stop, len_iter) - else: - j = max(len_iter + stop, 0) - - # Slice the cache - n = j - i - if n <= 0: - return - - for index, item in islice(cache, 0, n, step): - yield item - elif (stop is not None) and (stop < 0): - # Advance to the start position - next(islice(it, start, start), None) - - # When stop is negative, we have to carry -stop items while - # iterating - cache = deque(islice(it, -stop), maxlen=-stop) - - for index, item in enumerate(it): - cached_item = cache.popleft() - if index % step == 0: - yield cached_item - cache.append(item) - else: - # When both start and stop are positive we have the normal case - yield from islice(it, start, stop, step) - else: - start = -1 if (start is None) else start - - if (stop is not None) and (stop < 0): - # Consume all but the last items - n = -stop - 1 - cache = deque(enumerate(it, 1), maxlen=n) - len_iter = cache[-1][0] if cache else 0 - - # If start and stop are both negative they are comparable and - # we can just slice. Otherwise we can adjust start to be negative - # and then slice. - if start < 0: - i, j = start, stop - else: - i, j = min(start - len_iter, -1), None - - for index, item in list(cache)[i:j:step]: - yield item - else: - # Advance to the stop position - if stop is not None: - m = stop + 1 - next(islice(it, m, m), None) - - # stop is positive, so if start is negative they are not comparable - # and we need the rest of the items. - if start < 0: - i = start - n = None - # stop is None and start is positive, so we just need items up to - # the start index. - elif stop is None: - i = None - n = start + 1 - # Both stop and start are positive, so they are comparable. - else: - i = None - n = start - stop - if n <= 0: - return - - cache = list(islice(it, n)) - - yield from cache[i::step] - - -def always_reversible(iterable): - """An extension of :func:`reversed` that supports all iterables, not - just those which implement the ``Reversible`` or ``Sequence`` protocols. - - >>> print(*always_reversible(x for x in range(3))) - 2 1 0 - - If the iterable is already reversible, this function returns the - result of :func:`reversed()`. 
If the iterable is not reversible, - this function will cache the remaining items in the iterable and - yield them in reverse order, which may require significant storage. - """ - try: - return reversed(iterable) - except TypeError: - return reversed(list(iterable)) - - -def consecutive_groups(iterable, ordering=lambda x: x): - """Yield groups of consecutive items using :func:`itertools.groupby`. - The *ordering* function determines whether two items are adjacent by - returning their position. - - By default, the ordering function is the identity function. This is - suitable for finding runs of numbers: - - >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40] - >>> for group in consecutive_groups(iterable): - ... print(list(group)) - [1] - [10, 11, 12] - [20] - [30, 31, 32, 33] - [40] - - For finding runs of adjacent letters, try using the :meth:`index` method - of a string of letters: - - >>> from string import ascii_lowercase - >>> iterable = 'abcdfgilmnop' - >>> ordering = ascii_lowercase.index - >>> for group in consecutive_groups(iterable, ordering): - ... print(list(group)) - ['a', 'b', 'c', 'd'] - ['f', 'g'] - ['i'] - ['l', 'm', 'n', 'o', 'p'] - - Each group of consecutive items is an iterator that shares it source with - *iterable*. When an an output group is advanced, the previous group is - no longer available unless its elements are copied (e.g., into a ``list``). - - >>> iterable = [1, 2, 11, 12, 21, 22] - >>> saved_groups = [] - >>> for group in consecutive_groups(iterable): - ... saved_groups.append(list(group)) # Copy group elements - >>> saved_groups - [[1, 2], [11, 12], [21, 22]] - - """ - for k, g in groupby( - enumerate(iterable), key=lambda x: x[0] - ordering(x[1]) - ): - yield map(itemgetter(1), g) - - -def difference(iterable, func=sub, *, initial=None): - """This function is the inverse of :func:`itertools.accumulate`. By default - it will compute the first difference of *iterable* using - :func:`operator.sub`: - - >>> from itertools import accumulate - >>> iterable = accumulate([0, 1, 2, 3, 4]) # produces 0, 1, 3, 6, 10 - >>> list(difference(iterable)) - [0, 1, 2, 3, 4] - - *func* defaults to :func:`operator.sub`, but other functions can be - specified. They will be applied as follows:: - - A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ... - - For example, to do progressive division: - - >>> iterable = [1, 2, 6, 24, 120] - >>> func = lambda x, y: x // y - >>> list(difference(iterable, func)) - [1, 2, 3, 4, 5] - - If the *initial* keyword is set, the first element will be skipped when - computing successive differences. - - >>> it = [10, 11, 13, 16] # from accumulate([1, 2, 3], initial=10) - >>> list(difference(it, initial=10)) - [1, 2, 3] - - """ - a, b = tee(iterable) - try: - first = [next(b)] - except StopIteration: - return iter([]) - - if initial is not None: - first = [] - - return chain(first, starmap(func, zip(b, a))) - - -class SequenceView(Sequence): - """Return a read-only view of the sequence object *target*. - - :class:`SequenceView` objects are analogous to Python's built-in - "dictionary view" types. They provide a dynamic view of a sequence's items, - meaning that when the sequence updates, so does the view. - - >>> seq = ['0', '1', '2'] - >>> view = SequenceView(seq) - >>> view - SequenceView(['0', '1', '2']) - >>> seq.append('3') - >>> view - SequenceView(['0', '1', '2', '3']) - - Sequence views support indexing, slicing, and length queries. 
They act - like the underlying sequence, except they don't allow assignment: - - >>> view[1] - '1' - >>> view[1:-1] - ['1', '2'] - >>> len(view) - 4 - - Sequence views are useful as an alternative to copying, as they don't - require (much) extra storage. - - """ - - def __init__(self, target): - if not isinstance(target, Sequence): - raise TypeError - self._target = target - - def __getitem__(self, index): - return self._target[index] - - def __len__(self): - return len(self._target) - - def __repr__(self): - return '{}({})'.format(self.__class__.__name__, repr(self._target)) - - -class seekable: - """Wrap an iterator to allow for seeking backward and forward. This - progressively caches the items in the source iterable so they can be - re-visited. - - Call :meth:`seek` with an index to seek to that position in the source - iterable. - - To "reset" an iterator, seek to ``0``: - - >>> from itertools import count - >>> it = seekable((str(n) for n in count())) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> it.seek(0) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> next(it) - '3' - - You can also seek forward: - - >>> it = seekable((str(n) for n in range(20))) - >>> it.seek(10) - >>> next(it) - '10' - >>> it.seek(20) # Seeking past the end of the source isn't a problem - >>> list(it) - [] - >>> it.seek(0) # Resetting works even after hitting the end - >>> next(it), next(it), next(it) - ('0', '1', '2') - - Call :meth:`peek` to look ahead one item without advancing the iterator: - - >>> it = seekable('1234') - >>> it.peek() - '1' - >>> list(it) - ['1', '2', '3', '4'] - >>> it.peek(default='empty') - 'empty' - - Before the iterator is at its end, calling :func:`bool` on it will return - ``True``. After it will return ``False``: - - >>> it = seekable('5678') - >>> bool(it) - True - >>> list(it) - ['5', '6', '7', '8'] - >>> bool(it) - False - - You may view the contents of the cache with the :meth:`elements` method. - That returns a :class:`SequenceView`, a view that updates automatically: - - >>> it = seekable((str(n) for n in range(10))) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> elements = it.elements() - >>> elements - SequenceView(['0', '1', '2']) - >>> next(it) - '3' - >>> elements - SequenceView(['0', '1', '2', '3']) - - By default, the cache grows as the source iterable progresses, so beware of - wrapping very large or infinite iterables. Supply *maxlen* to limit the - size of the cache (this of course limits how far back you can seek). 
- - >>> from itertools import count - >>> it = seekable((str(n) for n in count()), maxlen=2) - >>> next(it), next(it), next(it), next(it) - ('0', '1', '2', '3') - >>> list(it.elements()) - ['2', '3'] - >>> it.seek(0) - >>> next(it), next(it), next(it), next(it) - ('2', '3', '4', '5') - >>> next(it) - '6' - - """ - - def __init__(self, iterable, maxlen=None): - self._source = iter(iterable) - if maxlen is None: - self._cache = [] - else: - self._cache = deque([], maxlen) - self._index = None - - def __iter__(self): - return self - - def __next__(self): - if self._index is not None: - try: - item = self._cache[self._index] - except IndexError: - self._index = None - else: - self._index += 1 - return item - - item = next(self._source) - self._cache.append(item) - return item - - def __bool__(self): - try: - self.peek() - except StopIteration: - return False - return True - - def peek(self, default=_marker): - try: - peeked = next(self) - except StopIteration: - if default is _marker: - raise - return default - if self._index is None: - self._index = len(self._cache) - self._index -= 1 - return peeked - - def elements(self): - return SequenceView(self._cache) - - def seek(self, index): - self._index = index - remainder = index - len(self._cache) - if remainder > 0: - consume(self, remainder) - - -class run_length: - """ - :func:`run_length.encode` compresses an iterable with run-length encoding. - It yields groups of repeated items with the count of how many times they - were repeated: - - >>> uncompressed = 'abbcccdddd' - >>> list(run_length.encode(uncompressed)) - [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - - :func:`run_length.decode` decompresses an iterable that was previously - compressed with run-length encoding. It yields the items of the - decompressed iterable: - - >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - >>> list(run_length.decode(compressed)) - ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd'] - - """ - - @staticmethod - def encode(iterable): - return ((k, ilen(g)) for k, g in groupby(iterable)) - - @staticmethod - def decode(iterable): - return chain.from_iterable(repeat(k, n) for k, n in iterable) - - -def exactly_n(iterable, n, predicate=bool): - """Return ``True`` if exactly ``n`` items in the iterable are ``True`` - according to the *predicate* function. - - >>> exactly_n([True, True, False], 2) - True - >>> exactly_n([True, True, False], 1) - False - >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3) - True - - The iterable will be advanced until ``n + 1`` truthy items are encountered, - so avoid calling it on infinite iterables. - - """ - return len(take(n + 1, filter(predicate, iterable))) == n - - -def circular_shifts(iterable): - """Return a list of circular shifts of *iterable*. - - >>> circular_shifts(range(4)) - [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)] - """ - lst = list(iterable) - return take(len(lst), windowed(cycle(lst), len(lst))) - - -def make_decorator(wrapping_func, result_index=0): - """Return a decorator version of *wrapping_func*, which is a function that - modifies an iterable. *result_index* is the position in that function's - signature where the iterable goes. - - This lets you use itertools on the "production end," i.e. at function - definition. This can augment what the function returns without changing the - function's code. - - For example, to produce a decorator version of :func:`chunked`: - - >>> from more_itertools import chunked - >>> chunker = make_decorator(chunked, result_index=0) - >>> @chunker(3) - ... 
def iter_range(n): - ... return iter(range(n)) - ... - >>> list(iter_range(9)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8]] - - To only allow truthy items to be returned: - - >>> truth_serum = make_decorator(filter, result_index=1) - >>> @truth_serum(bool) - ... def boolean_test(): - ... return [0, 1, '', ' ', False, True] - ... - >>> list(boolean_test()) - [1, ' ', True] - - The :func:`peekable` and :func:`seekable` wrappers make for practical - decorators: - - >>> from more_itertools import peekable - >>> peekable_function = make_decorator(peekable) - >>> @peekable_function() - ... def str_range(*args): - ... return (str(x) for x in range(*args)) - ... - >>> it = str_range(1, 20, 2) - >>> next(it), next(it), next(it) - ('1', '3', '5') - >>> it.peek() - '7' - >>> next(it) - '7' - - """ - # See https://sites.google.com/site/bbayles/index/decorator_factory for - # notes on how this works. - def decorator(*wrapping_args, **wrapping_kwargs): - def outer_wrapper(f): - def inner_wrapper(*args, **kwargs): - result = f(*args, **kwargs) - wrapping_args_ = list(wrapping_args) - wrapping_args_.insert(result_index, result) - return wrapping_func(*wrapping_args_, **wrapping_kwargs) - - return inner_wrapper - - return outer_wrapper - - return decorator - - -def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None): - """Return a dictionary that maps the items in *iterable* to categories - defined by *keyfunc*, transforms them with *valuefunc*, and - then summarizes them by category with *reducefunc*. - - *valuefunc* defaults to the identity function if it is unspecified. - If *reducefunc* is unspecified, no summarization takes place: - - >>> keyfunc = lambda x: x.upper() - >>> result = map_reduce('abbccc', keyfunc) - >>> sorted(result.items()) - [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])] - - Specifying *valuefunc* transforms the categorized items: - - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: 1 - >>> result = map_reduce('abbccc', keyfunc, valuefunc) - >>> sorted(result.items()) - [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])] - - Specifying *reducefunc* summarizes the categorized items: - - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: 1 - >>> reducefunc = sum - >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc) - >>> sorted(result.items()) - [('A', 1), ('B', 2), ('C', 3)] - - You may want to filter the input iterable before applying the map/reduce - procedure: - - >>> all_items = range(30) - >>> items = [x for x in all_items if 10 <= x <= 20] # Filter - >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1 - >>> categories = map_reduce(items, keyfunc=keyfunc) - >>> sorted(categories.items()) - [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])] - >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum) - >>> sorted(summaries.items()) - [(0, 90), (1, 75)] - - Note that all items in the iterable are gathered into a list before the - summarization step, which may require significant storage. - - The returned object is a :obj:`collections.defaultdict` with the - ``default_factory`` set to ``None``, such that it behaves like a normal - dictionary. 
- - """ - valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc - - ret = defaultdict(list) - for item in iterable: - key = keyfunc(item) - value = valuefunc(item) - ret[key].append(value) - - if reducefunc is not None: - for key, value_list in ret.items(): - ret[key] = reducefunc(value_list) - - ret.default_factory = None - return ret - - -def rlocate(iterable, pred=bool, window_size=None): - """Yield the index of each item in *iterable* for which *pred* returns - ``True``, starting from the right and moving left. - - *pred* defaults to :func:`bool`, which will select truthy items: - - >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4 - [4, 2, 1] - - Set *pred* to a custom function to, e.g., find the indexes for a particular - item: - - >>> iterable = iter('abcb') - >>> pred = lambda x: x == 'b' - >>> list(rlocate(iterable, pred)) - [3, 1] - - If *window_size* is given, then the *pred* function will be called with - that many items. This enables searching for sub-sequences: - - >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] - >>> pred = lambda *args: args == (1, 2, 3) - >>> list(rlocate(iterable, pred=pred, window_size=3)) - [9, 5, 1] - - Beware, this function won't return anything for infinite iterables. - If *iterable* is reversible, ``rlocate`` will reverse it and search from - the right. Otherwise, it will search from the left and return the results - in reverse order. - - See :func:`locate` to for other example applications. - - """ - if window_size is None: - try: - len_iter = len(iterable) - return (len_iter - i - 1 for i in locate(reversed(iterable), pred)) - except TypeError: - pass - - return reversed(list(locate(iterable, pred, window_size))) - - -def replace(iterable, pred, substitutes, count=None, window_size=1): - """Yield the items from *iterable*, replacing the items for which *pred* - returns ``True`` with the items from the iterable *substitutes*. - - >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1] - >>> pred = lambda x: x == 0 - >>> substitutes = (2, 3) - >>> list(replace(iterable, pred, substitutes)) - [1, 1, 2, 3, 1, 1, 2, 3, 1, 1] - - If *count* is given, the number of replacements will be limited: - - >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0] - >>> pred = lambda x: x == 0 - >>> substitutes = [None] - >>> list(replace(iterable, pred, substitutes, count=2)) - [1, 1, None, 1, 1, None, 1, 1, 0] - - Use *window_size* to control the number of items passed as arguments to - *pred*. This allows for locating and replacing subsequences. - - >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5] - >>> window_size = 3 - >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred - >>> substitutes = [3, 4] # Splice in these items - >>> list(replace(iterable, pred, substitutes, window_size=window_size)) - [3, 4, 5, 3, 4, 5] - - """ - if window_size < 1: - raise ValueError('window_size must be at least 1') - - # Save the substitutes iterable, since it's used more than once - substitutes = tuple(substitutes) - - # Add padding such that the number of windows matches the length of the - # iterable - it = chain(iterable, [_marker] * (window_size - 1)) - windows = windowed(it, window_size) - - n = 0 - for w in windows: - # If the current window matches our predicate (and we haven't hit - # our maximum number of replacements), splice in the substitutes - # and then consume the following windows that overlap with this one. - # For example, if the iterable is (0, 1, 2, 3, 4...) - # and the window size is 2, we have (0, 1), (1, 2), (2, 3)... 
- # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2) - if pred(*w): - if (count is None) or (n < count): - n += 1 - yield from substitutes - consume(windows, window_size - 1) - continue - - # If there was no match (or we've reached the replacement limit), - # yield the first item from the window. - if w and (w[0] is not _marker): - yield w[0] - - -def partitions(iterable): - """Yield all possible order-preserving partitions of *iterable*. - - >>> iterable = 'abc' - >>> for part in partitions(iterable): - ... print([''.join(p) for p in part]) - ['abc'] - ['a', 'bc'] - ['ab', 'c'] - ['a', 'b', 'c'] - - This is unrelated to :func:`partition`. - - """ - sequence = list(iterable) - n = len(sequence) - for i in powerset(range(1, n)): - yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))] - - -def set_partitions(iterable, k=None): - """ - Yield the set partitions of *iterable* into *k* parts. Set partitions are - not order-preserving. - - >>> iterable = 'abc' - >>> for part in set_partitions(iterable, 2): - ... print([''.join(p) for p in part]) - ['a', 'bc'] - ['ab', 'c'] - ['b', 'ac'] - - - If *k* is not given, every set partition is generated. - - >>> iterable = 'abc' - >>> for part in set_partitions(iterable): - ... print([''.join(p) for p in part]) - ['abc'] - ['a', 'bc'] - ['ab', 'c'] - ['b', 'ac'] - ['a', 'b', 'c'] - - """ - L = list(iterable) - n = len(L) - if k is not None: - if k < 1: - raise ValueError( - "Can't partition in a negative or zero number of groups" - ) - elif k > n: - return - - def set_partitions_helper(L, k): - n = len(L) - if k == 1: - yield [L] - elif n == k: - yield [[s] for s in L] - else: - e, *M = L - for p in set_partitions_helper(M, k - 1): - yield [[e], *p] - for p in set_partitions_helper(M, k): - for i in range(len(p)): - yield p[:i] + [[e] + p[i]] + p[i + 1 :] - - if k is None: - for k in range(1, n + 1): - yield from set_partitions_helper(L, k) - else: - yield from set_partitions_helper(L, k) - - -class time_limited: - """ - Yield items from *iterable* until *limit_seconds* have passed. - If the time limit expires before all items have been yielded, the - ``timed_out`` parameter will be set to ``True``. - - >>> from time import sleep - >>> def generator(): - ... yield 1 - ... yield 2 - ... sleep(0.2) - ... yield 3 - >>> iterable = time_limited(0.1, generator()) - >>> list(iterable) - [1, 2] - >>> iterable.timed_out - True - - Note that the time is checked before each item is yielded, and iteration - stops if the time elapsed is greater than *limit_seconds*. If your time - limit is 1 second, but it takes 2 seconds to generate the first item from - the iterable, the function will run for 2 seconds and not yield anything. - - """ - - def __init__(self, limit_seconds, iterable): - if limit_seconds < 0: - raise ValueError('limit_seconds must be positive') - self.limit_seconds = limit_seconds - self._iterable = iter(iterable) - self._start_time = monotonic() - self.timed_out = False - - def __iter__(self): - return self - - def __next__(self): - item = next(self._iterable) - if monotonic() - self._start_time > self.limit_seconds: - self.timed_out = True - raise StopIteration - - return item - - -def only(iterable, default=None, too_long=None): - """If *iterable* has only one item, return it. - If it has zero items, return *default*. - If it has more than one item, raise the exception given by *too_long*, - which is ``ValueError`` by default. 
- - >>> only([], default='missing') - 'missing' - >>> only([1]) - 1 - >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: Expected exactly one item in iterable, but got 1, 2, - and perhaps more.' - >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - TypeError - - Note that :func:`only` attempts to advance *iterable* twice to ensure there - is only one item. See :func:`spy` or :func:`peekable` to check - iterable contents less destructively. - """ - it = iter(iterable) - first_value = next(it, default) - - try: - second_value = next(it) - except StopIteration: - pass - else: - msg = ( - 'Expected exactly one item in iterable, but got {!r}, {!r}, ' - 'and perhaps more.'.format(first_value, second_value) - ) - raise too_long or ValueError(msg) - - return first_value - - -def ichunked(iterable, n): - """Break *iterable* into sub-iterables with *n* elements each. - :func:`ichunked` is like :func:`chunked`, but it yields iterables - instead of lists. - - If the sub-iterables are read in order, the elements of *iterable* - won't be stored in memory. - If they are read out of order, :func:`itertools.tee` is used to cache - elements as necessary. - - >>> from itertools import count - >>> all_chunks = ichunked(count(), 4) - >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks) - >>> list(c_2) # c_1's elements have been cached; c_3's haven't been - [4, 5, 6, 7] - >>> list(c_1) - [0, 1, 2, 3] - >>> list(c_3) - [8, 9, 10, 11] - - """ - source = iter(iterable) - - while True: - # Check to see whether we're at the end of the source iterable - item = next(source, _marker) - if item is _marker: - return - - # Clone the source and yield an n-length slice - source, it = tee(chain([item], source)) - yield islice(it, n) - - # Advance the source iterable - consume(source, n) - - -def distinct_combinations(iterable, r): - """Yield the distinct combinations of *r* items taken from *iterable*. - - >>> list(distinct_combinations([0, 0, 1], 2)) - [(0, 0), (0, 1)] - - Equivalent to ``set(combinations(iterable))``, except duplicates are not - generated and thrown away. For larger input sequences this is much more - efficient. - - """ - if r < 0: - raise ValueError('r must be non-negative') - elif r == 0: - yield () - return - pool = tuple(iterable) - generators = [unique_everseen(enumerate(pool), key=itemgetter(1))] - current_combo = [None] * r - level = 0 - while generators: - try: - cur_idx, p = next(generators[-1]) - except StopIteration: - generators.pop() - level -= 1 - continue - current_combo[level] = p - if level + 1 == r: - yield tuple(current_combo) - else: - generators.append( - unique_everseen( - enumerate(pool[cur_idx + 1 :], cur_idx + 1), - key=itemgetter(1), - ) - ) - level += 1 - - -def filter_except(validator, iterable, *exceptions): - """Yield the items from *iterable* for which the *validator* function does - not raise one of the specified *exceptions*. - - *validator* is called for each item in *iterable*. - It should be a function that accepts one argument and raises an exception - if that item is not valid. - - >>> iterable = ['1', '2', 'three', '4', None] - >>> list(filter_except(int, iterable, ValueError, TypeError)) - ['1', '2', '4'] - - If an exception other than one given by *exceptions* is raised by - *validator*, it is raised like normal. 
- """ - for item in iterable: - try: - validator(item) - except exceptions: - pass - else: - yield item - - -def map_except(function, iterable, *exceptions): - """Transform each item from *iterable* with *function* and yield the - result, unless *function* raises one of the specified *exceptions*. - - *function* is called to transform each item in *iterable*. - It should be a accept one argument. - - >>> iterable = ['1', '2', 'three', '4', None] - >>> list(map_except(int, iterable, ValueError, TypeError)) - [1, 2, 4] - - If an exception other than one given by *exceptions* is raised by - *function*, it is raised like normal. - """ - for item in iterable: - try: - yield function(item) - except exceptions: - pass - - -def _sample_unweighted(iterable, k): - # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li: - # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))". - - # Fill up the reservoir (collection of samples) with the first `k` samples - reservoir = take(k, iterable) - - # Generate random number that's the largest in a sample of k U(0,1) numbers - # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic - W = exp(log(random()) / k) - - # The number of elements to skip before changing the reservoir is a random - # number with a geometric distribution. Sample it using random() and logs. - next_index = k + floor(log(random()) / log(1 - W)) - - for index, element in enumerate(iterable, k): - - if index == next_index: - reservoir[randrange(k)] = element - # The new W is the largest in a sample of k U(0, `old_W`) numbers - W *= exp(log(random()) / k) - next_index += floor(log(random()) / log(1 - W)) + 1 - - return reservoir - - -def _sample_weighted(iterable, k, weights): - # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. : - # "Weighted random sampling with a reservoir". - - # Log-transform for numerical stability for weights that are small/large - weight_keys = (log(random()) / weight for weight in weights) - - # Fill up the reservoir (collection of samples) with the first `k` - # weight-keys and elements, then heapify the list. - reservoir = take(k, zip(weight_keys, iterable)) - heapify(reservoir) - - # The number of jumps before changing the reservoir is a random variable - # with an exponential distribution. Sample it using random() and logs. - smallest_weight_key, _ = reservoir[0] - weights_to_skip = log(random()) / smallest_weight_key - - for weight, element in zip(weights, iterable): - if weight >= weights_to_skip: - # The notation here is consistent with the paper, but we store - # the weight-keys in log-space for better numerical stability. - smallest_weight_key, _ = reservoir[0] - t_w = exp(weight * smallest_weight_key) - r_2 = uniform(t_w, 1) # generate U(t_w, 1) - weight_key = log(r_2) / weight - heapreplace(reservoir, (weight_key, element)) - smallest_weight_key, _ = reservoir[0] - weights_to_skip = log(random()) / smallest_weight_key - else: - weights_to_skip -= weight - - # Equivalent to [element for weight_key, element in sorted(reservoir)] - return [heappop(reservoir)[1] for _ in range(k)] - - -def sample(iterable, k, weights=None): - """Return a *k*-length list of elements chosen (without replacement) - from the *iterable*. Like :func:`random.sample`, but works on iterables - of unknown length. 
- - >>> iterable = range(100) - >>> sample(iterable, 5) # doctest: +SKIP - [81, 60, 96, 16, 4] - - An iterable with *weights* may also be given: - - >>> iterable = range(100) - >>> weights = (i * i + 1 for i in range(100)) - >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP - [79, 67, 74, 66, 78] - - The algorithm can also be used to generate weighted random permutations. - The relative weight of each item determines the probability that it - appears late in the permutation. - - >>> data = "abcdefgh" - >>> weights = range(1, len(data) + 1) - >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP - ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f'] - """ - if k == 0: - return [] - - iterable = iter(iterable) - if weights is None: - return _sample_unweighted(iterable, k) - else: - weights = iter(weights) - return _sample_weighted(iterable, k, weights) - - -def is_sorted(iterable, key=None, reverse=False): - """Returns ``True`` if the items of iterable are in sorted order, and - ``False`` otherwise. *key* and *reverse* have the same meaning that they do - in the built-in :func:`sorted` function. - - >>> is_sorted(['1', '2', '3', '4', '5'], key=int) - True - >>> is_sorted([5, 4, 3, 1, 2], reverse=True) - False - - The function returns ``False`` after encountering the first out-of-order - item. If there are no out-of-order items, the iterable is exhausted. - """ - - compare = lt if reverse else gt - it = iterable if (key is None) else map(key, iterable) - return not any(starmap(compare, pairwise(it))) - - -class AbortThread(BaseException): - pass - - -class callback_iter: - """Convert a function that uses callbacks to an iterator. - - Let *func* be a function that takes a `callback` keyword argument. - For example: - - >>> def func(callback=None): - ... for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]: - ... if callback: - ... callback(i, c) - ... return 4 - - - Use ``with callback_iter(func)`` to get an iterator over the parameters - that are delivered to the callback. - - >>> with callback_iter(func) as it: - ... for args, kwargs in it: - ... print(args) - (1, 'a') - (2, 'b') - (3, 'c') - - The function will be called in a background thread. The ``done`` property - indicates whether it has completed execution. - - >>> it.done - True - - If it completes successfully, its return value will be available - in the ``result`` property. - - >>> it.result - 4 - - Notes: - - * If the function uses some keyword argument besides ``callback``, supply - *callback_kwd*. - * If it finished executing, but raised an exception, accessing the - ``result`` property will raise the same exception. - * If it hasn't finished executing, accessing the ``result`` - property from within the ``with`` block will raise ``RuntimeError``. - * If it hasn't finished executing, accessing the ``result`` property from - outside the ``with`` block will raise a - ``more_itertools.AbortThread`` exception. - * Provide *wait_seconds* to adjust how frequently the it is polled for - output. 
- - """ - - def __init__(self, func, callback_kwd='callback', wait_seconds=0.1): - self._func = func - self._callback_kwd = callback_kwd - self._aborted = False - self._future = None - self._wait_seconds = wait_seconds - self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) - self._iterator = self._reader() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - self._aborted = True - self._executor.shutdown() - - def __iter__(self): - return self - - def __next__(self): - return next(self._iterator) - - @property - def done(self): - if self._future is None: - return False - return self._future.done() - - @property - def result(self): - if not self.done: - raise RuntimeError('Function has not yet completed') - - return self._future.result() - - def _reader(self): - q = Queue() - - def callback(*args, **kwargs): - if self._aborted: - raise AbortThread('canceled by user') - - q.put((args, kwargs)) - - self._future = self._executor.submit( - self._func, **{self._callback_kwd: callback} - ) - - while True: - try: - item = q.get(timeout=self._wait_seconds) - except Empty: - pass - else: - q.task_done() - yield item - - if self._future.done(): - break - - remaining = [] - while True: - try: - item = q.get_nowait() - except Empty: - break - else: - q.task_done() - remaining.append(item) - q.join() - yield from remaining - - -def windowed_complete(iterable, n): - """ - Yield ``(beginning, middle, end)`` tuples, where: - - * Each ``middle`` has *n* items from *iterable* - * Each ``beginning`` has the items before the ones in ``middle`` - * Each ``end`` has the items after the ones in ``middle`` - - >>> iterable = range(7) - >>> n = 3 - >>> for beginning, middle, end in windowed_complete(iterable, n): - ... print(beginning, middle, end) - () (0, 1, 2) (3, 4, 5, 6) - (0,) (1, 2, 3) (4, 5, 6) - (0, 1) (2, 3, 4) (5, 6) - (0, 1, 2) (3, 4, 5) (6,) - (0, 1, 2, 3) (4, 5, 6) () - - Note that *n* must be at least 0 and most equal to the length of - *iterable*. - - This function will exhaust the iterable and may require significant - storage. - """ - if n < 0: - raise ValueError('n must be >= 0') - - seq = tuple(iterable) - size = len(seq) - - if n > size: - raise ValueError('n must be <= len(seq)') - - for i in range(size - n + 1): - beginning = seq[:i] - middle = seq[i : i + n] - end = seq[i + n :] - yield beginning, middle, end - - -def all_unique(iterable, key=None): - """ - Returns ``True`` if all the elements of *iterable* are unique (no two - elements are equal). - - >>> all_unique('ABCB') - False - - If a *key* function is specified, it will be used to make comparisons. - - >>> all_unique('ABCb') - True - >>> all_unique('ABCb', str.lower) - False - - The function returns as soon as the first non-unique element is - encountered. Iterables with a mix of hashable and unhashable items can - be used, but the function will be slower for unhashable items. - """ - seenset = set() - seenset_add = seenset.add - seenlist = [] - seenlist_add = seenlist.append - for element in map(key, iterable) if key else iterable: - try: - if element in seenset: - return False - seenset_add(element) - except TypeError: - if element in seenlist: - return False - seenlist_add(element) - return True - - -def nth_product(index, *args): - """Equivalent to ``list(product(*args))[index]``. - - The products of *args* can be ordered lexicographically. 
- :func:`nth_product` computes the product at sort position *index* without - computing the previous products. - - >>> nth_product(8, range(2), range(2), range(2), range(2)) - (1, 0, 0, 0) - - ``IndexError`` will be raised if the given *index* is invalid. - """ - pools = list(map(tuple, reversed(args))) - ns = list(map(len, pools)) - - c = reduce(mul, ns) - - if index < 0: - index += c - - if not 0 <= index < c: - raise IndexError - - result = [] - for pool, n in zip(pools, ns): - result.append(pool[index % n]) - index //= n - - return tuple(reversed(result)) - - -def nth_permutation(iterable, r, index): - """Equivalent to ``list(permutations(iterable, r))[index]``` - - The subsequences of *iterable* that are of length *r* where order is - important can be ordered lexicographically. :func:`nth_permutation` - computes the subsequence at sort position *index* directly, without - computing the previous subsequences. - - >>> nth_permutation('ghijk', 2, 5) - ('h', 'i') - - ``ValueError`` will be raised If *r* is negative or greater than the length - of *iterable*. - ``IndexError`` will be raised if the given *index* is invalid. - """ - pool = list(iterable) - n = len(pool) - - if r is None or r == n: - r, c = n, factorial(n) - elif not 0 <= r < n: - raise ValueError - else: - c = factorial(n) // factorial(n - r) - - if index < 0: - index += c - - if not 0 <= index < c: - raise IndexError - - if c == 0: - return tuple() - - result = [0] * r - q = index * factorial(n) // c if r < n else index - for d in range(1, n + 1): - q, i = divmod(q, d) - if 0 <= n - d < r: - result[n - d] = i - if q == 0: - break - - return tuple(map(pool.pop, result)) - - -def value_chain(*args): - """Yield all arguments passed to the function in the same order in which - they were passed. If an argument itself is iterable then iterate over its - values. - - >>> list(value_chain(1, 2, 3, [4, 5, 6])) - [1, 2, 3, 4, 5, 6] - - Binary and text strings are not considered iterable and are emitted - as-is: - - >>> list(value_chain('12', '34', ['56', '78'])) - ['12', '34', '56', '78'] - - - Multiple levels of nesting are not flattened. - - """ - for value in args: - if isinstance(value, (str, bytes)): - yield value - continue - try: - yield from value - except TypeError: - yield value - - -def product_index(element, *args): - """Equivalent to ``list(product(*args)).index(element)`` - - The products of *args* can be ordered lexicographically. - :func:`product_index` computes the first index of *element* without - computing the previous products. - - >>> product_index([8, 2], range(10), range(5)) - 42 - - ``ValueError`` will be raised if the given *element* isn't in the product - of *args*. - """ - index = 0 - - for x, pool in zip_longest(element, args, fillvalue=_marker): - if x is _marker or pool is _marker: - raise ValueError('element is not a product of args') - - pool = tuple(pool) - index = index * len(pool) + pool.index(x) - - return index - - -def combination_index(element, iterable): - """Equivalent to ``list(combinations(iterable, r)).index(element)`` - - The subsequences of *iterable* that are of length *r* can be ordered - lexicographically. :func:`combination_index` computes the index of the - first *element*, without computing the previous combinations. - - >>> combination_index('adf', 'abcdefg') - 10 - - ``ValueError`` will be raised if the given *element* isn't one of the - combinations of *iterable*. 
- """ - element = enumerate(element) - k, y = next(element, (None, None)) - if k is None: - return 0 - - indexes = [] - pool = enumerate(iterable) - for n, x in pool: - if x == y: - indexes.append(n) - tmp, y = next(element, (None, None)) - if tmp is None: - break - else: - k = tmp - else: - raise ValueError('element is not a combination of iterable') - - n, _ = last(pool, default=(n, None)) - - # Python versiosn below 3.8 don't have math.comb - index = 1 - for i, j in enumerate(reversed(indexes), start=1): - j = n - j - if i <= j: - index += factorial(j) // (factorial(i) * factorial(j - i)) - - return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index - - -def permutation_index(element, iterable): - """Equivalent to ``list(permutations(iterable, r)).index(element)``` - - The subsequences of *iterable* that are of length *r* where order is - important can be ordered lexicographically. :func:`permutation_index` - computes the index of the first *element* directly, without computing - the previous permutations. - - >>> permutation_index([1, 3, 2], range(5)) - 19 - - ``ValueError`` will be raised if the given *element* isn't one of the - permutations of *iterable*. - """ - index = 0 - pool = list(iterable) - for i, x in zip(range(len(pool), -1, -1), element): - r = pool.index(x) - index = index * i + r - del pool[r] - - return index - - -class countable: - """Wrap *iterable* and keep a count of how many items have been consumed. - - The ``items_seen`` attribute starts at ``0`` and increments as the iterable - is consumed: - - >>> iterable = map(str, range(10)) - >>> it = countable(iterable) - >>> it.items_seen - 0 - >>> next(it), next(it) - ('0', '1') - >>> list(it) - ['2', '3', '4', '5', '6', '7', '8', '9'] - >>> it.items_seen - 10 - """ - - def __init__(self, iterable): - self._it = iter(iterable) - self.items_seen = 0 - - def __iter__(self): - return self - - def __next__(self): - item = next(self._it) - self.items_seen += 1 - - return item diff --git a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py b/venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py deleted file mode 100644 index 521abd7..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/more_itertools/recipes.py +++ /dev/null @@ -1,620 +0,0 @@ -"""Imported from the recipes section of the itertools documentation. - -All functions taken from the recipes section of the itertools library docs -[1]_. -Some backward-compatible usability improvements have been made. - -.. [1] http://docs.python.org/library/itertools.html#recipes - -""" -import warnings -from collections import deque -from itertools import ( - chain, - combinations, - count, - cycle, - groupby, - islice, - repeat, - starmap, - tee, - zip_longest, -) -import operator -from random import randrange, sample, choice - -__all__ = [ - 'all_equal', - 'consume', - 'convolve', - 'dotproduct', - 'first_true', - 'flatten', - 'grouper', - 'iter_except', - 'ncycles', - 'nth', - 'nth_combination', - 'padnone', - 'pad_none', - 'pairwise', - 'partition', - 'powerset', - 'prepend', - 'quantify', - 'random_combination_with_replacement', - 'random_combination', - 'random_permutation', - 'random_product', - 'repeatfunc', - 'roundrobin', - 'tabulate', - 'tail', - 'take', - 'unique_everseen', - 'unique_justseen', -] - - -def take(n, iterable): - """Return first *n* items of the iterable as a list. - - >>> take(3, range(10)) - [0, 1, 2] - - If there are fewer than *n* items in the iterable, all of them are - returned. 
- - >>> take(10, range(3)) - [0, 1, 2] - - """ - return list(islice(iterable, n)) - - -def tabulate(function, start=0): - """Return an iterator over the results of ``func(start)``, - ``func(start + 1)``, ``func(start + 2)``... - - *func* should be a function that accepts one integer argument. - - If *start* is not specified it defaults to 0. It will be incremented each - time the iterator is advanced. - - >>> square = lambda x: x ** 2 - >>> iterator = tabulate(square, -3) - >>> take(4, iterator) - [9, 4, 1, 0] - - """ - return map(function, count(start)) - - -def tail(n, iterable): - """Return an iterator over the last *n* items of *iterable*. - - >>> t = tail(3, 'ABCDEFG') - >>> list(t) - ['E', 'F', 'G'] - - """ - return iter(deque(iterable, maxlen=n)) - - -def consume(iterator, n=None): - """Advance *iterable* by *n* steps. If *n* is ``None``, consume it - entirely. - - Efficiently exhausts an iterator without returning values. Defaults to - consuming the whole iterator, but an optional second argument may be - provided to limit consumption. - - >>> i = (x for x in range(10)) - >>> next(i) - 0 - >>> consume(i, 3) - >>> next(i) - 4 - >>> consume(i) - >>> next(i) - Traceback (most recent call last): - File "", line 1, in - StopIteration - - If the iterator has fewer items remaining than the provided limit, the - whole iterator will be consumed. - - >>> i = (x for x in range(3)) - >>> consume(i, 5) - >>> next(i) - Traceback (most recent call last): - File "", line 1, in - StopIteration - - """ - # Use functions that consume iterators at C speed. - if n is None: - # feed the entire iterator into a zero-length deque - deque(iterator, maxlen=0) - else: - # advance to the empty slice starting at position n - next(islice(iterator, n, n), None) - - -def nth(iterable, n, default=None): - """Returns the nth item or a default value. - - >>> l = range(10) - >>> nth(l, 3) - 3 - >>> nth(l, 20, "zebra") - 'zebra' - - """ - return next(islice(iterable, n, None), default) - - -def all_equal(iterable): - """ - Returns ``True`` if all the elements are equal to each other. - - >>> all_equal('aaaa') - True - >>> all_equal('aaab') - False - - """ - g = groupby(iterable) - return next(g, True) and not next(g, False) - - -def quantify(iterable, pred=bool): - """Return the how many times the predicate is true. - - >>> quantify([True, False, True]) - 2 - - """ - return sum(map(pred, iterable)) - - -def pad_none(iterable): - """Returns the sequence of elements and then returns ``None`` indefinitely. - - >>> take(5, pad_none(range(3))) - [0, 1, 2, None, None] - - Useful for emulating the behavior of the built-in :func:`map` function. - - See also :func:`padded`. - - """ - return chain(iterable, repeat(None)) - - -padnone = pad_none - - -def ncycles(iterable, n): - """Returns the sequence elements *n* times - - >>> list(ncycles(["a", "b"], 3)) - ['a', 'b', 'a', 'b', 'a', 'b'] - - """ - return chain.from_iterable(repeat(tuple(iterable), n)) - - -def dotproduct(vec1, vec2): - """Returns the dot product of the two iterables. - - >>> dotproduct([10, 10], [20, 20]) - 400 - - """ - return sum(map(operator.mul, vec1, vec2)) - - -def flatten(listOfLists): - """Return an iterator flattening one level of nesting in a list of lists. - - >>> list(flatten([[0, 1], [2, 3]])) - [0, 1, 2, 3] - - See also :func:`collapse`, which can flatten multiple levels of nesting. 
- - """ - return chain.from_iterable(listOfLists) - - -def repeatfunc(func, times=None, *args): - """Call *func* with *args* repeatedly, returning an iterable over the - results. - - If *times* is specified, the iterable will terminate after that many - repetitions: - - >>> from operator import add - >>> times = 4 - >>> args = 3, 5 - >>> list(repeatfunc(add, times, *args)) - [8, 8, 8, 8] - - If *times* is ``None`` the iterable will not terminate: - - >>> from random import randrange - >>> times = None - >>> args = 1, 11 - >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP - [2, 4, 8, 1, 8, 4] - - """ - if times is None: - return starmap(func, repeat(args)) - return starmap(func, repeat(args, times)) - - -def _pairwise(iterable): - """Returns an iterator of paired items, overlapping, from the original - - >>> take(4, pairwise(count())) - [(0, 1), (1, 2), (2, 3), (3, 4)] - - On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`. - - """ - a, b = tee(iterable) - next(b, None) - yield from zip(a, b) - - -try: - from itertools import pairwise as itertools_pairwise -except ImportError: - pairwise = _pairwise -else: - - def pairwise(iterable): - yield from itertools_pairwise(iterable) - - pairwise.__doc__ = _pairwise.__doc__ - - -def grouper(iterable, n, fillvalue=None): - """Collect data into fixed-length chunks or blocks. - - >>> list(grouper('ABCDEFG', 3, 'x')) - [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] - - """ - if isinstance(iterable, int): - warnings.warn( - "grouper expects iterable as first parameter", DeprecationWarning - ) - n, iterable = iterable, n - args = [iter(iterable)] * n - return zip_longest(fillvalue=fillvalue, *args) - - -def roundrobin(*iterables): - """Yields an item from each iterable, alternating between them. - - >>> list(roundrobin('ABC', 'D', 'EF')) - ['A', 'D', 'E', 'B', 'F', 'C'] - - This function produces the same output as :func:`interleave_longest`, but - may perform better for some inputs (in particular when the number of - iterables is small). - - """ - # Recipe credited to George Sakkis - pending = len(iterables) - nexts = cycle(iter(it).__next__ for it in iterables) - while pending: - try: - for next in nexts: - yield next() - except StopIteration: - pending -= 1 - nexts = cycle(islice(nexts, pending)) - - -def partition(pred, iterable): - """ - Returns a 2-tuple of iterables derived from the input iterable. - The first yields the items that have ``pred(item) == False``. - The second yields the items that have ``pred(item) == True``. - - >>> is_odd = lambda x: x % 2 != 0 - >>> iterable = range(10) - >>> even_items, odd_items = partition(is_odd, iterable) - >>> list(even_items), list(odd_items) - ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9]) - - If *pred* is None, :func:`bool` is used. - - >>> iterable = [0, 1, False, True, '', ' '] - >>> false_items, true_items = partition(None, iterable) - >>> list(false_items), list(true_items) - ([0, False, ''], [1, True, ' ']) - - """ - if pred is None: - pred = bool - - evaluations = ((pred(x), x) for x in iterable) - t1, t2 = tee(evaluations) - return ( - (x for (cond, x) in t1 if not cond), - (x for (cond, x) in t2 if cond), - ) - - -def powerset(iterable): - """Yields all possible subsets of the iterable. - - >>> list(powerset([1, 2, 3])) - [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] - - :func:`powerset` will operate on iterables that aren't :class:`set` - instances, so repeated elements in the input will produce repeated elements - in the output. 
Use :func:`unique_everseen` on the input to avoid generating - duplicates: - - >>> seq = [1, 1, 0] - >>> list(powerset(seq)) - [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)] - >>> from more_itertools import unique_everseen - >>> list(powerset(unique_everseen(seq))) - [(), (1,), (0,), (1, 0)] - - """ - s = list(iterable) - return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) - - -def unique_everseen(iterable, key=None): - """ - Yield unique elements, preserving order. - - >>> list(unique_everseen('AAAABBBCCDAABBB')) - ['A', 'B', 'C', 'D'] - >>> list(unique_everseen('ABBCcAD', str.lower)) - ['A', 'B', 'C', 'D'] - - Sequences with a mix of hashable and unhashable items can be used. - The function will be slower (i.e., `O(n^2)`) for unhashable items. - - Remember that ``list`` objects are unhashable - you can use the *key* - parameter to transform the list to a tuple (which is hashable) to - avoid a slowdown. - - >>> iterable = ([1, 2], [2, 3], [1, 2]) - >>> list(unique_everseen(iterable)) # Slow - [[1, 2], [2, 3]] - >>> list(unique_everseen(iterable, key=tuple)) # Faster - [[1, 2], [2, 3]] - - Similary, you may want to convert unhashable ``set`` objects with - ``key=frozenset``. For ``dict`` objects, - ``key=lambda x: frozenset(x.items())`` can be used. - - """ - seenset = set() - seenset_add = seenset.add - seenlist = [] - seenlist_add = seenlist.append - use_key = key is not None - - for element in iterable: - k = key(element) if use_key else element - try: - if k not in seenset: - seenset_add(k) - yield element - except TypeError: - if k not in seenlist: - seenlist_add(k) - yield element - - -def unique_justseen(iterable, key=None): - """Yields elements in order, ignoring serial duplicates - - >>> list(unique_justseen('AAAABBBCCDAABBB')) - ['A', 'B', 'C', 'D', 'A', 'B'] - >>> list(unique_justseen('ABBCcAD', str.lower)) - ['A', 'B', 'C', 'A', 'D'] - - """ - return map(next, map(operator.itemgetter(1), groupby(iterable, key))) - - -def iter_except(func, exception, first=None): - """Yields results from a function repeatedly until an exception is raised. - - Converts a call-until-exception interface to an iterator interface. - Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel - to end the loop. - - >>> l = [0, 1, 2] - >>> list(iter_except(l.pop, IndexError)) - [2, 1, 0] - - """ - try: - if first is not None: - yield first() - while 1: - yield func() - except exception: - pass - - -def first_true(iterable, default=None, pred=None): - """ - Returns the first true value in the iterable. - - If no true value is found, returns *default* - - If *pred* is not None, returns the first item for which - ``pred(item) == True`` . - - >>> first_true(range(10)) - 1 - >>> first_true(range(10), pred=lambda x: x > 5) - 6 - >>> first_true(range(10), default='missing', pred=lambda x: x > 9) - 'missing' - - """ - return next(filter(pred, iterable), default) - - -def random_product(*args, repeat=1): - """Draw an item at random from each of the input iterables. - - >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP - ('c', 3, 'Z') - - If *repeat* is provided as a keyword argument, that many items will be - drawn from each iterable. - - >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP - ('a', 2, 'd', 3) - - This equivalent to taking a random selection from - ``itertools.product(*args, **kwarg)``. 
- - """ - pools = [tuple(pool) for pool in args] * repeat - return tuple(choice(pool) for pool in pools) - - -def random_permutation(iterable, r=None): - """Return a random *r* length permutation of the elements in *iterable*. - - If *r* is not specified or is ``None``, then *r* defaults to the length of - *iterable*. - - >>> random_permutation(range(5)) # doctest:+SKIP - (3, 4, 0, 1, 2) - - This equivalent to taking a random selection from - ``itertools.permutations(iterable, r)``. - - """ - pool = tuple(iterable) - r = len(pool) if r is None else r - return tuple(sample(pool, r)) - - -def random_combination(iterable, r): - """Return a random *r* length subsequence of the elements in *iterable*. - - >>> random_combination(range(5), 3) # doctest:+SKIP - (2, 3, 4) - - This equivalent to taking a random selection from - ``itertools.combinations(iterable, r)``. - - """ - pool = tuple(iterable) - n = len(pool) - indices = sorted(sample(range(n), r)) - return tuple(pool[i] for i in indices) - - -def random_combination_with_replacement(iterable, r): - """Return a random *r* length subsequence of elements in *iterable*, - allowing individual elements to be repeated. - - >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP - (0, 0, 1, 2, 2) - - This equivalent to taking a random selection from - ``itertools.combinations_with_replacement(iterable, r)``. - - """ - pool = tuple(iterable) - n = len(pool) - indices = sorted(randrange(n) for i in range(r)) - return tuple(pool[i] for i in indices) - - -def nth_combination(iterable, r, index): - """Equivalent to ``list(combinations(iterable, r))[index]``. - - The subsequences of *iterable* that are of length *r* can be ordered - lexicographically. :func:`nth_combination` computes the subsequence at - sort position *index* directly, without computing the previous - subsequences. - - >>> nth_combination(range(5), 3, 5) - (0, 3, 4) - - ``ValueError`` will be raised If *r* is negative or greater than the length - of *iterable*. - ``IndexError`` will be raised if the given *index* is invalid. - """ - pool = tuple(iterable) - n = len(pool) - if (r < 0) or (r > n): - raise ValueError - - c = 1 - k = min(r, n - r) - for i in range(1, k + 1): - c = c * (n - k + i) // i - - if index < 0: - index += c - - if (index < 0) or (index >= c): - raise IndexError - - result = [] - while r: - c, n, r = c * r // n, n - 1, r - 1 - while index >= c: - index -= c - c, n = c * (n - r) // n, n - 1 - result.append(pool[-1 - n]) - - return tuple(result) - - -def prepend(value, iterator): - """Yield *value*, followed by the elements in *iterator*. - - >>> value = '0' - >>> iterator = ['1', '2', '3'] - >>> list(prepend(value, iterator)) - ['0', '1', '2', '3'] - - To prepend multiple values, see :func:`itertools.chain` - or :func:`value_chain`. - - """ - return chain([value], iterator) - - -def convolve(signal, kernel): - """Convolve the iterable *signal* with the iterable *kernel*. - - >>> signal = (1, 2, 3, 4, 5) - >>> kernel = [3, 2, 1] - >>> list(convolve(signal, kernel)) - [3, 8, 14, 20, 26, 14, 5] - - Note: the input arguments are not interchangeable, as the *kernel* - is immediately consumed and stored. 
- - """ - kernel = tuple(kernel)[::-1] - n = len(kernel) - window = deque([0], maxlen=n) * n - for x in chain(signal, repeat(0, n - 1)): - window.append(x) - yield sum(map(operator.mul, kernel, window)) diff --git a/venv/Lib/site-packages/setuptools/_vendor/ordered_set.py b/venv/Lib/site-packages/setuptools/_vendor/ordered_set.py deleted file mode 100644 index 1487600..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/ordered_set.py +++ /dev/null @@ -1,488 +0,0 @@ -""" -An OrderedSet is a custom MutableSet that remembers its order, so that every -entry has an index that can be looked up. - -Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger, -and released under the MIT license. -""" -import itertools as it -from collections import deque - -try: - # Python 3 - from collections.abc import MutableSet, Sequence -except ImportError: - # Python 2.7 - from collections import MutableSet, Sequence - -SLICE_ALL = slice(None) -__version__ = "3.1" - - -def is_iterable(obj): - """ - Are we being asked to look up a list of things, instead of a single thing? - We check for the `__iter__` attribute so that this can cover types that - don't have to be known by this module, such as NumPy arrays. - - Strings, however, should be considered as atomic values to look up, not - iterables. The same goes for tuples, since they are immutable and therefore - valid entries. - - We don't need to check for the Python 2 `unicode` type, because it doesn't - have an `__iter__` attribute anyway. - """ - return ( - hasattr(obj, "__iter__") - and not isinstance(obj, str) - and not isinstance(obj, tuple) - ) - - -class OrderedSet(MutableSet, Sequence): - """ - An OrderedSet is a custom MutableSet that remembers its order, so that - every entry has an index that can be looked up. - - Example: - >>> OrderedSet([1, 1, 2, 3, 2]) - OrderedSet([1, 2, 3]) - """ - - def __init__(self, iterable=None): - self.items = [] - self.map = {} - if iterable is not None: - self |= iterable - - def __len__(self): - """ - Returns the number of unique elements in the ordered set - - Example: - >>> len(OrderedSet([])) - 0 - >>> len(OrderedSet([1, 2])) - 2 - """ - return len(self.items) - - def __getitem__(self, index): - """ - Get the item at a given index. - - If `index` is a slice, you will get back that slice of items, as a - new OrderedSet. - - If `index` is a list or a similar iterable, you'll get a list of - items corresponding to those indices. This is similar to NumPy's - "fancy indexing". The result is not an OrderedSet because you may ask - for duplicate indices, and the number of elements returned should be - the number of elements asked for. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset[1] - 2 - """ - if isinstance(index, slice) and index == SLICE_ALL: - return self.copy() - elif is_iterable(index): - return [self.items[i] for i in index] - elif hasattr(index, "__index__") or isinstance(index, slice): - result = self.items[index] - if isinstance(result, list): - return self.__class__(result) - else: - return result - else: - raise TypeError("Don't know how to index an OrderedSet by %r" % index) - - def copy(self): - """ - Return a shallow copy of this object. - - Example: - >>> this = OrderedSet([1, 2, 3]) - >>> other = this.copy() - >>> this == other - True - >>> this is other - False - """ - return self.__class__(self) - - def __getstate__(self): - if len(self) == 0: - # The state can't be an empty list. - # We need to return a truthy value, or else __setstate__ won't be run. 
- # - # This could have been done more gracefully by always putting the state - # in a tuple, but this way is backwards- and forwards- compatible with - # previous versions of OrderedSet. - return (None,) - else: - return list(self) - - def __setstate__(self, state): - if state == (None,): - self.__init__([]) - else: - self.__init__(state) - - def __contains__(self, key): - """ - Test if the item is in this ordered set - - Example: - >>> 1 in OrderedSet([1, 3, 2]) - True - >>> 5 in OrderedSet([1, 3, 2]) - False - """ - return key in self.map - - def add(self, key): - """ - Add `key` as an item to this OrderedSet, then return its index. - - If `key` is already in the OrderedSet, return the index it already - had. - - Example: - >>> oset = OrderedSet() - >>> oset.append(3) - 0 - >>> print(oset) - OrderedSet([3]) - """ - if key not in self.map: - self.map[key] = len(self.items) - self.items.append(key) - return self.map[key] - - append = add - - def update(self, sequence): - """ - Update the set with the given iterable sequence, then return the index - of the last element inserted. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.update([3, 1, 5, 1, 4]) - 4 - >>> print(oset) - OrderedSet([1, 2, 3, 5, 4]) - """ - item_index = None - try: - for item in sequence: - item_index = self.add(item) - except TypeError: - raise ValueError( - "Argument needs to be an iterable, got %s" % type(sequence) - ) - return item_index - - def index(self, key): - """ - Get the index of a given entry, raising an IndexError if it's not - present. - - `key` can be an iterable of entries that is not a string, in which case - this returns a list of indices. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.index(2) - 1 - """ - if is_iterable(key): - return [self.index(subkey) for subkey in key] - return self.map[key] - - # Provide some compatibility with pd.Index - get_loc = index - get_indexer = index - - def pop(self): - """ - Remove and return the last element from the set. - - Raises KeyError if the set is empty. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.pop() - 3 - """ - if not self.items: - raise KeyError("Set is empty") - - elem = self.items[-1] - del self.items[-1] - del self.map[elem] - return elem - - def discard(self, key): - """ - Remove an element. Do not raise an exception if absent. - - The MutableSet mixin uses this to implement the .remove() method, which - *does* raise an error when asked to remove a non-existent item. - - Example: - >>> oset = OrderedSet([1, 2, 3]) - >>> oset.discard(2) - >>> print(oset) - OrderedSet([1, 3]) - >>> oset.discard(2) - >>> print(oset) - OrderedSet([1, 3]) - """ - if key in self: - i = self.map[key] - del self.items[i] - del self.map[key] - for k, v in self.map.items(): - if v >= i: - self.map[k] = v - 1 - - def clear(self): - """ - Remove all items from this OrderedSet. - """ - del self.items[:] - self.map.clear() - - def __iter__(self): - """ - Example: - >>> list(iter(OrderedSet([1, 2, 3]))) - [1, 2, 3] - """ - return iter(self.items) - - def __reversed__(self): - """ - Example: - >>> list(reversed(OrderedSet([1, 2, 3]))) - [3, 2, 1] - """ - return reversed(self.items) - - def __repr__(self): - if not self: - return "%s()" % (self.__class__.__name__,) - return "%s(%r)" % (self.__class__.__name__, list(self)) - - def __eq__(self, other): - """ - Returns true if the containers have the same items. If `other` is a - Sequence, then order is checked, otherwise it is ignored. 
- - Example: - >>> oset = OrderedSet([1, 3, 2]) - >>> oset == [1, 3, 2] - True - >>> oset == [1, 2, 3] - False - >>> oset == [2, 3] - False - >>> oset == OrderedSet([3, 2, 1]) - False - """ - # In Python 2 deque is not a Sequence, so treat it as one for - # consistent behavior with Python 3. - if isinstance(other, (Sequence, deque)): - # Check that this OrderedSet contains the same elements, in the - # same order, as the other object. - return list(self) == list(other) - try: - other_as_set = set(other) - except TypeError: - # If `other` can't be converted into a set, it's not equal. - return False - else: - return set(self) == other_as_set - - def union(self, *sets): - """ - Combines all unique items. - Each items order is defined by its first appearance. - - Example: - >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0]) - >>> print(oset) - OrderedSet([3, 1, 4, 5, 2, 0]) - >>> oset.union([8, 9]) - OrderedSet([3, 1, 4, 5, 2, 0, 8, 9]) - >>> oset | {10} - OrderedSet([3, 1, 4, 5, 2, 0, 10]) - """ - cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet - containers = map(list, it.chain([self], sets)) - items = it.chain.from_iterable(containers) - return cls(items) - - def __and__(self, other): - # the parent implementation of this is backwards - return self.intersection(other) - - def intersection(self, *sets): - """ - Returns elements in common between all sets. Order is defined only - by the first set. - - Example: - >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3]) - >>> print(oset) - OrderedSet([1, 2, 3]) - >>> oset.intersection([2, 4, 5], [1, 2, 3, 4]) - OrderedSet([2]) - >>> oset.intersection() - OrderedSet([1, 2, 3]) - """ - cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet - if sets: - common = set.intersection(*map(set, sets)) - items = (item for item in self if item in common) - else: - items = self - return cls(items) - - def difference(self, *sets): - """ - Returns all elements that are in this set but not the others. - - Example: - >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2])) - OrderedSet([1, 3]) - >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3])) - OrderedSet([1]) - >>> OrderedSet([1, 2, 3]) - OrderedSet([2]) - OrderedSet([1, 3]) - >>> OrderedSet([1, 2, 3]).difference() - OrderedSet([1, 2, 3]) - """ - cls = self.__class__ - if sets: - other = set.union(*map(set, sets)) - items = (item for item in self if item not in other) - else: - items = self - return cls(items) - - def issubset(self, other): - """ - Report whether another set contains this set. - - Example: - >>> OrderedSet([1, 2, 3]).issubset({1, 2}) - False - >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4}) - True - >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5}) - False - """ - if len(self) > len(other): # Fast check for obvious cases - return False - return all(item in other for item in self) - - def issuperset(self, other): - """ - Report whether this set contains another set. - - Example: - >>> OrderedSet([1, 2]).issuperset([1, 2, 3]) - False - >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3}) - True - >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3}) - False - """ - if len(self) < len(other): # Fast check for obvious cases - return False - return all(item in self for item in other) - - def symmetric_difference(self, other): - """ - Return the symmetric difference of two OrderedSets as a new set. - That is, the new set will contain all elements that are in exactly - one of the sets. 
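The set-algebra methods above (union, intersection, difference, symmetric_difference) all preserve first-appearance order rather than falling back to hash order. A minimal usage sketch, assuming the standalone "ordered-set" distribution (which this vendored copy mirrors) is importable as ordered_set:

    from ordered_set import OrderedSet  # assumed: the PyPI "ordered-set" package

    letters = OrderedSet("abracadabra")          # OrderedSet(['a', 'b', 'r', 'c', 'd'])
    print(letters | OrderedSet("simsalabim"))    # union keeps first-appearance order
    print(letters & OrderedSet("simsalabim"))    # intersection ordered by the left operand
    print(letters.index("r"))                    # 2 -- every entry has a stable index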
- - Their order will be preserved, with elements from `self` preceding - elements from `other`. - - Example: - >>> this = OrderedSet([1, 4, 3, 5, 7]) - >>> other = OrderedSet([9, 7, 1, 3, 2]) - >>> this.symmetric_difference(other) - OrderedSet([4, 5, 9, 2]) - """ - cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet - diff1 = cls(self).difference(other) - diff2 = cls(other).difference(self) - return diff1.union(diff2) - - def _update_items(self, items): - """ - Replace the 'items' list of this OrderedSet with a new one, updating - self.map accordingly. - """ - self.items = items - self.map = {item: idx for (idx, item) in enumerate(items)} - - def difference_update(self, *sets): - """ - Update this OrderedSet to remove items from one or more other sets. - - Example: - >>> this = OrderedSet([1, 2, 3]) - >>> this.difference_update(OrderedSet([2, 4])) - >>> print(this) - OrderedSet([1, 3]) - - >>> this = OrderedSet([1, 2, 3, 4, 5]) - >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6])) - >>> print(this) - OrderedSet([3, 5]) - """ - items_to_remove = set() - for other in sets: - items_to_remove |= set(other) - self._update_items([item for item in self.items if item not in items_to_remove]) - - def intersection_update(self, other): - """ - Update this OrderedSet to keep only items in another set, preserving - their order in this set. - - Example: - >>> this = OrderedSet([1, 4, 3, 5, 7]) - >>> other = OrderedSet([9, 7, 1, 3, 2]) - >>> this.intersection_update(other) - >>> print(this) - OrderedSet([1, 3, 7]) - """ - other = set(other) - self._update_items([item for item in self.items if item in other]) - - def symmetric_difference_update(self, other): - """ - Update this OrderedSet to remove items from another set, then - add items from the other set that were not present in this set. - - Example: - >>> this = OrderedSet([1, 4, 3, 5, 7]) - >>> other = OrderedSet([9, 7, 1, 3, 2]) - >>> this.symmetric_difference_update(other) - >>> print(this) - OrderedSet([4, 5, 9, 2]) - """ - items_to_add = [item for item in other if item not in self] - items_to_remove = set(other) - self._update_items( - [item for item in self.items if item not in items_to_remove] + items_to_add - ) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__about__.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/__about__.py deleted file mode 100644 index 3551bc2..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/__about__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "21.3" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014-2019 %s" % __author__ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py deleted file mode 100644 index 3c50c5d..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from .__about__ import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-39.pyc deleted file mode 100644 index 0cff384..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index f59f40c..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc deleted file mode 100644 index 51942c6..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc deleted file mode 100644 index 3834478..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-39.pyc deleted file mode 100644 index 03ae05e..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-39.pyc deleted file mode 100644 index 7fac144..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-39.pyc deleted file mode 100644 index 0ec0b8b..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc deleted file mode 100644 index fa7fb12..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-39.pyc deleted file mode 100644 index ce2ded6..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-39.pyc deleted file mode 100644 index 5527778..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-39.pyc b/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-39.pyc deleted file mode 100644 index cc1db1b..0000000 Binary files a/venv/Lib/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/_manylinux.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/_manylinux.py deleted file mode 100644 index 4c379aa..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/_manylinux.py +++ /dev/null @@ -1,301 +0,0 @@ -import collections -import functools -import os -import re -import struct -import sys -import warnings -from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple - - -# Python does not provide platform information at sufficient granularity to -# identify the architecture of the running executable in some cases, so we -# determine it dynamically by reading the information from the running -# process. This only applies on Linux, which uses the ELF format. -class _ELFFileHeader: - # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header - class _InvalidELFFileHeader(ValueError): - """ - An invalid ELF file header was found. - """ - - ELF_MAGIC_NUMBER = 0x7F454C46 - ELFCLASS32 = 1 - ELFCLASS64 = 2 - ELFDATA2LSB = 1 - ELFDATA2MSB = 2 - EM_386 = 3 - EM_S390 = 22 - EM_ARM = 40 - EM_X86_64 = 62 - EF_ARM_ABIMASK = 0xFF000000 - EF_ARM_ABI_VER5 = 0x05000000 - EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - def __init__(self, file: IO[bytes]) -> None: - def unpack(fmt: str) -> int: - try: - data = file.read(struct.calcsize(fmt)) - result: Tuple[int, ...] 
= struct.unpack(fmt, data) - except struct.error: - raise _ELFFileHeader._InvalidELFFileHeader() - return result[0] - - self.e_ident_magic = unpack(">I") - if self.e_ident_magic != self.ELF_MAGIC_NUMBER: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_class = unpack("B") - if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_data = unpack("B") - if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_version = unpack("B") - self.e_ident_osabi = unpack("B") - self.e_ident_abiversion = unpack("B") - self.e_ident_pad = file.read(7) - format_h = "H" - format_i = "I" - format_q = "Q" - format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q - self.e_type = unpack(format_h) - self.e_machine = unpack(format_h) - self.e_version = unpack(format_i) - self.e_entry = unpack(format_p) - self.e_phoff = unpack(format_p) - self.e_shoff = unpack(format_p) - self.e_flags = unpack(format_i) - self.e_ehsize = unpack(format_h) - self.e_phentsize = unpack(format_h) - self.e_phnum = unpack(format_h) - self.e_shentsize = unpack(format_h) - self.e_shnum = unpack(format_h) - self.e_shstrndx = unpack(format_h) - - -def _get_elf_header() -> Optional[_ELFFileHeader]: - try: - with open(sys.executable, "rb") as f: - elf_header = _ELFFileHeader(f) - except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): - return None - return elf_header - - -def _is_linux_armhf() -> bool: - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_ARM - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABIMASK - ) == elf_header.EF_ARM_ABI_VER5 - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD - ) == elf_header.EF_ARM_ABI_FLOAT_HARD - return result - - -def _is_linux_i686() -> bool: - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_386 - return result - - -def _have_compatible_abi(arch: str) -> bool: - if arch == "armv7l": - return _is_linux_armhf() - if arch == "i686": - return _is_linux_i686() - return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} - - -# If glibc ever changes its major version, we need to know what the last -# minor version was, so we can build the complete list of all versions. -# For now, guess what the highest minor version might be, assume it will -# be 50 for testing. Once this actually happens, update the dictionary -# with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) - - -class _GLibCVersion(NamedTuple): - major: int - minor: int - - -def _glibc_version_string_confstr() -> Optional[str]: - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. 
- # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 - try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". - version_string = os.confstr("CS_GNU_LIBC_VERSION") - assert version_string is not None - _, version = version_string.split() - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def _glibc_version_string_ctypes() -> Optional[str]: - """ - Fallback implementation of glibc_version_string using ctypes. - """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # We must also handle the special case where the executable is not a - # dynamically linked executable. This can occur when using musl libc, - # for example. In this situation, dlopen() will error, leading to an - # OSError. Interestingly, at least in the case of musl, there is no - # errno set on the OSError. The single string argument used to construct - # OSError comes from libc itself and is therefore not portable to - # hard code here. In any case, failure to call dlopen() means we - # can proceed, so we bail on our attempt. - try: - process_namespace = ctypes.CDLL(None) - except OSError: - return None - - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str: str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -def _glibc_version_string() -> Optional[str]: - """Returns glibc version string, or None if not using glibc.""" - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _parse_glibc_version(version_str: str) -> Tuple[int, int]: - """Parse glibc version. - - We use a regexp instead of str.split because we want to discard any - random junk that might come after the minor version -- this might happen - in patched/forked versions of glibc (e.g. Linaro's version of glibc - uses version strings like "2.20-2014.11"). See gh-3588. - """ - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn( - "Expected glibc version with 2 components major.minor," - " got: %s" % version_str, - RuntimeWarning, - ) - return -1, -1 - return int(m.group("major")), int(m.group("minor")) - - -@functools.lru_cache() -def _get_glibc_version() -> Tuple[int, int]: - version_str = _glibc_version_string() - if version_str is None: - return (-1, -1) - return _parse_glibc_version(version_str) - - -# From PEP 513, PEP 600 -def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: - sys_glibc = _get_glibc_version() - if sys_glibc < version: - return False - # Check for presence of _manylinux module. 
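The two strategies above (os.confstr first, a ctypes call to gnu_get_libc_version as a fallback) both reduce to "obtain a short version string, then parse major.minor". A self-contained sketch of the confstr path using only the standard library; on non-glibc platforms it is expected to return None:

    import os
    import re

    def glibc_version():
        """Best-effort glibc (major, minor), mirroring the confstr strategy above."""
        try:
            # On glibc systems this returns something like "glibc 2.31".
            raw = os.confstr("CS_GNU_LIBC_VERSION")
        except (AttributeError, OSError, ValueError):
            return None  # os.confstr missing (e.g. Windows) or the name is unsupported
        if not raw:
            return None
        m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", raw.split()[-1])
        return (int(m.group("major")), int(m.group("minor"))) if m else None

    print(glibc_version())  # e.g. (2, 31) on a glibc-based Linux; None elsewhere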
- try: - import _manylinux # noqa - except ImportError: - return True - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible(version[0], version[1], arch) - if result is not None: - return bool(result) - return True - if version == _GLibCVersion(2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if version == _GLibCVersion(2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if version == _GLibCVersion(2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -_LEGACY_MANYLINUX_MAP = { - # CentOS 7 w/ glibc 2.17 (PEP 599) - (2, 17): "manylinux2014", - # CentOS 6 w/ glibc 2.12 (PEP 571) - (2, 12): "manylinux2010", - # CentOS 5 w/ glibc 2.5 (PEP 513) - (2, 5): "manylinux1", -} - - -def platform_tags(linux: str, arch: str) -> Iterator[str]: - if not _have_compatible_abi(arch): - return - # Oldest glibc to be supported regardless of architecture is (2, 17). - too_old_glibc2 = _GLibCVersion(2, 16) - if arch in {"x86_64", "i686"}: - # On x86/i686 also oldest glibc to be supported is (2, 5). - too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) - glibc_max_list = [current_glibc] - # We can assume compatibility across glibc major versions. - # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 - # - # Build a list of maximum glibc versions so that we can - # output the canonical list of all glibc from current_glibc - # down to too_old_glibc2, including all intermediary versions. - for glibc_major in range(current_glibc.major - 1, 1, -1): - glibc_minor = _LAST_GLIBC_MINOR[glibc_major] - glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) - for glibc_max in glibc_max_list: - if glibc_max.major == too_old_glibc2.major: - min_minor = too_old_glibc2.minor - else: - # For other glibc major versions oldest supported is (x, 0). - min_minor = -1 - for glibc_minor in range(glibc_max.minor, min_minor, -1): - glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) - if _is_compatible(tag, arch, glibc_version): - yield linux.replace("linux", tag) - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(legacy_tag, arch, glibc_version): - yield linux.replace("linux", legacy_tag) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/_musllinux.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/_musllinux.py deleted file mode 100644 index 8ac3059..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/_musllinux.py +++ /dev/null @@ -1,136 +0,0 @@ -"""PEP 656 support. - -This module implements logic to detect if the currently running Python is -linked against musl, and what musl version is used. -""" - -import contextlib -import functools -import operator -import os -import re -import struct -import subprocess -import sys -from typing import IO, Iterator, NamedTuple, Optional, Tuple - - -def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: - return struct.unpack(fmt, f.read(struct.calcsize(fmt))) - - -def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: - """Detect musl libc location by parsing the Python executable. 
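For a given glibc level, the platform_tags generator above yields one PEP 600 tag per supported glibc minor version, plus a legacy alias where one exists in _LEGACY_MANYLINUX_MAP. A rough, purely illustrative sketch of the naming it produces, assuming glibc 2.17 on x86_64 (whose floor is 2.5, as encoded above); the real code also consults the optional _manylinux module before yielding each tag:

    legacy = {(2, 17): "manylinux2014", (2, 12): "manylinux2010", (2, 5): "manylinux1"}
    arch, current_minor = "x86_64", 17
    for minor in range(current_minor, 4, -1):  # stop at the 2.5 floor used for x86_64
        print(f"manylinux_2_{minor}_{arch}")
        if (2, minor) in legacy:
            print(f"{legacy[(2, minor)]}_{arch}")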
- - Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca - ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html - """ - f.seek(0) - try: - ident = _read_unpacked(f, "16B") - except struct.error: - return None - if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. - return None - f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. - - try: - # e_fmt: Format for program header. - # p_fmt: Format for section header. - # p_idx: Indexes to find p_type, p_offset, and p_filesz. - e_fmt, p_fmt, p_idx = { - 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. - 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. - }[ident[4]] - except KeyError: - return None - else: - p_get = operator.itemgetter(*p_idx) - - # Find the interpreter section and return its content. - try: - _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) - except struct.error: - return None - for i in range(e_phnum + 1): - f.seek(e_phoff + e_phentsize * i) - try: - p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) - except struct.error: - return None - if p_type != 3: # Not PT_INTERP. - continue - f.seek(p_offset) - interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") - if "musl" not in interpreter: - return None - return interpreter - return None - - -class _MuslVersion(NamedTuple): - major: int - minor: int - - -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: - lines = [n for n in (n.strip() for n in output.splitlines()) if n] - if len(lines) < 2 or lines[0][:4] != "musl": - return None - m = re.match(r"Version (\d+)\.(\d+)", lines[1]) - if not m: - return None - return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) - - -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: - """Detect currently-running musl runtime version. - - This is done by checking the specified executable's dynamic linking - information, and invoking the loader to parse its output for a version - string. If the loader is musl, the output would be something like:: - - musl libc (x86_64) - Version 1.2.2 - Dynamic Program Loader - """ - with contextlib.ExitStack() as stack: - try: - f = stack.enter_context(open(executable, "rb")) - except OSError: - return None - ld = _parse_ld_musl_from_elf(f) - if not ld: - return None - proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) - return _parse_musl_version(proc.stderr) - - -def platform_tags(arch: str) -> Iterator[str]: - """Generate musllinux tags compatible to the current platform. - - :param arch: Should be the part of platform tag after the ``linux_`` - prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a - prerequisite for the current platform to be musllinux-compatible. - - :returns: An iterator of compatible musllinux tags. - """ - sys_musl = _get_musl_version(sys.executable) - if sys_musl is None: # Python not dynamically linked against musl. 
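The loader banner quoted in the docstring above ("musl libc (x86_64) / Version 1.2.2 / Dynamic Program Loader") is all the version detection needs once the loader path has been found. A standalone sketch of that parsing step under the same assumptions:

    import re
    from typing import NamedTuple, Optional

    class MuslVersion(NamedTuple):
        major: int
        minor: int

    def parse_musl_version(output: str) -> Optional[MuslVersion]:
        # The second non-empty line of the musl loader banner carries "Version X.Y".
        lines = [line.strip() for line in output.splitlines() if line.strip()]
        if len(lines) < 2 or not lines[0].startswith("musl"):
            return None
        m = re.match(r"Version (\d+)\.(\d+)", lines[1])
        return MuslVersion(int(m.group(1)), int(m.group(2))) if m else None

    banner = "musl libc (x86_64)\nVersion 1.2.2\nDynamic Program Loader"
    print(parse_musl_version(banner))  # MuslVersion(major=1, minor=2)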
- return - for minor in range(sys_musl.minor, -1, -1): - yield f"musllinux_{sys_musl.major}_{minor}_{arch}" - - -if __name__ == "__main__": # pragma: no cover - import sysconfig - - plat = sysconfig.get_platform() - assert plat.startswith("linux-"), "not linux" - - print("plat:", plat) - print("musl:", _get_musl_version(sys.executable)) - print("tags:", end=" ") - for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): - print(t, end="\n ") diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py deleted file mode 100644 index 90a6465..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/_structures.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - - -class InfinityType: - def __repr__(self) -> str: - return "Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return False - - def __le__(self, other: object) -> bool: - return False - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return True - - def __ge__(self, other: object) -> bool: - return True - - def __neg__(self: object) -> "NegativeInfinityType": - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType: - def __repr__(self) -> str: - return "-Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return True - - def __le__(self, other: object) -> bool: - return True - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return False - - def __ge__(self, other: object) -> bool: - return False - - def __neg__(self: object) -> InfinityType: - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py deleted file mode 100644 index eb0541b..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/markers.py +++ /dev/null @@ -1,304 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import operator -import os -import platform -import sys -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -from setuptools.extern.pyparsing import ( # noqa: N817 - Forward, - Group, - Literal as L, - ParseException, - ParseResults, - QuotedString, - ZeroOrMore, - stringEnd, - stringStart, -) - -from .specifiers import InvalidSpecifier, Specifier - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - -Operator = Callable[[str, str], bool] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. 
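The InfinityType and NegativeInfinityType singletons above are small total-ordering sentinels: one compares greater than everything, the other less than everything, and negation maps each onto the other, which makes them convenient padding values in version sort keys. A brief sketch, assuming the private packaging._structures module (whose contents match the deleted file above) is importable; the import path is version-dependent and shown only for illustration:

    from packaging._structures import Infinity, NegativeInfinity  # private helper module

    print(Infinity > (99, 99, 99))          # True: __gt__ always returns True
    print(NegativeInfinity < "anything")    # True: __lt__ always returns True
    print(-Infinity is NegativeInfinity)    # True: __neg__ returns the opposite sentinel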
- """ - - -class Node: - def __init__(self, value: Any) -> None: - self.value = value - - def __str__(self) -> str: - return str(self.value) - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") # PEP-345 - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # undocumented setuptools legacy - | L("extra") # PEP-508 -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker( - marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True -) -> str: - - assert isinstance(marker, (list, tuple, str)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. 
- if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators: Dict[str, Operator] = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs: str, op: Op, rhs: str) -> bool: - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper: Optional[Operator] = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") - - return oper(lhs, rhs) - - -class Undefined: - pass - - -_undefined = Undefined() - - -def _get_env(environment: Dict[str, str], name: str) -> str: - value: Union[str, Undefined] = environment.get(name, _undefined) - - if isinstance(value, Undefined): - raise UndefinedEnvironmentName( - f"{name!r} does not exist in evaluation environment." - ) - - return value - - -def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, str)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info: "sys._version_info") -> str: - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment() -> Dict[str, str]: - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker: - def __init__(self, marker: str) -> None: - try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - raise InvalidMarker( - f"Invalid marker: {marker!r}, parse error at " - f"{marker[e.loc : e.loc + 8]!r}" - ) - - def __str__(self) -> str: - return _format_marker(self._markers) - - def __repr__(self) -> str: - return f"" - - def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. 
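Putting the pieces above together, Marker parses a PEP 508 expression and evaluates it against default_environment(), optionally overridden per call. A short usage sketch, assuming the regular packaging distribution (whose markers module matches this vendored copy) is installed:

    from packaging.markers import Marker, default_environment

    m = Marker("python_version >= '3.8' and sys_platform != 'win32'")
    print(default_environment()["python_version"])  # e.g. "3.9" for the running interpreter
    print(m.evaluate())                             # evaluated against the current environment
    print(m.evaluate({"sys_platform": "win32"}))    # override part of the environment -> False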
environment is an optional argument to override all or - part of the determined environment. - - The environment is determined from the current Python process. - """ - current_environment = default_environment() - if environment is not None: - current_environment.update(environment) - - return _evaluate_markers(self._markers, current_environment) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py deleted file mode 100644 index 0d93231..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/requirements.py +++ /dev/null @@ -1,146 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import re -import string -import urllib.parse -from typing import List, Optional as TOptional, Set - -from setuptools.extern.pyparsing import ( # noqa - Combine, - Literal as L, - Optional, - ParseException, - Regex, - Word, - ZeroOrMore, - originalTextFor, - stringEnd, - stringStart, -) - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 -REQUIREMENT.parseString("x[]") - - -class Requirement: - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? 
Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? - - def __init__(self, requirement_string: str) -> None: - try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}' - ) - - self.name: str = req.name - if req.url: - parsed_url = urllib.parse.urlparse(req.url) - if parsed_url.scheme == "file": - if urllib.parse.urlunparse(parsed_url) != req.url: - raise InvalidRequirement("Invalid URL given") - elif not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc - ): - raise InvalidRequirement(f"Invalid URL: {req.url}") - self.url: TOptional[str] = req.url - else: - self.url = None - self.extras: Set[str] = set(req.extras.asList() if req.extras else []) - self.specifier: SpecifierSet = SpecifierSet(req.specifier) - self.marker: TOptional[Marker] = req.marker if req.marker else None - - def __str__(self) -> str: - parts: List[str] = [self.name] - - if self.extras: - formatted_extras = ",".join(sorted(self.extras)) - parts.append(f"[{formatted_extras}]") - - if self.specifier: - parts.append(str(self.specifier)) - - if self.url: - parts.append(f"@ {self.url}") - if self.marker: - parts.append(" ") - - if self.marker: - parts.append(f"; {self.marker}") - - return "".join(parts) - - def __repr__(self) -> str: - return f"" diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py deleted file mode 100644 index 0e218a6..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,802 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import abc -import functools -import itertools -import re -import warnings -from typing import ( - Callable, - Dict, - Iterable, - Iterator, - List, - Optional, - Pattern, - Set, - Tuple, - TypeVar, - Union, -) - -from .utils import canonicalize_version -from .version import LegacyVersion, Version, parse - -ParsedVersion = Union[Version, LegacyVersion] -UnparsedVersion = Union[Version, LegacyVersion, str] -VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) -CallableOperator = Callable[[ParsedVersion, str], bool] - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __str__(self) -> str: - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self) -> int: - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. - """ - - @abc.abstractproperty - def prereleases(self) -> Optional[bool]: - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value: bool) -> None: - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. 
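The Requirement class above splits a PEP 508 requirement string into a name, extras, version specifier, optional URL, and optional marker. A usage sketch, assuming the regular packaging distribution (matching the vendored module above):

    from packaging.requirements import Requirement

    req = Requirement("requests[security,tests]>=2.8.1,==2.8.* ; python_version < '3.11'")
    print(req.name)            # requests
    print(sorted(req.extras))  # ['security', 'tests'] (stored as a set)
    print(req.specifier)       # ==2.8.*,>=2.8.1 -- a SpecifierSet, rendered in sorted order
    print(req.marker)          # python_version < "3.11"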
- """ - - @abc.abstractmethod - def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators: Dict[str, str] = {} - _regex: Pattern[str] - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - - self._spec: Tuple[str, str] = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self) -> str: - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"<{self.__class__.__name__}({str(self)!r}{pre})>" - - def __str__(self) -> str: - return "{}{}".format(*self._spec) - - @property - def _canonical_spec(self) -> Tuple[str, str]: - return self._spec[0], canonicalize_version(self._spec[1]) - - def __hash__(self) -> int: - return hash(self._canonical_spec) - - def __eq__(self, other: object) -> bool: - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self) -> str: - return self._spec[0] - - @property - def version(self) -> str: - return self._spec[1] - - @property - def prereleases(self) -> Optional[bool]: - return self._prereleases - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __contains__(self, item: str) -> bool: - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - normalized_item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. 
- operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(==|!=|<=|>=|<|>)) - \s* - (?P - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - super().__init__(spec, prereleases) - - warnings.warn( - "Creating a LegacyVersion has been deprecated and will be " - "removed in the next major release", - DeprecationWarning, - ) - - def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal( - self, prospective: LegacyVersion, spec: str - ) -> bool: - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective > self._coerce_version(spec) - - -def _require_version_compare( - fn: Callable[["Specifier", ParsedVersion, str], bool] -) -> Callable[["Specifier", ParsedVersion, str], bool]: - @functools.wraps(fn) - def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: - if not isinstance(prospective, Version): - return False - return 
fn(self, prospective, spec) - - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(~=|==|!=|<=|>=|<|>|===)) - (?P - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: - - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore suffix segments. - prefix = ".".join( - list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - @_require_version_compare - def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: - - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. 
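The compatible-release handling above rewrites ~=X.Y as the pair >=X.Y together with ==X.*, so the operator never needs its own comparison logic. A small sketch of the resulting behaviour, again assuming the regular packaging distribution:

    from packaging.specifiers import Specifier

    spec = Specifier("~=2.2")          # equivalent to ">=2.2, ==2.*"
    print(spec.contains("2.5"))        # True: >=2.2 and matches the 2.* prefix
    print(spec.contains("2.1"))        # False: fails >=2.2
    print(spec.contains("3.0"))        # False: fails the ==2.* prefix check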
- split_spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - split_prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - shortened_prospective = split_prospective[: len(split_spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - padded_spec, padded_prospective = _pad_version( - split_spec, shortened_prospective - ) - - return padded_prospective == padded_spec - else: - # Convert our spec string into a Version - spec_version = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec_version.local: - prospective = Version(prospective.public) - - return prospective == spec_version - - @_require_version_compare - def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal( - self, prospective: ParsedVersion, spec: str - ) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. 
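Two behaviours above are easy to get wrong when reimplementing: ==X.Y.* compares version components rather than characters, and the ordered operators refuse pre-releases of the boundary version itself. A quick check, assuming the regular packaging distribution:

    from packaging.specifiers import Specifier

    print(Specifier("==3.1.*").contains("3.1.7"))    # True: prefix match on components
    print(Specifier("==3.1.*").contains("3.10.0"))   # False: 3.10 is not a 3.1.* release
    # Per the special case above: <3.1 must not match 3.1.dev0, but should match 3.0.dev0.
    print(Specifier("<3.1").contains("3.1.dev0", prereleases=True))  # False
    print(Specifier("<3.1").contains("3.0.dev0", prereleases=True))  # True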
- if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self) -> bool: - - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version: str) -> List[str]: - result: List[str] = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _is_not_suffix(segment: str) -> bool: - return not any( - segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") - ) - - -def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) - - -class SpecifierSet(BaseSpecifier): - def __init__( - self, specifiers: str = "", prereleases: Optional[bool] = None - ) -> None: - - # Split on , to break each individual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. 
- split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed: Set[_IndividualSpecifier] = set() - for specifier in split_specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self) -> str: - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"" - - def __str__(self) -> str: - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self) -> int: - return hash(self._specs) - - def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": - if isinstance(other, str): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." - ) - - return specifier - - def __eq__(self, other: object) -> bool: - if isinstance(other, (str, _IndividualSpecifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __len__(self) -> int: - return len(self._specs) - - def __iter__(self) -> Iterator[_IndividualSpecifier]: - return iter(self._specs) - - @property - def prereleases(self) -> Optional[bool]: - - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __contains__(self, item: UnparsedVersion) -> bool: - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. 
If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. - else: - filtered: List[VersionTypeVar] = [] - found_prereleases: List[VersionTypeVar] = [] - - item: UnparsedVersion - parsed_version: Union[Version, LegacyVersion] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py deleted file mode 100644 index 9a3d25a..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/tags.py +++ /dev/null @@ -1,487 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import logging -import platform -import sys -import sysconfig -from importlib.machinery import EXTENSION_SUFFIXES -from typing import ( - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -from . import _manylinux, _musllinux - -logger = logging.getLogger(__name__) - -PythonVersion = Sequence[int] -MacVersion = Tuple[int, int] - -INTERPRETER_SHORT_NAMES: Dict[str, str] = { - "python": "py", # Generic. 
- "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 - - -class Tag: - """ - A representation of the tag triple for a wheel. - - Instances are considered immutable and thus are hashable. Equality checking - is also supported. - """ - - __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - - def __init__(self, interpreter: str, abi: str, platform: str) -> None: - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - # The __hash__ of every single element in a Set[Tag] will be evaluated each time - # that a set calls its `.disjoint()` method, which may be called hundreds of - # times when scanning a page of links for packages with tags matching that - # Set[Tag]. Pre-computing the value here produces significant speedups for - # downstream consumers. - self._hash = hash((self._interpreter, self._abi, self._platform)) - - @property - def interpreter(self) -> str: - return self._interpreter - - @property - def abi(self) -> str: - return self._abi - - @property - def platform(self) -> str: - return self._platform - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Tag): - return NotImplemented - - return ( - (self._hash == other._hash) # Short-circuit ASAP for perf reasons. - and (self._platform == other._platform) - and (self._abi == other._abi) - and (self._interpreter == other._interpreter) - ) - - def __hash__(self) -> int: - return self._hash - - def __str__(self) -> str: - return f"{self._interpreter}-{self._abi}-{self._platform}" - - def __repr__(self) -> str: - return f"<{self} @ {id(self)}>" - - -def parse_tag(tag: str) -> FrozenSet[Tag]: - """ - Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. - - Returning a set is required due to the possibility that the tag is a - compressed tag set. - """ - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value = sysconfig.get_config_var(name) - if value is None and warn: - logger.debug( - "Config variable '%s' is unset, Python ABI tag may be incorrect", name - ) - return value - - -def _normalize_string(string: str) -> str: - return string.replace(".", "_").replace("-", "_") - - -def _abi3_applies(python_version: PythonVersion) -> bool: - """ - Determine if the Python version supports abi3. - - PEP 384 was first implemented in Python 3.2. - """ - return len(python_version) > 1 and tuple(python_version) >= (3, 2) - - -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: - py_version = tuple(py_version) # To allow for version comparison. - abis = [] - version = _version_nodot(py_version[:2]) - debug = pymalloc = ucs4 = "" - with_debug = _get_config_var("Py_DEBUG", warn) - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. 
- # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 - has_ext = "_d.pyd" in EXTENSION_SUFFIXES - if with_debug or (with_debug is None and (has_refcount or has_ext)): - debug = "d" - if py_version < (3, 8): - with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) - if with_pymalloc or with_pymalloc is None: - pymalloc = "m" - if py_version < (3, 3): - unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) - if unicode_size == 4 or ( - unicode_size is None and sys.maxunicode == 0x10FFFF - ): - ucs4 = "u" - elif debug: - # Debug builds can also load "normal" extension modules. - # We can also assume no UCS-4 or pymalloc requirement. - abis.append(f"cp{version}") - abis.insert( - 0, - "cp{version}{debug}{pymalloc}{ucs4}".format( - version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 - ), - ) - return abis - - -def cpython_tags( - python_version: Optional[PythonVersion] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a CPython interpreter. - - The tags consist of: - - cp-- - - cp-abi3- - - cp-none- - - cp-abi3- # Older Python versions down to 3.2. - - If python_version only specifies a major version then user-provided ABIs and - the 'none' ABItag will be used. - - If 'abi3' or 'none' are specified in 'abis' then they will be yielded at - their normal position and not at the beginning. - """ - if not python_version: - python_version = sys.version_info[:2] - - interpreter = f"cp{_version_nodot(python_version[:2])}" - - if abis is None: - if len(python_version) > 1: - abis = _cpython_abis(python_version, warn) - else: - abis = [] - abis = list(abis) - # 'abi3' and 'none' are explicitly handled later. - for explicit_abi in ("abi3", "none"): - try: - abis.remove(explicit_abi) - except ValueError: - pass - - platforms = list(platforms or platform_tags()) - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - if _abi3_applies(python_version): - yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) - yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) - - if _abi3_applies(python_version): - for minor_version in range(python_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{version}".format( - version=_version_nodot((python_version[0], minor_version)) - ) - yield Tag(interpreter, "abi3", platform_) - - -def _generic_abi() -> Iterator[str]: - abi = sysconfig.get_config_var("SOABI") - if abi: - yield _normalize_string(abi) - - -def generic_tags( - interpreter: Optional[str] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a generic interpreter. - - The tags consist of: - - -- - - The "none" ABI will be added if it was not explicitly provided. - """ - if not interpreter: - interp_name = interpreter_name() - interp_version = interpreter_version(warn=warn) - interpreter = "".join([interp_name, interp_version]) - if abis is None: - abis = _generic_abi() - platforms = list(platforms or platform_tags()) - abis = list(abis) - if "none" not in abis: - abis.append("none") - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - -def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: - """ - Yields Python versions in descending order. 
- - After the latest version, the major-only version will be yielded, and then - all previous versions of that major version. - """ - if len(py_version) > 1: - yield f"py{_version_nodot(py_version[:2])}" - yield f"py{py_version[0]}" - if len(py_version) > 1: - for minor in range(py_version[1] - 1, -1, -1): - yield f"py{_version_nodot((py_version[0], minor))}" - - -def compatible_tags( - python_version: Optional[PythonVersion] = None, - interpreter: Optional[str] = None, - platforms: Optional[Iterable[str]] = None, -) -> Iterator[Tag]: - """ - Yields the sequence of tags that are compatible with a specific version of Python. - - The tags consist of: - - py*-none- - - -none-any # ... if `interpreter` is provided. - - py*-none-any - """ - if not python_version: - python_version = sys.version_info[:2] - platforms = list(platforms or platform_tags()) - for version in _py_interpreter_range(python_version): - for platform_ in platforms: - yield Tag(version, "none", platform_) - if interpreter: - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(python_version): - yield Tag(version, "none", "any") - - -def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: - if not is_32bit: - return arch - - if arch.startswith("ppc"): - return "ppc" - - return "i386" - - -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: - formats = [cpu_arch] - if cpu_arch == "x86_64": - if version < (10, 4): - return [] - formats.extend(["intel", "fat64", "fat32"]) - - elif cpu_arch == "i386": - if version < (10, 4): - return [] - formats.extend(["intel", "fat32", "fat"]) - - elif cpu_arch == "ppc64": - # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? - if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - if cpu_arch in {"arm64", "x86_64"}: - formats.append("universal2") - - if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: - formats.append("universal") - - return formats - - -def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None -) -> Iterator[str]: - """ - Yields the platform tags for a macOS system. - - The `version` parameter is a two-item tuple specifying the macOS version to - generate platform tags for. The `arch` parameter is the CPU architecture to - generate platform tags for. Both parameters default to the appropriate value - for the current system. - """ - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. - for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. 
- for major_version in range(version[0], 10, -1): - compat_version = major_version, 0 - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=major_version, minor=0, binary_format=binary_format - ) - - if version >= (11, 0): - # Mac OS 11 on x86_64 is compatible with binaries from previous releases. - # Arm64 support was introduced in 11.0, so no Arm binaries from previous - # releases exist. - # - # However, the "universal2" binary format can have a - # macOS version earlier than 11.0 when the x86_64 part of the binary supports - # that version of macOS. - if arch == "x86_64": - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - else: - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_format = "universal2" - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: - linux = _normalize_string(sysconfig.get_platform()) - if is_32bit: - if linux == "linux_x86_64": - linux = "linux_i686" - elif linux == "linux_aarch64": - linux = "linux_armv7l" - _, arch = linux.split("_", 1) - yield from _manylinux.platform_tags(linux, arch) - yield from _musllinux.platform_tags(arch) - yield linux - - -def _generic_platforms() -> Iterator[str]: - yield _normalize_string(sysconfig.get_platform()) - - -def platform_tags() -> Iterator[str]: - """ - Provides the platform tags for this installation. - """ - if platform.system() == "Darwin": - return mac_platforms() - elif platform.system() == "Linux": - return _linux_platforms() - else: - return _generic_platforms() - - -def interpreter_name() -> str: - """ - Returns the name of the running interpreter. - """ - name = sys.implementation.name - return INTERPRETER_SHORT_NAMES.get(name) or name - - -def interpreter_version(*, warn: bool = False) -> str: - """ - Returns the version of the running interpreter. - """ - version = _get_config_var("py_version_nodot", warn=warn) - if version: - version = str(version) - else: - version = _version_nodot(sys.version_info[:2]) - return version - - -def _version_nodot(version: PythonVersion) -> str: - return "".join(map(str, version)) - - -def sys_tags(*, warn: bool = False) -> Iterator[Tag]: - """ - Returns the sequence of tag triples for the running interpreter. - - The order of the sequence corresponds to priority order for the - interpreter, from most to least important. - """ - - interp_name = interpreter_name() - if interp_name == "cp": - yield from cpython_tags(warn=warn) - else: - yield from generic_tags() - - if interp_name == "pp": - yield from compatible_tags(interpreter="pp3") - else: - yield from compatible_tags() diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py deleted file mode 100644 index bab11b8..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/utils.py +++ /dev/null @@ -1,136 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. - -import re -from typing import FrozenSet, NewType, Tuple, Union, cast - -from .tags import Tag, parse_tag -from .version import InvalidVersion, Version - -BuildTag = Union[Tuple[()], Tuple[int, str]] -NormalizedName = NewType("NormalizedName", str) - - -class InvalidWheelFilename(ValueError): - """ - An invalid wheel filename was found, users should refer to PEP 427. - """ - - -class InvalidSdistFilename(ValueError): - """ - An invalid sdist filename was found, users should refer to the packaging user guide. - """ - - -_canonicalize_regex = re.compile(r"[-_.]+") -# PEP 427: The build number must start with a digit. -_build_tag_regex = re.compile(r"(\d+)(.*)") - - -def canonicalize_name(name: str) -> NormalizedName: - # This is taken from PEP 503. - value = _canonicalize_regex.sub("-", name).lower() - return cast(NormalizedName, value) - - -def canonicalize_version(version: Union[Version, str]) -> str: - """ - This is very similar to Version.__str__, but has one subtle difference - with the way it handles the release segment. - """ - if isinstance(version, str): - try: - parsed = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - else: - parsed = version - - parts = [] - - # Epoch - if parsed.epoch != 0: - parts.append(f"{parsed.epoch}!") - - # Release segment - # NB: This strips trailing '.0's to normalize - parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) - - # Pre-release - if parsed.pre is not None: - parts.append("".join(str(x) for x in parsed.pre)) - - # Post-release - if parsed.post is not None: - parts.append(f".post{parsed.post}") - - # Development release - if parsed.dev is not None: - parts.append(f".dev{parsed.dev}") - - # Local version segment - if parsed.local is not None: - parts.append(f"+{parsed.local}") - - return "".join(parts) - - -def parse_wheel_filename( - filename: str, -) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: - if not filename.endswith(".whl"): - raise InvalidWheelFilename( - f"Invalid wheel filename (extension must be '.whl'): {filename}" - ) - - filename = filename[:-4] - dashes = filename.count("-") - if dashes not in (4, 5): - raise InvalidWheelFilename( - f"Invalid wheel filename (wrong number of parts): {filename}" - ) - - parts = filename.split("-", dashes - 2) - name_part = parts[0] - # See PEP 427 for the rules on escaping the project name - if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: - raise InvalidWheelFilename(f"Invalid project name: {filename}") - name = canonicalize_name(name_part) - version = Version(parts[1]) - if dashes == 5: - build_part = parts[2] - build_match = _build_tag_regex.match(build_part) - if build_match is None: - raise InvalidWheelFilename( - f"Invalid build number: {build_part} in '{filename}'" - ) - build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) - else: - build = () - tags = parse_tag(parts[-1]) - return (name, version, build, tags) - - -def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: - if filename.endswith(".tar.gz"): - file_stem = filename[: -len(".tar.gz")] - elif filename.endswith(".zip"): - file_stem = filename[: -len(".zip")] - else: - raise InvalidSdistFilename( - f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" - f" {filename}" - ) - - # We are requiring a PEP 440 version, which cannot contain dashes, - # so we split on the last dash. 
- name_part, sep, version_part = file_stem.rpartition("-") - if not sep: - raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") - - name = canonicalize_name(name_part) - version = Version(version_part) - return (name, version) diff --git a/venv/Lib/site-packages/setuptools/_vendor/packaging/version.py b/venv/Lib/site-packages/setuptools/_vendor/packaging/version.py deleted file mode 100644 index de9a09a..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/packaging/version.py +++ /dev/null @@ -1,504 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import collections -import itertools -import re -import warnings -from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union - -from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType - -__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] - -InfiniteTypes = Union[InfinityType, NegativeInfinityType] -PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] -SubLocalType = Union[InfiniteTypes, int, str] -LocalType = Union[ - NegativeInfinityType, - Tuple[ - Union[ - SubLocalType, - Tuple[SubLocalType, str], - Tuple[NegativeInfinityType, SubLocalType], - ], - ..., - ], -] -CmpKey = Tuple[ - int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType -] -LegacyCmpKey = Tuple[int, Tuple[str, ...]] -VersionComparisonMethod = Callable[ - [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool -] - -_Version = collections.namedtuple( - "_Version", ["epoch", "release", "dev", "pre", "post", "local"] -) - - -def parse(version: str) -> Union["LegacyVersion", "Version"]: - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. - """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) - - -class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. - """ - - -class _BaseVersion: - _key: Union[CmpKey, LegacyCmpKey] - - def __hash__(self) -> int: - return hash(self._key) - - # Please keep the duplicated `isinstance` check - # in the six comparisons hereunder - # unless you find a way to avoid adding overhead function calls. 
- def __lt__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key < other._key - - def __le__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key <= other._key - - def __eq__(self, other: object) -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key == other._key - - def __ge__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key >= other._key - - def __gt__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key > other._key - - def __ne__(self, other: object) -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key != other._key - - -class LegacyVersion(_BaseVersion): - def __init__(self, version: str) -> None: - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - warnings.warn( - "Creating a LegacyVersion has been deprecated and will be " - "removed in the next major release", - DeprecationWarning, - ) - - def __str__(self) -> str: - return self._version - - def __repr__(self) -> str: - return f"" - - @property - def public(self) -> str: - return self._version - - @property - def base_version(self) -> str: - return self._version - - @property - def epoch(self) -> int: - return -1 - - @property - def release(self) -> None: - return None - - @property - def pre(self) -> None: - return None - - @property - def post(self) -> None: - return None - - @property - def dev(self) -> None: - return None - - @property - def local(self) -> None: - return None - - @property - def is_prerelease(self) -> bool: - return False - - @property - def is_postrelease(self) -> bool: - return False - - @property - def is_devrelease(self) -> bool: - return False - - -_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) - -_legacy_version_replacement_map = { - "pre": "c", - "preview": "c", - "-": "final-", - "rc": "c", - "dev": "@", -} - - -def _parse_version_parts(s: str) -> Iterator[str]: - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version: str) -> LegacyCmpKey: - - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts: List[str] = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - - return epoch, tuple(parts) - - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -VERSION_PATTERN = r""" - v? 
-    (?:
-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-        (?P<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-
-class Version(_BaseVersion):
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    def __init__(self, version: str) -> None:
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self) -> str:
-        return f"<Version('{self}')>"
-
-    def __str__(self) -> str:
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(f".post{self.post}")
-
-        # Development release
-        if self.dev is not None:
-            parts.append(f".dev{self.dev}")
-
-        # Local version segment
-        if self.local is not None:
-            parts.append(f"+{self.local}")
-
-        return "".join(parts)
-
-    @property
-    def epoch(self) -> int:
-        _epoch: int = self._version.epoch
-        return _epoch
-
-    @property
-    def release(self) -> Tuple[int, ...]:
-        _release: Tuple[int, ...] = self._version.release
-        return _release
-
-    @property
-    def pre(self) -> Optional[Tuple[str, int]]:
-        _pre: Optional[Tuple[str, int]] = self._version.pre
-        return _pre
-
-    @property
-    def post(self) -> Optional[int]:
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self) -> Optional[int]:
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self) -> Optional[str]:
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self) -> str:
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self) -> str:
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self) -> bool:
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self) -> bool:
-        return self.post is not None
-
-    @property
-    def is_devrelease(self) -> bool:
-        return self.dev is not None
-
-    @property
-    def major(self) -> int:
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self) -> int:
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self) -> int:
-        return self.release[2] if len(self.release) >= 3 else 0
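A brief illustrative sketch of the derived properties above (public, base_version, is_prerelease, major/minor/micro); it assumes the `packaging` distribution is installed and is not part of the vendored file:

from packaging.version import Version

v = Version("1.2rc1+local.7")
print(v.public)                    # 1.2rc1 (local segment dropped)
print(v.base_version)              # 1.2    (epoch and release only)
print(v.is_prerelease)             # True   (an rc is a pre-release)
print(v.major, v.minor, v.micro)   # 1 2 0  (missing release parts default to 0)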
-
-
-def _parse_letter_version(
-    letter: str, number: Union[str, bytes, SupportsInt]
-) -> Optional[Tuple[str, int]]:
-
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
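The normalization above is what makes alternate spellings and the implicit post-release syntax compare equal; a short sketch assuming the `packaging` distribution is installed (illustrative, not part of the vendored file):

from packaging.version import Version

print(Version("1.0alpha1") == Version("1.0a1"))   # True ('alpha' -> 'a')
print(Version("1.0pre1") == Version("1.0rc1"))    # True ('pre' -> 'rc')
print(Version("1.0-1") == Version("1.0.post1"))   # True (implicit post release)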
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local: str) -> Optional[LocalType]:
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
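Because digits in the local segment become ints, numeric parts compare numerically rather than lexicographically; a short sketch assuming the `packaging` distribution is installed (illustrative only):

from packaging.version import Version

print(Version("1.0+abc.1.twelve").local)      # abc.1.twelve
print(Version("1.0+2") < Version("1.0+10"))   # True (2 < 10, not a string compare)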
-
-
-def _cmpkey(
-    epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[Tuple[SubLocalType]],
-) -> CmpKey:
-
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll use a reverse the list, drop all the now
-    # leading zeros until we come to something non zero, then take the rest
-    # re-reverse it back into the correct order and make it a tuple and use
-    # that for our sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre: PrePostDevType = NegativeInfinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post: PrePostDevType = NegativeInfinity
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev: PrePostDevType = Infinity
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local: LocalType = NegativeInfinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
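A small sketch of the total ordering produced by the comparison key above, assuming the `packaging` distribution is installed (illustrative, not part of the vendored file):

from packaging.version import Version

versions = ["1.0.post1", "1.0", "1.0rc1", "1.0.dev0"]
print(sorted(versions, key=Version))
# -> ['1.0.dev0', '1.0rc1', '1.0', '1.0.post1']
print(Version("1.0") == Version("1.0.0"))   # True (trailing zeros are dropped)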
diff --git a/venv/Lib/site-packages/setuptools/_vendor/pyparsing.py b/venv/Lib/site-packages/setuptools/_vendor/pyparsing.py
deleted file mode 100644
index 4aa30ee..0000000
--- a/venv/Lib/site-packages/setuptools/_vendor/pyparsing.py
+++ /dev/null
@@ -1,5742 +0,0 @@
-# module pyparsing.py
-#
-# Copyright (c) 2003-2018  Paul T. McGuire
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
-__doc__ = \
-"""
-pyparsing module - Classes and methods to define and execute parsing grammars
-=============================================================================
-
-The pyparsing module is an alternative approach to creating and executing simple grammars,
-vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
-don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
-provides a library of classes that you use to construct the grammar directly in Python.
-
-Here is a program to parse "Hello, World!" (or any greeting of the form 
-C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements 
-(L{'+'} operator gives L{And} expressions, strings are auto-converted to
-L{Literal} expressions)::
-
-    from pyparsing import Word, alphas
-
-    # define grammar of a greeting
-    greet = Word(alphas) + "," + Word(alphas) + "!"
-
-    hello = "Hello, World!"
-    print (hello, "->", greet.parseString(hello))
-
-The program outputs the following::
-
-    Hello, World! -> ['Hello', ',', 'World', '!']
-
-The Python representation of the grammar is quite readable, owing to the self-explanatory
-class names, and the use of '+', '|' and '^' operators.
-
-The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an
-object with named attributes.
-
-The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
- - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
- - quoted strings
- - embedded comments
-
-
-Getting Started -
------------------
-Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
-classes inherit from. Use the docstrings for examples of how to:
- - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
- - construct character word-group expressions using the L{Word} class
- - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
- - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones
- - associate names with your parsed results using L{ParserElement.setResultsName}
- - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
- - find more useful common expressions in the L{pyparsing_common} namespace class
-"""
-
-__version__ = "2.2.1"
-__versionTime__ = "18 Sep 2018 00:49 UTC"
-__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
-
-import string
-from weakref import ref as wkref
-import copy
-import sys
-import warnings
-import re
-import sre_constants
-import collections
-import pprint
-import traceback
-import types
-from datetime import datetime
-
-try:
-    from _thread import RLock
-except ImportError:
-    from threading import RLock
-
-try:
-    # Python 3
-    from collections.abc import Iterable
-    from collections.abc import MutableMapping
-except ImportError:
-    # Python 2.7
-    from collections import Iterable
-    from collections import MutableMapping
-
-try:
-    from collections import OrderedDict as _OrderedDict
-except ImportError:
-    try:
-        from ordereddict import OrderedDict as _OrderedDict
-    except ImportError:
-        _OrderedDict = None
-
-#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
-
-__all__ = [
-'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
-'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
-'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
-'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
-'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
-'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 
-'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
-'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
-'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
-'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
-'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
-'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
-'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
-'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 
-'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
-'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
-'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
-'CloseMatch', 'tokenMap', 'pyparsing_common',
-]
-
-system_version = tuple(sys.version_info)[:3]
-PY_3 = system_version[0] == 3
-if PY_3:
-    _MAX_INT = sys.maxsize
-    basestring = str
-    unichr = chr
-    _ustr = str
-
-    # build list of single arg builtins, that can be used as parse actions
-    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
-
-else:
-    _MAX_INT = sys.maxint
-    range = xrange
-
-    def _ustr(obj):
-        """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
-           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
-           then < returns the unicode object | encodes it with the default encoding | ... >.
-        """
-        if isinstance(obj,unicode):
-            return obj
-
-        try:
-            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
-            # it won't break any existing code.
-            return str(obj)
-
-        except UnicodeEncodeError:
-            # Else encode it
-            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
-            xmlcharref = Regex(r'&#\d+;')
-            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
-            return xmlcharref.transformString(ret)
-
-    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
-    singleArgBuiltins = []
-    import __builtin__
-    for fname in "sum len sorted reversed list tuple set any all min max".split():
-        try:
-            singleArgBuiltins.append(getattr(__builtin__,fname))
-        except AttributeError:
-            continue
-            
-_generatorType = type((y for y in range(1)))
- 
-def _xml_escape(data):
-    """Escape &, <, >, ", ', etc. in a string of data."""
-
-    # ampersand must be replaced first
-    from_symbols = '&><"\''
-    to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
-    for from_,to_ in zip(from_symbols, to_symbols):
-        data = data.replace(from_, to_)
-    return data
-
-class _Constants(object):
-    pass
-
-alphas     = string.ascii_uppercase + string.ascii_lowercase
-nums       = "0123456789"
-hexnums    = nums + "ABCDEFabcdef"
-alphanums  = alphas + nums
-_bslash    = chr(92)
-printables = "".join(c for c in string.printable if c not in string.whitespace)
-
-class ParseBaseException(Exception):
-    """base exception class for all parsing runtime exceptions"""
-    # Performance tuning: we construct a *lot* of these, so keep this
-    # constructor as small and fast as possible
-    def __init__( self, pstr, loc=0, msg=None, elem=None ):
-        self.loc = loc
-        if msg is None:
-            self.msg = pstr
-            self.pstr = ""
-        else:
-            self.msg = msg
-            self.pstr = pstr
-        self.parserElement = elem
-        self.args = (pstr, loc, msg)
-
-    @classmethod
-    def _from_exception(cls, pe):
-        """
-        internal factory method to simplify creating one type of ParseException 
-        from another - avoids having __init__ signature conflicts among subclasses
-        """
-        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
-
-    def __getattr__( self, aname ):
-        """supported attributes by name are:
-            - lineno - returns the line number of the exception text
-            - col - returns the column number of the exception text
-            - line - returns the line containing the exception text
-        """
-        if( aname == "lineno" ):
-            return lineno( self.loc, self.pstr )
-        elif( aname in ("col", "column") ):
-            return col( self.loc, self.pstr )
-        elif( aname == "line" ):
-            return line( self.loc, self.pstr )
-        else:
-            raise AttributeError(aname)
-
-    def __str__( self ):
-        return "%s (at char %d), (line:%d, col:%d)" % \
-                ( self.msg, self.loc, self.lineno, self.column )
-    def __repr__( self ):
-        return _ustr(self)
-    def markInputline( self, markerString = ">!<" ):
-        """Extracts the exception line from the input string, and marks
-           the location of the exception with a special symbol.
-        """
-        line_str = self.line
-        line_column = self.column - 1
-        if markerString:
-            line_str = "".join((line_str[:line_column],
-                                markerString, line_str[line_column:]))
-        return line_str.strip()
-    def __dir__(self):
-        return "lineno col line".split() + dir(type(self))
-
-class ParseException(ParseBaseException):
-    """
-    Exception thrown when parse expressions don't match class;
-    supported attributes by name are:
-     - lineno - returns the line number of the exception text
-     - col - returns the column number of the exception text
-     - line - returns the line containing the exception text
-        
-    Example::
-        try:
-            Word(nums).setName("integer").parseString("ABC")
-        except ParseException as pe:
-            print(pe)
-            print("column: {}".format(pe.col))
-            
-    prints::
-       Expected integer (at char 0), (line:1, col:1)
-        column: 1
-    """
-    pass
-
-class ParseFatalException(ParseBaseException):
-    """user-throwable exception thrown when inconsistent parse content
-       is found; stops all parsing immediately"""
-    pass
-
-class ParseSyntaxException(ParseFatalException):
-    """just like L{ParseFatalException}, but thrown internally when an
-       L{ErrorStop} ('-' operator) indicates that parsing is to stop 
-       immediately because an unbacktrackable syntax error has been found"""
-    pass
-
-#~ class ReparseException(ParseBaseException):
-    #~ """Experimental class - parse actions can raise this exception to cause
-       #~ pyparsing to reparse the input string:
-        #~ - with a modified input string, and/or
-        #~ - with a modified start location
-       #~ Set the values of the ReparseException in the constructor, and raise the
-       #~ exception in a parse action to cause pyparsing to use the new string/location.
-       #~ Setting the values as None causes no change to be made.
-       #~ """
-    #~ def __init_( self, newstring, restartLoc ):
-        #~ self.newParseText = newstring
-        #~ self.reparseLoc = restartLoc
-
-class RecursiveGrammarException(Exception):
-    """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
-    def __init__( self, parseElementList ):
-        self.parseElementTrace = parseElementList
-
-    def __str__( self ):
-        return "RecursiveGrammarException: %s" % self.parseElementTrace
-
-class _ParseResultsWithOffset(object):
-    def __init__(self,p1,p2):
-        self.tup = (p1,p2)
-    def __getitem__(self,i):
-        return self.tup[i]
-    def __repr__(self):
-        return repr(self.tup[0])
-    def setOffset(self,i):
-        self.tup = (self.tup[0],i)
-
-class ParseResults(object):
-    """
-    Structured parse results, to provide multiple means of access to the parsed data:
-       - as a list (C{len(results)})
-       - by list index (C{results[0], results[1]}, etc.)
-       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
-
-    Example::
-        integer = Word(nums)
-        date_str = (integer.setResultsName("year") + '/' 
-                        + integer.setResultsName("month") + '/' 
-                        + integer.setResultsName("day"))
-        # equivalent form:
-        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-        # parseString returns a ParseResults object
-        result = date_str.parseString("1999/12/31")
-
-        def test(s, fn=repr):
-            print("%s -> %s" % (s, fn(eval(s))))
-        test("list(result)")
-        test("result[0]")
-        test("result['month']")
-        test("result.day")
-        test("'month' in result")
-        test("'minutes' in result")
-        test("result.dump()", str)
-    prints::
-        list(result) -> ['1999', '/', '12', '/', '31']
-        result[0] -> '1999'
-        result['month'] -> '12'
-        result.day -> '31'
-        'month' in result -> True
-        'minutes' in result -> False
-        result.dump() -> ['1999', '/', '12', '/', '31']
-        - day: 31
-        - month: 12
-        - year: 1999
-    """
-    def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
-        if isinstance(toklist, cls):
-            return toklist
-        retobj = object.__new__(cls)
-        retobj.__doinit = True
-        return retobj
-
-    # Performance tuning: we construct a *lot* of these, so keep this
-    # constructor as small and fast as possible
-    def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
-        if self.__doinit:
-            self.__doinit = False
-            self.__name = None
-            self.__parent = None
-            self.__accumNames = {}
-            self.__asList = asList
-            self.__modal = modal
-            if toklist is None:
-                toklist = []
-            if isinstance(toklist, list):
-                self.__toklist = toklist[:]
-            elif isinstance(toklist, _generatorType):
-                self.__toklist = list(toklist)
-            else:
-                self.__toklist = [toklist]
-            self.__tokdict = dict()
-
-        if name is not None and name:
-            if not modal:
-                self.__accumNames[name] = 0
-            if isinstance(name,int):
-                name = _ustr(name) # will always return a str, but use _ustr for consistency
-            self.__name = name
-            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
-                if isinstance(toklist,basestring):
-                    toklist = [ toklist ]
-                if asList:
-                    if isinstance(toklist,ParseResults):
-                        self[name] = _ParseResultsWithOffset(toklist.copy(),0)
-                    else:
-                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
-                    self[name].__name = name
-                else:
-                    try:
-                        self[name] = toklist[0]
-                    except (KeyError,TypeError,IndexError):
-                        self[name] = toklist
-
-    def __getitem__( self, i ):
-        if isinstance( i, (int,slice) ):
-            return self.__toklist[i]
-        else:
-            if i not in self.__accumNames:
-                return self.__tokdict[i][-1][0]
-            else:
-                return ParseResults([ v[0] for v in self.__tokdict[i] ])
-
-    def __setitem__( self, k, v, isinstance=isinstance ):
-        if isinstance(v,_ParseResultsWithOffset):
-            self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
-            sub = v[0]
-        elif isinstance(k,(int,slice)):
-            self.__toklist[k] = v
-            sub = v
-        else:
-            self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
-            sub = v
-        if isinstance(sub,ParseResults):
-            sub.__parent = wkref(self)
-
-    def __delitem__( self, i ):
-        if isinstance(i,(int,slice)):
-            mylen = len( self.__toklist )
-            del self.__toklist[i]
-
-            # convert int to slice
-            if isinstance(i, int):
-                if i < 0:
-                    i += mylen
-                i = slice(i, i+1)
-            # get removed indices
-            removed = list(range(*i.indices(mylen)))
-            removed.reverse()
-            # fixup indices in token dictionary
-            for name,occurrences in self.__tokdict.items():
-                for j in removed:
-                    for k, (value, position) in enumerate(occurrences):
-                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
-        else:
-            del self.__tokdict[i]
-
-    def __contains__( self, k ):
-        return k in self.__tokdict
-
-    def __len__( self ): return len( self.__toklist )
-    def __bool__(self): return ( not not self.__toklist )
-    __nonzero__ = __bool__
-    def __iter__( self ): return iter( self.__toklist )
-    def __reversed__( self ): return iter( self.__toklist[::-1] )
-    def _iterkeys( self ):
-        if hasattr(self.__tokdict, "iterkeys"):
-            return self.__tokdict.iterkeys()
-        else:
-            return iter(self.__tokdict)
-
-    def _itervalues( self ):
-        return (self[k] for k in self._iterkeys())
-            
-    def _iteritems( self ):
-        return ((k, self[k]) for k in self._iterkeys())
-
-    if PY_3:
-        keys = _iterkeys       
-        """Returns an iterator of all named result keys (Python 3.x only)."""
-
-        values = _itervalues
-        """Returns an iterator of all named result values (Python 3.x only)."""
-
-        items = _iteritems
-        """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
-
-    else:
-        iterkeys = _iterkeys
-        """Returns an iterator of all named result keys (Python 2.x only)."""
-
-        itervalues = _itervalues
-        """Returns an iterator of all named result values (Python 2.x only)."""
-
-        iteritems = _iteritems
-        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
-
-        def keys( self ):
-            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
-            return list(self.iterkeys())
-
-        def values( self ):
-            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
-            return list(self.itervalues())
-                
-        def items( self ):
-            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
-            return list(self.iteritems())
-
-    def haskeys( self ):
-        """Since keys() returns an iterator, this method is helpful in bypassing
-           code that looks for the existence of any defined results names."""
-        return bool(self.__tokdict)
-        
-    def pop( self, *args, **kwargs):
-        """
-        Removes and returns item at specified index (default=C{last}).
-        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
-        argument or an integer argument, it will use C{list} semantics
-        and pop tokens from the list of parsed tokens. If passed a 
-        non-integer argument (most likely a string), it will use C{dict}
-        semantics and pop the corresponding value from any defined 
-        results names. A second default return value argument is 
-        supported, just as in C{dict.pop()}.
-
-        Example::
-            def remove_first(tokens):
-                tokens.pop(0)
-            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
-            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
-
-            label = Word(alphas)
-            patt = label("LABEL") + OneOrMore(Word(nums))
-            print(patt.parseString("AAB 123 321").dump())
-
-            # Use pop() in a parse action to remove named result (note that corresponding value is not
-            # removed from list form of results)
-            def remove_LABEL(tokens):
-                tokens.pop("LABEL")
-                return tokens
-            patt.addParseAction(remove_LABEL)
-            print(patt.parseString("AAB 123 321").dump())
-        prints::
-            ['AAB', '123', '321']
-            - LABEL: AAB
-
-            ['AAB', '123', '321']
-        """
-        if not args:
-            args = [-1]
-        for k,v in kwargs.items():
-            if k == 'default':
-                args = (args[0], v)
-            else:
-                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
-        if (isinstance(args[0], int) or 
-                        len(args) == 1 or 
-                        args[0] in self):
-            index = args[0]
-            ret = self[index]
-            del self[index]
-            return ret
-        else:
-            defaultvalue = args[1]
-            return defaultvalue
-
-    def get(self, key, defaultValue=None):
-        """
-        Returns named result matching the given key, or if there is no
-        such name, then returns the given C{defaultValue} or C{None} if no
-        C{defaultValue} is specified.
-
-        Similar to C{dict.get()}.
-        
-        Example::
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
-
-            result = date_str.parseString("1999/12/31")
-            print(result.get("year")) # -> '1999'
-            print(result.get("hour", "not specified")) # -> 'not specified'
-            print(result.get("hour")) # -> None
-        """
-        if key in self:
-            return self[key]
-        else:
-            return defaultValue
-
-    def insert( self, index, insStr ):
-        """
-        Inserts new element at location index in the list of parsed tokens.
-        
-        Similar to C{list.insert()}.
-
-        Example::
-            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
-
-            # use a parse action to insert the parse location in the front of the parsed results
-            def insert_locn(locn, tokens):
-                tokens.insert(0, locn)
-            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
-        """
-        self.__toklist.insert(index, insStr)
-        # fixup indices in token dictionary
-        for name,occurrences in self.__tokdict.items():
-            for k, (value, position) in enumerate(occurrences):
-                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
-
-    def append( self, item ):
-        """
-        Add single element to end of ParseResults list of elements.
-
-        Example::
-            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
-            
-            # use a parse action to compute the sum of the parsed integers, and add it to the end
-            def append_sum(tokens):
-                tokens.append(sum(map(int, tokens)))
-            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
-        """
-        self.__toklist.append(item)
-
-    def extend( self, itemseq ):
-        """
-        Add sequence of elements to end of ParseResults list of elements.
-
-        Example::
-            patt = OneOrMore(Word(alphas))
-            
-            # use a parse action to append the reverse of the matched strings, to make a palindrome
-            def make_palindrome(tokens):
-                tokens.extend(reversed([t[::-1] for t in tokens]))
-                return ''.join(tokens)
-            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
-        """
-        if isinstance(itemseq, ParseResults):
-            self += itemseq
-        else:
-            self.__toklist.extend(itemseq)
-
-    def clear( self ):
-        """
-        Clear all elements and results names.
-        """
-        del self.__toklist[:]
-        self.__tokdict.clear()
-
-    def __getattr__( self, name ):
-        try:
-            return self[name]
-        except KeyError:
-            return ""
-            
-        if name in self.__tokdict:
-            if name not in self.__accumNames:
-                return self.__tokdict[name][-1][0]
-            else:
-                return ParseResults([ v[0] for v in self.__tokdict[name] ])
-        else:
-            return ""
-
-    def __add__( self, other ):
-        ret = self.copy()
-        ret += other
-        return ret
-
-    def __iadd__( self, other ):
-        if other.__tokdict:
-            offset = len(self.__toklist)
-            addoffset = lambda a: offset if a<0 else a+offset
-            otheritems = other.__tokdict.items()
-            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
-                                for (k,vlist) in otheritems for v in vlist]
-            for k,v in otherdictitems:
-                self[k] = v
-                if isinstance(v[0],ParseResults):
-                    v[0].__parent = wkref(self)
-            
-        self.__toklist += other.__toklist
-        self.__accumNames.update( other.__accumNames )
-        return self
-
-    def __radd__(self, other):
-        if isinstance(other,int) and other == 0:
-            # useful for merging many ParseResults using sum() builtin
-            return self.copy()
-        else:
-            # this may raise a TypeError - so be it
-            return other + self
-        
-    def __repr__( self ):
-        return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
-
-    def __str__( self ):
-        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
-
-    def _asStringList( self, sep='' ):
-        out = []
-        for item in self.__toklist:
-            if out and sep:
-                out.append(sep)
-            if isinstance( item, ParseResults ):
-                out += item._asStringList()
-            else:
-                out.append( _ustr(item) )
-        return out
-
-    def asList( self ):
-        """
-        Returns the parse results as a nested list of matching tokens, all converted to strings.
-
-        Example::
-            patt = OneOrMore(Word(alphas))
-            result = patt.parseString("sldkj lsdkj sldkj")
-            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
-            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
-            
-            # Use asList() to create an actual list
-            result_list = result.asList()
-            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
-        """
-        return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
-
-    def asDict( self ):
-        """
-        Returns the named parse results as a nested dictionary.
-
-        Example::
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-            
-            result = date_str.parseString('12/31/1999')
-            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
-            
-            result_dict = result.asDict()
-            print(type(result_dict), repr(result_dict)) # -> <type 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
-
-            # even though a ParseResults supports dict-like access, sometimes you just need to have a dict
-            import json
-            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
-            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
-        """
-        if PY_3:
-            item_fn = self.items
-        else:
-            item_fn = self.iteritems
-            
-        def toItem(obj):
-            if isinstance(obj, ParseResults):
-                if obj.haskeys():
-                    return obj.asDict()
-                else:
-                    return [toItem(v) for v in obj]
-            else:
-                return obj
-                
-        return dict((k,toItem(v)) for k,v in item_fn())
-
-    def copy( self ):
-        """
-        Returns a new copy of a C{ParseResults} object.
-        """
-        ret = ParseResults( self.__toklist )
-        ret.__tokdict = self.__tokdict.copy()
-        ret.__parent = self.__parent
-        ret.__accumNames.update( self.__accumNames )
-        ret.__name = self.__name
-        return ret
-
-    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
-        """
-        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
-        """
-        nl = "\n"
-        out = []
-        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
-                                                            for v in vlist)
-        nextLevelIndent = indent + "  "
-
-        # collapse out indents if formatting is not desired
-        if not formatted:
-            indent = ""
-            nextLevelIndent = ""
-            nl = ""
-
-        selfTag = None
-        if doctag is not None:
-            selfTag = doctag
-        else:
-            if self.__name:
-                selfTag = self.__name
-
-        if not selfTag:
-            if namedItemsOnly:
-                return ""
-            else:
-                selfTag = "ITEM"
-
-        out += [ nl, indent, "<", selfTag, ">" ]
-
-        for i,res in enumerate(self.__toklist):
-            if isinstance(res,ParseResults):
-                if i in namedItems:
-                    out += [ res.asXML(namedItems[i],
-                                        namedItemsOnly and doctag is None,
-                                        nextLevelIndent,
-                                        formatted)]
-                else:
-                    out += [ res.asXML(None,
-                                        namedItemsOnly and doctag is None,
-                                        nextLevelIndent,
-                                        formatted)]
-            else:
-                # individual token, see if there is a name for it
-                resTag = None
-                if i in namedItems:
-                    resTag = namedItems[i]
-                if not resTag:
-                    if namedItemsOnly:
-                        continue
-                    else:
-                        resTag = "ITEM"
-                xmlBodyText = _xml_escape(_ustr(res))
-                out += [ nl, nextLevelIndent, "<", resTag, ">",
-                                                xmlBodyText,
-                                                "</", resTag, ">" ]
-
-        out += [ nl, indent, "</", selfTag, ">" ]
-        return "".join(out)
-
-    def __lookup(self,sub):
-        for k,vlist in self.__tokdict.items():
-            for v,loc in vlist:
-                if sub is v:
-                    return k
-        return None
-
-    def getName(self):
-        r"""
-        Returns the results name for this token expression. Useful when several 
-        different expressions might match at a particular location.
-
-        Example::
-            integer = Word(nums)
-            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
-            house_number_expr = Suppress('#') + Word(nums, alphanums)
-            user_data = (Group(house_number_expr)("house_number") 
-                        | Group(ssn_expr)("ssn")
-                        | Group(integer)("age"))
-            user_info = OneOrMore(user_data)
-            
-            result = user_info.parseString("22 111-22-3333 #221B")
-            for item in result:
-                print(item.getName(), ':', item[0])
-        prints::
-            age : 22
-            ssn : 111-22-3333
-            house_number : 221B
-        """
-        if self.__name:
-            return self.__name
-        elif self.__parent:
-            par = self.__parent()
-            if par:
-                return par.__lookup(self)
-            else:
-                return None
-        elif (len(self) == 1 and
-               len(self.__tokdict) == 1 and
-               next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
-            return next(iter(self.__tokdict.keys()))
-        else:
-            return None
-
-    def dump(self, indent='', depth=0, full=True):
-        """
-        Diagnostic method for listing out the contents of a C{ParseResults}.
-        Accepts an optional C{indent} argument so that this string can be embedded
-        in a nested display of other data.
-
-        Example::
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-            
-            result = date_str.parseString('12/31/1999')
-            print(result.dump())
-        prints::
-            ['12', '/', '31', '/', '1999']
-            - day: 1999
-            - month: 31
-            - year: 12
-        """
-        out = []
-        NL = '\n'
-        out.append( indent+_ustr(self.asList()) )
-        if full:
-            if self.haskeys():
-                items = sorted((str(k), v) for k,v in self.items())
-                for k,v in items:
-                    if out:
-                        out.append(NL)
-                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
-                    if isinstance(v,ParseResults):
-                        if v:
-                            out.append( v.dump(indent,depth+1) )
-                        else:
-                            out.append(_ustr(v))
-                    else:
-                        out.append(repr(v))
-            elif any(isinstance(vv,ParseResults) for vv in self):
-                v = self
-                for i,vv in enumerate(v):
-                    if isinstance(vv,ParseResults):
-                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
-                    else:
-                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))
-            
-        return "".join(out)
-
-    def pprint(self, *args, **kwargs):
-        """
-        Pretty-printer for parsed results as a list, using the C{pprint} module.
-        Accepts additional positional or keyword args as defined for the 
-        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
-
-        Example::
-            ident = Word(alphas, alphanums)
-            num = Word(nums)
-            func = Forward()
-            term = ident | num | Group('(' + func + ')')
-            func <<= ident + Group(Optional(delimitedList(term)))
-            result = func.parseString("fna a,b,(fnb c,d,200),100")
-            result.pprint(width=40)
-        prints::
-            ['fna',
-             ['a',
-              'b',
-              ['(', 'fnb', ['c', 'd', '200'], ')'],
-              '100']]
-        """
-        pprint.pprint(self.asList(), *args, **kwargs)
-
-    # add support for pickle protocol
-    def __getstate__(self):
-        return ( self.__toklist,
-                 ( self.__tokdict.copy(),
-                   self.__parent is not None and self.__parent() or None,
-                   self.__accumNames,
-                   self.__name ) )
-
-    def __setstate__(self,state):
-        self.__toklist = state[0]
-        (self.__tokdict,
-         par,
-         inAccumNames,
-         self.__name) = state[1]
-        self.__accumNames = {}
-        self.__accumNames.update(inAccumNames)
-        if par is not None:
-            self.__parent = wkref(par)
-        else:
-            self.__parent = None
-
-    def __getnewargs__(self):
-        return self.__toklist, self.__name, self.__asList, self.__modal
-
-    def __dir__(self):
-        return (dir(type(self)) + list(self.keys()))
-
-MutableMapping.register(ParseResults)
-
-def col (loc,strg):
-    """Returns current column within a string, counting newlines as line separators.
-   The first column is number 1.
-
-   Note: the default parsing behavior is to expand tabs in the input string
-   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
-   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
-   consistent view of the parsed string, the parse location, and line and column
-   positions within the parsed string.
-   """
-    s = strg
-    return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
-
-def lineno(loc,strg):
-    """Returns current line number within a string, counting newlines as line separators.
-   The first line is number 1.
-
-   Note: the default parsing behavior is to expand tabs in the input string
-   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
-   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
-   consistent view of the parsed string, the parse location, and line and column
-   positions within the parsed string.
-   """
-    return strg.count("\n",0,loc) + 1
-
-def line( loc, strg ):
-    """Returns the line of text containing loc within a string, counting newlines as line separators.
-       """
-    lastCR = strg.rfind("\n", 0, loc)
-    nextCR = strg.find("\n", loc)
-    if nextCR >= 0:
-        return strg[lastCR+1:nextCR]
-    else:
-        return strg[lastCR+1:]
-
-def _defaultStartDebugAction( instring, loc, expr ):
-    print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
-
-def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
-    print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
-
-def _defaultExceptionDebugAction( instring, loc, expr, exc ):
-    print ("Exception raised:" + _ustr(exc))
-
-def nullDebugAction(*args):
-    """'Do-nothing' debug action, to suppress debugging output during parsing."""
-    pass
-
-# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
-#~ 'decorator to trim function calls to match the arity of the target'
-#~ def _trim_arity(func, maxargs=3):
-    #~ if func in singleArgBuiltins:
-        #~ return lambda s,l,t: func(t)
-    #~ limit = 0
-    #~ foundArity = False
-    #~ def wrapper(*args):
-        #~ nonlocal limit,foundArity
-        #~ while 1:
-            #~ try:
-                #~ ret = func(*args[limit:])
-                #~ foundArity = True
-                #~ return ret
-            #~ except TypeError:
-                #~ if limit == maxargs or foundArity:
-                    #~ raise
-                #~ limit += 1
-                #~ continue
-    #~ return wrapper
-
-# this version is Python 2.x-3.x cross-compatible
-'decorator to trim function calls to match the arity of the target'
-def _trim_arity(func, maxargs=2):
-    if func in singleArgBuiltins:
-        return lambda s,l,t: func(t)
-    limit = [0]
-    foundArity = [False]
-    
-    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
-    if system_version[:2] >= (3,5):
-        def extract_stack(limit=0):
-            # special handling for Python 3.5.0 - extra deep call stack by 1
-            offset = -3 if system_version == (3,5,0) else -2
-            frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
-            return [frame_summary[:2]]
-        def extract_tb(tb, limit=0):
-            frames = traceback.extract_tb(tb, limit=limit)
-            frame_summary = frames[-1]
-            return [frame_summary[:2]]
-    else:
-        extract_stack = traceback.extract_stack
-        extract_tb = traceback.extract_tb
-    
-    # synthesize what would be returned by traceback.extract_stack at the call to 
-    # user's parse action 'func', so that we don't incur call penalty at parse time
-    
-    LINE_DIFF = 6
-    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND 
-    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
-    this_line = extract_stack(limit=2)[-1]
-    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
-
-    def wrapper(*args):
-        while 1:
-            try:
-                ret = func(*args[limit[0]:])
-                foundArity[0] = True
-                return ret
-            except TypeError:
-                # re-raise TypeErrors if they did not come from our arity testing
-                if foundArity[0]:
-                    raise
-                else:
-                    try:
-                        tb = sys.exc_info()[-1]
-                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
-                            raise
-                    finally:
-                        del tb
-
-                if limit[0] <= maxargs:
-                    limit[0] += 1
-                    continue
-                raise
-
-    # copy func name to wrapper for sensible debug output
-    func_name = ""
-    try:
-        func_name = getattr(func, '__name__', 
-                            getattr(func, '__class__').__name__)
-    except Exception:
-        func_name = str(func)
-    wrapper.__name__ = func_name
-
-    return wrapper
-
-class ParserElement(object):
-    """Abstract base level parser element class."""
-    DEFAULT_WHITE_CHARS = " \n\t\r"
-    verbose_stacktrace = False
-
-    @staticmethod
-    def setDefaultWhitespaceChars( chars ):
-        r"""
-        Overrides the default whitespace chars
-
-        Example::
-            # default whitespace chars are space, <TAB> and newline
-            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
-            
-            # change to just treat newline as significant
-            ParserElement.setDefaultWhitespaceChars(" \t")
-            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
-        """
-        ParserElement.DEFAULT_WHITE_CHARS = chars
-
-    @staticmethod
-    def inlineLiteralsUsing(cls):
-        """
-        Set class to be used for inclusion of string literals into a parser.
-        
-        Example::
-            # default literal class used is Literal
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
-
-
-            # change to Suppress
-            ParserElement.inlineLiteralsUsing(Suppress)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
-        """
-        ParserElement._literalStringClass = cls
-
-    def __init__( self, savelist=False ):
-        self.parseAction = list()
-        self.failAction = None
-        #~ self.name = ""  # don't define self.name, let subclasses try/except upcall
-        self.strRepr = None
-        self.resultsName = None
-        self.saveAsList = savelist
-        self.skipWhitespace = True
-        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
-        self.copyDefaultWhiteChars = True
-        self.mayReturnEmpty = False # used when checking for left-recursion
-        self.keepTabs = False
-        self.ignoreExprs = list()
-        self.debug = False
-        self.streamlined = False
-        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
-        self.errmsg = ""
-        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
-        self.debugActions = ( None, None, None ) #custom debug actions
-        self.re = None
-        self.callPreparse = True # used to avoid redundant calls to preParse
-        self.callDuringTry = False
-
-    def copy( self ):
-        """
-        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
-        for the same parsing pattern, using copies of the original parse element.
-        
-        Example::
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
-            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
-            
-            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
-        prints::
-            [5120, 100, 655360, 268435456]
-        Equivalent form of C{expr.copy()} is just C{expr()}::
-            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
-        """
-        cpy = copy.copy( self )
-        cpy.parseAction = self.parseAction[:]
-        cpy.ignoreExprs = self.ignoreExprs[:]
-        if self.copyDefaultWhiteChars:
-            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
-        return cpy
-
-    def setName( self, name ):
-        """
-        Define name for this expression, makes debugging and exception messages clearer.
-        
-        Example::
-            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
-            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
-        """
-        self.name = name
-        self.errmsg = "Expected " + self.name
-        if hasattr(self,"exception"):
-            self.exception.msg = self.errmsg
-        return self
-
-    def setResultsName( self, name, listAllMatches=False ):
-        """
-        Define name for referencing matching tokens as a nested attribute
-        of the returned parse results.
-        NOTE: this returns a *copy* of the original C{ParserElement} object;
-        this is so that the client can define a basic element, such as an
-        integer, and reference it in multiple places with different names.
-
-        You can also set results names using the abbreviated syntax,
-        C{expr("name")} in place of C{expr.setResultsName("name")} - 
-        see L{I{__call__}<__call__>}.
-
-        Example::
-            date_str = (integer.setResultsName("year") + '/' 
-                        + integer.setResultsName("month") + '/' 
-                        + integer.setResultsName("day"))
-
-            # equivalent form:
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-        """
-        newself = self.copy()
-        if name.endswith("*"):
-            name = name[:-1]
-            listAllMatches=True
-        newself.resultsName = name
-        newself.modalResults = not listAllMatches
-        return newself
-
-    def setBreak(self,breakFlag = True):
-        """Method to invoke the Python pdb debugger when this element is
-           about to be parsed. Set C{breakFlag} to True to enable, False to
-           disable.
-        """
-        if breakFlag:
-            _parseMethod = self._parse
-            def breaker(instring, loc, doActions=True, callPreParse=True):
-                import pdb
-                pdb.set_trace()
-                return _parseMethod( instring, loc, doActions, callPreParse )
-            breaker._originalParseMethod = _parseMethod
-            self._parse = breaker
-        else:
-            if hasattr(self._parse,"_originalParseMethod"):
-                self._parse = self._parse._originalParseMethod
-        return self
-
-    def setParseAction( self, *fns, **kwargs ):
-        """
-        Define one or more actions to perform when successfully matching parse element definition.
-        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
-        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
-         - s   = the original string being parsed (see note below)
-         - loc = the location of the matching substring
-         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
-        If the functions in fns modify the tokens, they can return them as the return
-        value from fn, and the modified list of tokens will replace the original.
-        Otherwise, fn does not need to return any value.
-
-        Optional keyword arguments:
-         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
-
-        Note: the default parsing behavior is to expand tabs in the input string
-        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
-        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
-        consistent view of the parsed string, the parse location, and line and column
-        positions within the parsed string.
-        
-        Example::
-            integer = Word(nums)
-            date_str = integer + '/' + integer + '/' + integer
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
-
-            # use parse action to convert to ints at parse time
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            date_str = integer + '/' + integer + '/' + integer
-
-            # note that integer fields are now ints, not strings
-            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
-        """
-        self.parseAction = list(map(_trim_arity, list(fns)))
-        self.callDuringTry = kwargs.get("callDuringTry", False)
-        return self
-
-    def addParseAction( self, *fns, **kwargs ):
-        """
-        Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}}.
-        
-        See examples in L{I{copy}}.
-        """
-        self.parseAction += list(map(_trim_arity, list(fns)))
-        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
-        return self
-
-    def addCondition(self, *fns, **kwargs):
-        """Add a boolean predicate function to expression's list of parse actions. See 
-        L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, 
-        functions passed to C{addCondition} need to return boolean success/fail of the condition.
-
-        Optional keyword arguments:
-         - message = define a custom message to be used in the raised exception
-         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
-         
-        Example::
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            year_int = integer.copy()
-            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
-            date_str = year_int + '/' + integer + '/' + integer
-
-            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
-        """
-        msg = kwargs.get("message", "failed user-defined condition")
-        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
-        for fn in fns:
-            def pa(s,l,t):
-                if not bool(_trim_arity(fn)(s,l,t)):
-                    raise exc_type(s,l,msg)
-            self.parseAction.append(pa)
-        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
-        return self
-
-    def setFailAction( self, fn ):
-        """Define action to perform if parsing fails at this expression.
-           Fail action fn is a callable function that takes the arguments
-           C{fn(s,loc,expr,err)} where:
-            - s = string being parsed
-            - loc = location where expression match was attempted and failed
-            - expr = the parse expression that failed
-            - err = the exception thrown
-           The function returns no value.  It may throw C{L{ParseFatalException}}
-           if it is desired to stop parsing immediately."""
-        self.failAction = fn
-        return self
-
-    def _skipIgnorables( self, instring, loc ):
-        exprsFound = True
-        while exprsFound:
-            exprsFound = False
-            for e in self.ignoreExprs:
-                try:
-                    while 1:
-                        loc,dummy = e._parse( instring, loc )
-                        exprsFound = True
-                except ParseException:
-                    pass
-        return loc
-
-    def preParse( self, instring, loc ):
-        if self.ignoreExprs:
-            loc = self._skipIgnorables( instring, loc )
-
-        if self.skipWhitespace:
-            wt = self.whiteChars
-            instrlen = len(instring)
-            while loc < instrlen and instring[loc] in wt:
-                loc += 1
-
-        return loc
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        return loc, []
-
-    def postParse( self, instring, loc, tokenlist ):
-        return tokenlist
-
-    #~ @profile
-    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
-        debugging = ( self.debug ) #and doActions )
-
-        if debugging or self.failAction:
-            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
-            if (self.debugActions[0] ):
-                self.debugActions[0]( instring, loc, self )
-            if callPreParse and self.callPreparse:
-                preloc = self.preParse( instring, loc )
-            else:
-                preloc = loc
-            tokensStart = preloc
-            try:
-                try:
-                    loc,tokens = self.parseImpl( instring, preloc, doActions )
-                except IndexError:
-                    raise ParseException( instring, len(instring), self.errmsg, self )
-            except ParseBaseException as err:
-                #~ print ("Exception raised:", err)
-                if self.debugActions[2]:
-                    self.debugActions[2]( instring, tokensStart, self, err )
-                if self.failAction:
-                    self.failAction( instring, tokensStart, self, err )
-                raise
-        else:
-            if callPreParse and self.callPreparse:
-                preloc = self.preParse( instring, loc )
-            else:
-                preloc = loc
-            tokensStart = preloc
-            if self.mayIndexError or preloc >= len(instring):
-                try:
-                    loc,tokens = self.parseImpl( instring, preloc, doActions )
-                except IndexError:
-                    raise ParseException( instring, len(instring), self.errmsg, self )
-            else:
-                loc,tokens = self.parseImpl( instring, preloc, doActions )
-
-        tokens = self.postParse( instring, loc, tokens )
-
-        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
-        if self.parseAction and (doActions or self.callDuringTry):
-            if debugging:
-                try:
-                    for fn in self.parseAction:
-                        tokens = fn( instring, tokensStart, retTokens )
-                        if tokens is not None:
-                            retTokens = ParseResults( tokens,
-                                                      self.resultsName,
-                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
-                                                      modal=self.modalResults )
-                except ParseBaseException as err:
-                    #~ print "Exception raised in user parse action:", err
-                    if (self.debugActions[2] ):
-                        self.debugActions[2]( instring, tokensStart, self, err )
-                    raise
-            else:
-                for fn in self.parseAction:
-                    tokens = fn( instring, tokensStart, retTokens )
-                    if tokens is not None:
-                        retTokens = ParseResults( tokens,
-                                                  self.resultsName,
-                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
-                                                  modal=self.modalResults )
-        if debugging:
-            #~ print ("Matched",self,"->",retTokens.asList())
-            if (self.debugActions[1] ):
-                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
-
-        return loc, retTokens
-
-    def tryParse( self, instring, loc ):
-        try:
-            return self._parse( instring, loc, doActions=False )[0]
-        except ParseFatalException:
-            raise ParseException( instring, loc, self.errmsg, self)
-    
-    def canParseNext(self, instring, loc):
-        try:
-            self.tryParse(instring, loc)
-        except (ParseException, IndexError):
-            return False
-        else:
-            return True
-
-    class _UnboundedCache(object):
-        def __init__(self):
-            cache = {}
-            self.not_in_cache = not_in_cache = object()
-
-            def get(self, key):
-                return cache.get(key, not_in_cache)
-
-            def set(self, key, value):
-                cache[key] = value
-
-            def clear(self):
-                cache.clear()
-                
-            def cache_len(self):
-                return len(cache)
-
-            self.get = types.MethodType(get, self)
-            self.set = types.MethodType(set, self)
-            self.clear = types.MethodType(clear, self)
-            self.__len__ = types.MethodType(cache_len, self)
-
-    if _OrderedDict is not None:
-        class _FifoCache(object):
-            def __init__(self, size):
-                self.not_in_cache = not_in_cache = object()
-
-                cache = _OrderedDict()
-
-                def get(self, key):
-                    return cache.get(key, not_in_cache)
-
-                def set(self, key, value):
-                    cache[key] = value
-                    while len(cache) > size:
-                        try:
-                            cache.popitem(False)
-                        except KeyError:
-                            pass
-
-                def clear(self):
-                    cache.clear()
-
-                def cache_len(self):
-                    return len(cache)
-
-                self.get = types.MethodType(get, self)
-                self.set = types.MethodType(set, self)
-                self.clear = types.MethodType(clear, self)
-                self.__len__ = types.MethodType(cache_len, self)
-
-    else:
-        class _FifoCache(object):
-            def __init__(self, size):
-                self.not_in_cache = not_in_cache = object()
-
-                cache = {}
-                key_fifo = collections.deque([], size)
-
-                def get(self, key):
-                    return cache.get(key, not_in_cache)
-
-                def set(self, key, value):
-                    cache[key] = value
-                    while len(key_fifo) > size:
-                        cache.pop(key_fifo.popleft(), None)
-                    key_fifo.append(key)
-
-                def clear(self):
-                    cache.clear()
-                    key_fifo.clear()
-
-                def cache_len(self):
-                    return len(cache)
-
-                self.get = types.MethodType(get, self)
-                self.set = types.MethodType(set, self)
-                self.clear = types.MethodType(clear, self)
-                self.__len__ = types.MethodType(cache_len, self)
-
-    # argument cache for optimizing repeated calls when backtracking through recursive expressions
-    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
-    packrat_cache_lock = RLock()
-    packrat_cache_stats = [0, 0]
-
-    # this method gets repeatedly called during backtracking with the same arguments -
-    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
-    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
-        HIT, MISS = 0, 1
-        lookup = (self, instring, loc, callPreParse, doActions)
-        with ParserElement.packrat_cache_lock:
-            cache = ParserElement.packrat_cache
-            value = cache.get(lookup)
-            if value is cache.not_in_cache:
-                ParserElement.packrat_cache_stats[MISS] += 1
-                try:
-                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
-                except ParseBaseException as pe:
-                    # cache a copy of the exception, without the traceback
-                    cache.set(lookup, pe.__class__(*pe.args))
-                    raise
-                else:
-                    cache.set(lookup, (value[0], value[1].copy()))
-                    return value
-            else:
-                ParserElement.packrat_cache_stats[HIT] += 1
-                if isinstance(value, Exception):
-                    raise value
-                return (value[0], value[1].copy())
-
-    _parse = _parseNoCache
-
-    @staticmethod
-    def resetCache():
-        ParserElement.packrat_cache.clear()
-        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
-
-    _packratEnabled = False
-    @staticmethod
-    def enablePackrat(cache_size_limit=128):
-        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
-           Repeated parse attempts at the same string location (which happens
-           often in many complex grammars) can immediately return a cached value,
-           instead of re-executing parsing/validating code.  Memoizing is done for
-           both valid results and parsing exceptions.
-           
-           Parameters:
-            - cache_size_limit - (default=C{128}) - if an integer value is provided
-              will limit the size of the packrat cache; if None is passed, then
-              the cache size will be unbounded; if 0 is passed, the cache will
-              be effectively disabled.
-            
-           This speedup may break existing programs that use parse actions that
-           have side-effects.  For this reason, packrat parsing is disabled when
-           you first import pyparsing.  To activate the packrat feature, your
-           program must call the class method C{ParserElement.enablePackrat()}.  If
-           your program uses C{psyco} to "compile as you go", you must call
-           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
-           Python will crash.  For best results, call C{enablePackrat()} immediately
-           after importing pyparsing.
-           
-           Example::
-               import pyparsing
-               pyparsing.ParserElement.enablePackrat()
-        """
-        if not ParserElement._packratEnabled:
-            ParserElement._packratEnabled = True
-            if cache_size_limit is None:
-                ParserElement.packrat_cache = ParserElement._UnboundedCache()
-            else:
-                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
-            ParserElement._parse = ParserElement._parseCache
-
-    def parseString( self, instring, parseAll=False ):
-        """
-        Execute the parse expression with the given string.
-        This is the main interface to the client code, once the complete
-        expression has been built.
-
-        If you want the grammar to require that the entire input string be
-        successfully parsed, then set C{parseAll} to True (equivalent to ending
-        the grammar with C{L{StringEnd()}}).
-
-        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
-        in order to report proper column numbers in parse actions.
-        If the input string contains tabs and
-        the grammar uses parse actions that use the C{loc} argument to index into the
-        string being parsed, you can ensure you have a consistent view of the input
-        string by:
-         - calling C{parseWithTabs} on your grammar before calling C{parseString}
-           (see L{I{parseWithTabs}})
-         - define your parse action using the full C{(s,loc,toks)} signature, and
-           reference the input string using the parse action's C{s} argument
-         - explicitly expand the tabs in your input string before calling
-           C{parseString}
-        
-        Example::
-            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
-            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
-        """
-        ParserElement.resetCache()
-        if not self.streamlined:
-            self.streamline()
-            #~ self.saveAsList = True
-        for e in self.ignoreExprs:
-            e.streamline()
-        if not self.keepTabs:
-            instring = instring.expandtabs()
-        try:
-            loc, tokens = self._parse( instring, 0 )
-            if parseAll:
-                loc = self.preParse( instring, loc )
-                se = Empty() + StringEnd()
-                se._parse( instring, loc )
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clears out pyparsing internal stack trace
-                raise exc
-        else:
-            return tokens
-
-    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
-        """
-        Scan the input string for expression matches.  Each match will return the
-        matching tokens, start location, and end location.  May be called with optional
-        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
-        C{overlap} is specified, then overlapping matches will be reported.
-
-        Note that the start and end locations are reported relative to the string
-        being parsed.  See L{I{parseString}} for more information on parsing
-        strings with embedded tabs.
-
-        Example::
-            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
-            print(source)
-            for tokens,start,end in Word(alphas).scanString(source):
-                print(' '*start + '^'*(end-start))
-                print(' '*start + tokens[0])
-        
-        prints::
-        
-            sldjf123lsdjjkf345sldkjf879lkjsfd987
-            ^^^^^
-            sldjf
-                    ^^^^^^^
-                    lsdjjkf
-                              ^^^^^^
-                              sldkjf
-                                       ^^^^^^
-                                       lkjsfd
-        """
-        if not self.streamlined:
-            self.streamline()
-        for e in self.ignoreExprs:
-            e.streamline()
-
-        if not self.keepTabs:
-            instring = _ustr(instring).expandtabs()
-        instrlen = len(instring)
-        loc = 0
-        preparseFn = self.preParse
-        parseFn = self._parse
-        ParserElement.resetCache()
-        matches = 0
-        try:
-            while loc <= instrlen and matches < maxMatches:
-                try:
-                    preloc = preparseFn( instring, loc )
-                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
-                except ParseException:
-                    loc = preloc+1
-                else:
-                    if nextLoc > loc:
-                        matches += 1
-                        yield tokens, preloc, nextLoc
-                        if overlap:
-                            nextloc = preparseFn( instring, loc )
-                            if nextloc > loc:
-                                loc = nextLoc
-                            else:
-                                loc += 1
-                        else:
-                            loc = nextLoc
-                    else:
-                        loc = preloc+1
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clears out pyparsing internal stack trace
-                raise exc
-
-    def transformString( self, instring ):
-        """
-        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
-        be returned from a parse action.  To use C{transformString}, define a grammar and
-        attach a parse action to it that modifies the returned token list.
-        Invoking C{transformString()} on a target string will then scan for matches,
-        and replace the matched text patterns according to the logic in the parse
-        action.  C{transformString()} returns the resulting transformed string.
-        
-        Example::
-            wd = Word(alphas)
-            wd.setParseAction(lambda toks: toks[0].title())
-            
-            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
-        Prints::
-            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
-        """
-        out = []
-        lastE = 0
-        # force preservation of s, to minimize unwanted transformation of string, and to
-        # keep string locs straight between transformString and scanString
-        self.keepTabs = True
-        try:
-            for t,s,e in self.scanString( instring ):
-                out.append( instring[lastE:s] )
-                if t:
-                    if isinstance(t,ParseResults):
-                        out += t.asList()
-                    elif isinstance(t,list):
-                        out += t
-                    else:
-                        out.append(t)
-                lastE = e
-            out.append(instring[lastE:])
-            out = [o for o in out if o]
-            return "".join(map(_ustr,_flatten(out)))
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clears out pyparsing internal stack trace
-                raise exc
-
-    def searchString( self, instring, maxMatches=_MAX_INT ):
-        """
-        Another extension to C{L{scanString}}, simplifying the access to the tokens found
-        to match the given parse expression.  May be called with optional
-        C{maxMatches} argument, to clip searching after 'n' matches are found.
-        
-        Example::
-            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
-            cap_word = Word(alphas.upper(), alphas.lower())
-            
-            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
-
-            # the sum() builtin can be used to merge results into a single ParseResults object
-            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
-        prints::
-            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
-            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
-        """
-        try:
-            return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clears out pyparsing internal stack trace
-                raise exc
-
-    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
-        """
-        Generator method to split a string using the given expression as a separator.
-        May be called with optional C{maxsplit} argument, to limit the number of splits;
-        and the optional C{includeSeparators} argument (default=C{False}), if the separating
-        matching text should be included in the split results.
-        
-        Example::        
-            punc = oneOf(list(".,;:/-!?"))
-            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
-        prints::
-            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
-        """
-        splits = 0
-        last = 0
-        for t,s,e in self.scanString(instring, maxMatches=maxsplit):
-            yield instring[last:s]
-            if includeSeparators:
-                yield t[0]
-            last = e
-        yield instring[last:]
-
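A runnable sketch of split(), mirroring the docstring example above (assumes the pyparsing 2.x package is installed).

    from pyparsing import oneOf

    punc = oneOf(list(".,;:/-!?"))
    # split() yields the text between matches; pass includeSeparators=True
    # to also yield the matched separator text itself.
    print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
    # -> ['This', ' this', '', ' this sentence', ' is badly punctuated', '']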
-    def __add__(self, other ):
-        """
-        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
-        converts them to L{Literal}s by default.
-        
-        Example::
-            greet = Word(alphas) + "," + Word(alphas) + "!"
-            hello = "Hello, World!"
-            print (hello, "->", greet.parseString(hello))
-        Prints::
-            Hello, World! -> ['Hello', ',', 'World', '!']
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return And( [ self, other ] )
-
-    def __radd__(self, other ):
-        """
-        Implementation of + operator when left operand is not a C{L{ParserElement}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return other + self
-
-    def __sub__(self, other):
-        """
-        Implementation of - operator, returns C{L{And}} with error stop
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return self + And._ErrorStop() + other
-
-    def __rsub__(self, other ):
-        """
-        Implementation of - operator when left operand is not a C{L{ParserElement}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return other - self
-
-    def __mul__(self,other):
-        """
-        Implementation of * operator, allows use of C{expr * 3} in place of
-        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer

-        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
-        may also include C{None} as in:
-         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
-              to C{expr*n + L{ZeroOrMore}(expr)}
-              (read as "at least n instances of C{expr}")
-         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
-              (read as "0 to n instances of C{expr}")
-         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
-         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
-
-        Note that C{expr*(None,n)} does not raise an exception if
-        more than n exprs exist in the input stream; that is,
-        C{expr*(None,n)} does not enforce a maximum number of expr
-        occurrences.  If this behavior is desired, then write
-        C{expr*(None,n) + ~expr}
-        """
-        if isinstance(other,int):
-            minElements, optElements = other,0
-        elif isinstance(other,tuple):
-            other = (other + (None, None))[:2]
-            if other[0] is None:
-                other = (0, other[1])
-            if isinstance(other[0],int) and other[1] is None:
-                if other[0] == 0:
-                    return ZeroOrMore(self)
-                if other[0] == 1:
-                    return OneOrMore(self)
-                else:
-                    return self*other[0] + ZeroOrMore(self)
-            elif isinstance(other[0],int) and isinstance(other[1],int):
-                minElements, optElements = other
-                optElements -= minElements
-            else:
-                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
-        else:
-            raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
-
-        if minElements < 0:
-            raise ValueError("cannot multiply ParserElement by negative value")
-        if optElements < 0:
-            raise ValueError("second tuple value must be greater or equal to first tuple value")
-        if minElements == optElements == 0:
-            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
-
-        if (optElements):
-            def makeOptionalList(n):
-                if n>1:
-                    return Optional(self + makeOptionalList(n-1))
-                else:
-                    return Optional(self)
-            if minElements:
-                if minElements == 1:
-                    ret = self + makeOptionalList(optElements)
-                else:
-                    ret = And([self]*minElements) + makeOptionalList(optElements)
-            else:
-                ret = makeOptionalList(optElements)
-        else:
-            if minElements == 1:
-                ret = self
-            else:
-                ret = And([self]*minElements)
-        return ret
-
-    def __rmul__(self, other):
-        return self.__mul__(other)
-
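A short sketch of the '*' repetition forms described in the __mul__ docstring above (pyparsing 2.x assumed; the expression names are illustrative).

    from pyparsing import Word, nums

    digit_pair   = Word(nums) * 2          # exactly two integers
    two_to_four  = Word(nums) * (2, 4)     # between two and four integers
    at_least_one = Word(nums) * (1, None)  # same as OneOrMore(Word(nums))

    print(digit_pair.parseString("12 34"))       # -> ['12', '34']
    print(two_to_four.parseString("1 2 3"))      # -> ['1', '2', '3']
    print(at_least_one.parseString("7 8 9 10"))  # -> ['7', '8', '9', '10']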
-    def __or__(self, other ):
-        """
-        Implementation of | operator - returns C{L{MatchFirst}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return MatchFirst( [ self, other ] )
-
-    def __ror__(self, other ):
-        """
-        Implementation of | operator when left operand is not a C{L{ParserElement}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return other | self
-
-    def __xor__(self, other ):
-        """
-        Implementation of ^ operator - returns C{L{Or}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return Or( [ self, other ] )
-
-    def __rxor__(self, other ):
-        """
-        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return other ^ self
-
-    def __and__(self, other ):
-        """
-        Implementation of & operator - returns C{L{Each}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return Each( [ self, other ] )
-
-    def __rand__(self, other ):
-        """
-        Implementation of & operator when left operand is not a C{L{ParserElement}}
-        """
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        if not isinstance( other, ParserElement ):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                    SyntaxWarning, stacklevel=2)
-            return None
-        return other & self
-
-    def __invert__( self ):
-        """
-        Implementation of ~ operator - returns C{L{NotAny}}
-        """
-        return NotAny( self )
-
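The operator overloads above are easiest to read side by side; a small sketch (pyparsing 2.x assumed, names illustrative).

    from pyparsing import Word, alphas, nums

    integer = Word(nums)
    name    = Word(alphas)

    key_value = name + ":" + (integer | name)  # '+' builds And, '|' builds MatchFirst
    end_check = integer + ~Word(alphas)        # '~' builds NotAny (negative lookahead)

    print(key_value.parseString("age: 42"))     # -> ['age', ':', '42']
    print(key_value.parseString("color: red"))  # -> ['color', ':', 'red']
    print(end_check.parseString("42 17"))       # -> ['42'] (next token is not alphabetic)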
-    def __call__(self, name=None):
-        """
-        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
-        
-        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
-        passed as C{True}.
-           
-        If C{name} is omitted, same as calling C{L{copy}}.
-
-        Example::
-            # these are equivalent
-            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
-            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
-        """
-        if name is not None:
-            return self.setResultsName(name)
-        else:
-            return self.copy()
-
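A sketch of the results-name shortcut: calling an expression with a string is equivalent to setResultsName (pyparsing 2.x assumed; the field names are illustrative).

    from pyparsing import Word, alphas, nums

    userdata = Word(alphas)("name") + Word(nums + "-")("socsecno")
    result = userdata.parseString("houston 123-45-6789")
    print(result["name"])      # -> 'houston'
    print(result["socsecno"])  # -> '123-45-6789'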
-    def suppress( self ):
-        """
-        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
-        cluttering up returned output.
-        """
-        return Suppress( self )
-
-    def leaveWhitespace( self ):
-        """
-        Disables the skipping of whitespace before matching the characters in the
-        C{ParserElement}'s defined pattern.  This is normally only used internally by
-        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
-        """
-        self.skipWhitespace = False
-        return self
-
-    def setWhitespaceChars( self, chars ):
-        """
-        Overrides the default whitespace chars
-        """
-        self.skipWhitespace = True
-        self.whiteChars = chars
-        self.copyDefaultWhiteChars = False
-        return self
-
-    def parseWithTabs( self ):
-        """
-        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
-        Must be called before C{parseString} when the input grammar contains elements that
-        match C{<TAB>} characters.
-        """
-        self.keepTabs = True
-        return self
-
-    def ignore( self, other ):
-        """
-        Define expression to be ignored (e.g., comments) while doing pattern
-        matching; may be called repeatedly, to define multiple comment or other
-        ignorable patterns.
-        
-        Example::
-            patt = OneOrMore(Word(alphas))
-            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
-            
-            patt.ignore(cStyleComment)
-            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
-        """
-        if isinstance(other, basestring):
-            other = Suppress(other)
-
-        if isinstance( other, Suppress ):
-            if other not in self.ignoreExprs:
-                self.ignoreExprs.append(other)
-        else:
-            self.ignoreExprs.append( Suppress( other.copy() ) )
-        return self
-
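A runnable version of the ignore() docstring example, using the bundled cStyleComment helper (pyparsing 2.x assumed).

    from pyparsing import OneOrMore, Word, alphas, cStyleComment

    patt = OneOrMore(Word(alphas))
    patt.ignore(cStyleComment)  # comments are skipped wherever they appear
    print(patt.parseString('ablaj /* comment */ lskjd'))  # -> ['ablaj', 'lskjd']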
-    def setDebugActions( self, startAction, successAction, exceptionAction ):
-        """
-        Enable display of debugging messages while doing pattern matching.
-        """
-        self.debugActions = (startAction or _defaultStartDebugAction,
-                             successAction or _defaultSuccessDebugAction,
-                             exceptionAction or _defaultExceptionDebugAction)
-        self.debug = True
-        return self
-
-    def setDebug( self, flag=True ):
-        """
-        Enable display of debugging messages while doing pattern matching.
-        Set C{flag} to True to enable, False to disable.
-
-        Example::
-            wd = Word(alphas).setName("alphaword")
-            integer = Word(nums).setName("numword")
-            term = wd | integer
-            
-            # turn on debugging for wd
-            wd.setDebug()
-
-            OneOrMore(term).parseString("abc 123 xyz 890")
-        
-        prints::
-            Match alphaword at loc 0(1,1)
-            Matched alphaword -> ['abc']
-            Match alphaword at loc 3(1,4)
-            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
-            Match alphaword at loc 7(1,8)
-            Matched alphaword -> ['xyz']
-            Match alphaword at loc 11(1,12)
-            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
-            Match alphaword at loc 15(1,16)
-            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
-
-        The output shown is that produced by the default debug actions - custom debug actions can be
-        specified using L{setDebugActions}. Prior to attempting
-        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
-        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
-        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
-        which makes debugging and exception messages easier to understand - for instance, the default
-        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
-        """
-        if flag:
-            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
-        else:
-            self.debug = False
-        return self
-
-    def __str__( self ):
-        return self.name
-
-    def __repr__( self ):
-        return _ustr(self)
-
-    def streamline( self ):
-        self.streamlined = True
-        self.strRepr = None
-        return self
-
-    def checkRecursion( self, parseElementList ):
-        pass
-
-    def validate( self, validateTrace=[] ):
-        """
-        Check defined expressions for valid structure, check for infinite recursive definitions.
-        """
-        self.checkRecursion( [] )
-
-    def parseFile( self, file_or_filename, parseAll=False ):
-        """
-        Execute the parse expression on the given file or filename.
-        If a filename is specified (instead of a file object),
-        the entire file is opened, read, and closed before parsing.
-        """
-        try:
-            file_contents = file_or_filename.read()
-        except AttributeError:
-            with open(file_or_filename, "r") as f:
-                file_contents = f.read()
-        try:
-            return self.parseString(file_contents, parseAll)
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clears out pyparsing internal stack trace
-                raise exc
-
-    def __eq__(self,other):
-        if isinstance(other, ParserElement):
-            return self is other or vars(self) == vars(other)
-        elif isinstance(other, basestring):
-            return self.matches(other)
-        else:
-            return super(ParserElement,self)==other
-
-    def __ne__(self,other):
-        return not (self == other)
-
-    def __hash__(self):
-        return hash(id(self))
-
-    def __req__(self,other):
-        return self == other
-
-    def __rne__(self,other):
-        return not (self == other)
-
-    def matches(self, testString, parseAll=True):
-        """
-        Method for quick testing of a parser against a test string. Good for simple
-        inline microtests of sub-expressions while building up a larger parser.
-           
-        Parameters:
-         - testString - to test against this expression for a match
-         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
-            
-        Example::
-            expr = Word(nums)
-            assert expr.matches("100")
-        """
-        try:
-            self.parseString(_ustr(testString), parseAll=parseAll)
-            return True
-        except ParseBaseException:
-            return False
-                
-    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
-        """
-        Execute the parse expression on a series of test strings, showing each
-        test, the parsed results or where the parse failed. Quick and easy way to
-        run a parse expression against a list of sample strings.
-           
-        Parameters:
-         - tests - a list of separate test strings, or a multiline string of test strings
-         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
-         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
-              string; pass None to disable comment filtering
-         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
-              if False, only dump nested list
-         - printResults - (default=C{True}) prints test output to stdout
-         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
-
-        Returns: a (success, results) tuple, where success indicates that all tests succeeded
-        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
-        test's output
-        
-        Example::
-            number_expr = pyparsing_common.number.copy()
-
-            result = number_expr.runTests('''
-                # unsigned integer
-                100
-                # negative integer
-                -100
-                # float with scientific notation
-                6.02e23
-                # integer with scientific notation
-                1e-12
-                ''')
-            print("Success" if result[0] else "Failed!")
-
-            result = number_expr.runTests('''
-                # stray character
-                100Z
-                # missing leading digit before '.'
-                -.100
-                # too many '.'
-                3.14.159
-                ''', failureTests=True)
-            print("Success" if result[0] else "Failed!")
-        prints::
-            # unsigned integer
-            100
-            [100]
-
-            # negative integer
-            -100
-            [-100]
-
-            # float with scientific notation
-            6.02e23
-            [6.02e+23]
-
-            # integer with scientific notation
-            1e-12
-            [1e-12]
-
-            Success
-            
-            # stray character
-            100Z
-               ^
-            FAIL: Expected end of text (at char 3), (line:1, col:4)
-
-            # missing leading digit before '.'
-            -.100
-            ^
-            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
-
-            # too many '.'
-            3.14.159
-                ^
-            FAIL: Expected end of text (at char 4), (line:1, col:5)
-
-            Success
-
-        Each test string must be on a single line. If you want to test a string that spans multiple
-        lines, create a test like this::
-
-            expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines")
-        
-        (Note that this is a raw string literal, you must include the leading 'r'.)
-        """
-        if isinstance(tests, basestring):
-            tests = list(map(str.strip, tests.rstrip().splitlines()))
-        if isinstance(comment, basestring):
-            comment = Literal(comment)
-        allResults = []
-        comments = []
-        success = True
-        for t in tests:
-            if comment is not None and comment.matches(t, False) or comments and not t:
-                comments.append(t)
-                continue
-            if not t:
-                continue
-            out = ['\n'.join(comments), t]
-            comments = []
-            try:
-                t = t.replace(r'\n','\n')
-                result = self.parseString(t, parseAll=parseAll)
-                out.append(result.dump(full=fullDump))
-                success = success and not failureTests
-            except ParseBaseException as pe:
-                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
-                if '\n' in t:
-                    out.append(line(pe.loc, t))
-                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
-                else:
-                    out.append(' '*pe.loc + '^' + fatal)
-                out.append("FAIL: " + str(pe))
-                success = success and failureTests
-                result = pe
-            except Exception as exc:
-                out.append("FAIL-EXCEPTION: " + str(exc))
-                success = success and failureTests
-                result = exc
-
-            if printResults:
-                if fullDump:
-                    out.append('')
-                print('\n'.join(out))
-
-            allResults.append((t, result))
-        
-        return success, allResults
-
-        
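A minimal runTests() sketch; the docstring above uses pyparsing_common.number, but a plain Word(nums) is substituted here to keep the example self-contained (pyparsing 2.x assumed).

    from pyparsing import Word, nums

    integer = Word(nums)
    success, report = integer.runTests('''
        # valid integer
        100
        # not an integer at all
        abc
        ''')
    print("all tests passed" if success else "some tests failed")  # -> some tests failed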
-class Token(ParserElement):
-    """
-    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
-    """
-    def __init__( self ):
-        super(Token,self).__init__( savelist=False )
-
-
-class Empty(Token):
-    """
-    An empty token, will always match.
-    """
-    def __init__( self ):
-        super(Empty,self).__init__()
-        self.name = "Empty"
-        self.mayReturnEmpty = True
-        self.mayIndexError = False
-
-
-class NoMatch(Token):
-    """
-    A token that will never match.
-    """
-    def __init__( self ):
-        super(NoMatch,self).__init__()
-        self.name = "NoMatch"
-        self.mayReturnEmpty = True
-        self.mayIndexError = False
-        self.errmsg = "Unmatchable token"
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        raise ParseException(instring, loc, self.errmsg, self)
-
-
-class Literal(Token):
-    """
-    Token to exactly match a specified string.
-    
-    Example::
-        Literal('blah').parseString('blah')  # -> ['blah']
-        Literal('blah').parseString('blahfooblah')  # -> ['blah']
-        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
-    
-    For case-insensitive matching, use L{CaselessLiteral}.
-    
-    For keyword matching (force word break before and after the matched string),
-    use L{Keyword} or L{CaselessKeyword}.
-    """
-    def __init__( self, matchString ):
-        super(Literal,self).__init__()
-        self.match = matchString
-        self.matchLen = len(matchString)
-        try:
-            self.firstMatchChar = matchString[0]
-        except IndexError:
-            warnings.warn("null string passed to Literal; use Empty() instead",
-                            SyntaxWarning, stacklevel=2)
-            self.__class__ = Empty
-        self.name = '"%s"' % _ustr(self.match)
-        self.errmsg = "Expected " + self.name
-        self.mayReturnEmpty = False
-        self.mayIndexError = False
-
-    # Performance tuning: this routine gets called a *lot*
-    # if this is a single character match string  and the first character matches,
-    # short-circuit as quickly as possible, and avoid calling startswith
-    #~ @profile
-    def parseImpl( self, instring, loc, doActions=True ):
-        if (instring[loc] == self.firstMatchChar and
-            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
-            return loc+self.matchLen, self.match
-        raise ParseException(instring, loc, self.errmsg, self)
-_L = Literal
-ParserElement._literalStringClass = Literal
-
-class Keyword(Token):
-    """
-    Token to exactly match a specified string as a keyword, that is, it must be
-    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
-     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
-     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
-    Accepts two optional constructor arguments in addition to the keyword string:
-     - C{identChars} is a string of characters that would be valid identifier characters,
-          defaulting to all alphanumerics + "_" and "$"
-     - C{caseless} allows case-insensitive matching, default is C{False}.
-       
-    Example::
-        Keyword("start").parseString("start")  # -> ['start']
-        Keyword("start").parseString("starting")  # -> Exception
-
-    For case-insensitive matching, use L{CaselessKeyword}.
-    """
-    DEFAULT_KEYWORD_CHARS = alphanums+"_$"
-
-    def __init__( self, matchString, identChars=None, caseless=False ):
-        super(Keyword,self).__init__()
-        if identChars is None:
-            identChars = Keyword.DEFAULT_KEYWORD_CHARS
-        self.match = matchString
-        self.matchLen = len(matchString)
-        try:
-            self.firstMatchChar = matchString[0]
-        except IndexError:
-            warnings.warn("null string passed to Keyword; use Empty() instead",
-                            SyntaxWarning, stacklevel=2)
-        self.name = '"%s"' % self.match
-        self.errmsg = "Expected " + self.name
-        self.mayReturnEmpty = False
-        self.mayIndexError = False
-        self.caseless = caseless
-        if caseless:
-            self.caselessmatch = matchString.upper()
-            identChars = identChars.upper()
-        self.identChars = set(identChars)
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if self.caseless:
-            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
-                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
-                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
-                return loc+self.matchLen, self.match
-        else:
-            if (instring[loc] == self.firstMatchChar and
-                (self.matchLen==1 or instring.startswith(self.match,loc)) and
-                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
-                (loc == 0 or instring[loc-1] not in self.identChars) ):
-                return loc+self.matchLen, self.match
-        raise ParseException(instring, loc, self.errmsg, self)
-
-    def copy(self):
-        c = super(Keyword,self).copy()
-        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
-        return c
-
-    @staticmethod
-    def setDefaultKeywordChars( chars ):
-        """Overrides the default Keyword chars
-        """
-        Keyword.DEFAULT_KEYWORD_CHARS = chars
-
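A sketch contrasting Literal and Keyword word-boundary behavior, per the docstrings above (pyparsing 2.x assumed).

    from pyparsing import Keyword, Literal, ParseException

    print(Literal("if").parseString("ifAndOnlyIf"))  # -> ['if']  (prefix match is accepted)
    try:
        Keyword("if").parseString("ifAndOnlyIf")
    except ParseException as pe:
        print("Keyword rejects it:", pe)
    print(Keyword("if").parseString("if x == 1"))    # -> ['if']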
-class CaselessLiteral(Literal):
-    """
-    Token to match a specified string, ignoring case of letters.
-    Note: the matched results will always be in the case of the given
-    match string, NOT the case of the input text.
-
-    Example::
-        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
-        
-    (Contrast with example for L{CaselessKeyword}.)
-    """
-    def __init__( self, matchString ):
-        super(CaselessLiteral,self).__init__( matchString.upper() )
-        # Preserve the defining literal.
-        self.returnString = matchString
-        self.name = "'%s'" % self.returnString
-        self.errmsg = "Expected " + self.name
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if instring[ loc:loc+self.matchLen ].upper() == self.match:
-            return loc+self.matchLen, self.returnString
-        raise ParseException(instring, loc, self.errmsg, self)
-
-class CaselessKeyword(Keyword):
-    """
-    Caseless version of L{Keyword}.
-
-    Example::
-        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
-        
-    (Contrast with example for L{CaselessLiteral}.)
-    """
-    def __init__( self, matchString, identChars=None ):
-        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
-             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
-            return loc+self.matchLen, self.match
-        raise ParseException(instring, loc, self.errmsg, self)
-
-class CloseMatch(Token):
-    """
-    A variation on L{Literal} which matches "close" matches, that is, 
-    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
-     - C{match_string} - string to be matched
-     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
-    
-    The results from a successful parse will contain the matched text from the input string and the following named results:
-     - C{mismatches} - a list of the positions within the match_string where mismatches were found
-     - C{original} - the original match_string used to compare against the input string
-    
-    If C{mismatches} is an empty list, then the match was an exact match.
-    
-    Example::
-        patt = CloseMatch("ATCATCGAATGGA")
-        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
-        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
-
-        # exact match
-        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
-
-        # close match allowing up to 2 mismatches
-        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
-        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
-    """
-    def __init__(self, match_string, maxMismatches=1):
-        super(CloseMatch,self).__init__()
-        self.name = match_string
-        self.match_string = match_string
-        self.maxMismatches = maxMismatches
-        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
-        self.mayIndexError = False
-        self.mayReturnEmpty = False
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        start = loc
-        instrlen = len(instring)
-        maxloc = start + len(self.match_string)
-
-        if maxloc <= instrlen:
-            match_string = self.match_string
-            match_stringloc = 0
-            mismatches = []
-            maxMismatches = self.maxMismatches
-
-            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
-                src,mat = s_m
-                if src != mat:
-                    mismatches.append(match_stringloc)
-                    if len(mismatches) > maxMismatches:
-                        break
-            else:
-                loc = match_stringloc + 1
-                results = ParseResults([instring[start:loc]])
-                results['original'] = self.match_string
-                results['mismatches'] = mismatches
-                return loc, results
-
-        raise ParseException(instring, loc, self.errmsg, self)
-
-
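A sketch of CloseMatch with a mismatch budget, following the docstring example above (pyparsing 2.x assumed).

    from pyparsing import CloseMatch

    patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
    result = patt.parseString("ATCAXCGAAXGGA")
    print(result[0])             # -> 'ATCAXCGAAXGGA'
    print(result["mismatches"])  # -> [4, 9]
    print(result["original"])    # -> 'ATCATCGAATGGA'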
-class Word(Token):
-    """
-    Token for matching words composed of allowed character sets.
-    Defined with string containing all allowed initial characters,
-    an optional string containing allowed body characters (if omitted,
-    defaults to the initial character set), and an optional minimum,
-    maximum, and/or exact length.  The default value for C{min} is 1 (a
-    minimum value < 1 is not valid); the default values for C{max} and C{exact}
-    are 0, meaning no maximum or exact length restriction. An optional
-    C{excludeChars} parameter can list characters to exclude from the defined
-    initial and body character sets; useful to define a word of all printables
-    except for one or two characters, for instance.
-    
-    L{srange} is useful for defining custom character set strings for defining 
-    C{Word} expressions, using range notation from regular expression character sets.
-    
-    A common mistake is to use C{Word} to match a specific literal string, as in 
-    C{Word("Address")}. Remember that C{Word} uses the string argument to define
-    I{sets} of matchable characters. This expression would match "Add", "AAA",
-    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
-    To match an exact literal string, use L{Literal} or L{Keyword}.
-
-    pyparsing includes helper strings for building Words:
-     - L{alphas}
-     - L{nums}
-     - L{alphanums}
-     - L{hexnums}
-     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
-     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
-     - L{printables} (any non-whitespace character)
-
-    Example::
-        # a word composed of digits
-        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
-        
-        # a word with a leading capital, and zero or more lowercase
-        capital_word = Word(alphas.upper(), alphas.lower())
-
-        # hostnames are alphanumeric, with leading alpha, and '-'
-        hostname = Word(alphas, alphanums+'-')
-        
-        # roman numeral (not a strict parser, accepts invalid mix of characters)
-        roman = Word("IVXLCDM")
-        
-        # any string of non-whitespace characters, except for ','
-        csv_value = Word(printables, excludeChars=",")
-    """
-    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
-        super(Word,self).__init__()
-        if excludeChars:
-            initChars = ''.join(c for c in initChars if c not in excludeChars)
-            if bodyChars:
-                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
-        self.initCharsOrig = initChars
-        self.initChars = set(initChars)
-        if bodyChars :
-            self.bodyCharsOrig = bodyChars
-            self.bodyChars = set(bodyChars)
-        else:
-            self.bodyCharsOrig = initChars
-            self.bodyChars = set(initChars)
-
-        self.maxSpecified = max > 0
-
-        if min < 1:
-            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
-
-        self.minLen = min
-
-        if max > 0:
-            self.maxLen = max
-        else:
-            self.maxLen = _MAX_INT
-
-        if exact > 0:
-            self.maxLen = exact
-            self.minLen = exact
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.asKeyword = asKeyword
-
-        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
-            if self.bodyCharsOrig == self.initCharsOrig:
-                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
-            elif len(self.initCharsOrig) == 1:
-                self.reString = "%s[%s]*" % \
-                                      (re.escape(self.initCharsOrig),
-                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
-            else:
-                self.reString = "[%s][%s]*" % \
-                                      (_escapeRegexRangeChars(self.initCharsOrig),
-                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
-            if self.asKeyword:
-                self.reString = r"\b"+self.reString+r"\b"
-            try:
-                self.re = re.compile( self.reString )
-            except Exception:
-                self.re = None
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if self.re:
-            result = self.re.match(instring,loc)
-            if not result:
-                raise ParseException(instring, loc, self.errmsg, self)
-
-            loc = result.end()
-            return loc, result.group()
-
-        if not(instring[ loc ] in self.initChars):
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        start = loc
-        loc += 1
-        instrlen = len(instring)
-        bodychars = self.bodyChars
-        maxloc = start + self.maxLen
-        maxloc = min( maxloc, instrlen )
-        while loc < maxloc and instring[loc] in bodychars:
-            loc += 1
-
-        throwException = False
-        if loc - start < self.minLen:
-            throwException = True
-        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
-            throwException = True
-        if self.asKeyword:
-            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
-                throwException = True
-
-        if throwException:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        return loc, instring[start:loc]
-
-    def __str__( self ):
-        try:
-            return super(Word,self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-
-            def charsAsStr(s):
-                if len(s)>4:
-                    return s[:4]+"..."
-                else:
-                    return s
-
-            if ( self.initCharsOrig != self.bodyCharsOrig ):
-                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
-            else:
-                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
-
-        return self.strRepr
-
-
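A condensed, runnable version of the Word character-set examples from the docstring above (pyparsing 2.x assumed; the hostname/CSV names are illustrative).

    from pyparsing import Word, alphas, alphanums, nums, printables

    integer   = Word(nums)
    hostname  = Word(alphas, alphanums + "-")
    csv_value = Word(printables, excludeChars=",")

    print(integer.parseString("1234"))            # -> ['1234']
    print(hostname.parseString("mail-server01"))  # -> ['mail-server01']
    print(csv_value.parseString("abc!@#,def"))    # -> ['abc!@#']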
-class Regex(Token):
-    r"""
-    Token for matching strings that match a given regular expression.
-    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
-    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
-    named parse results.
-
-    Example::
-        realnum = Regex(r"[+-]?\d+\.\d*")
-        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
-        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
-        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
-    """
-    compiledREtype = type(re.compile("[A-Z]"))
-    def __init__( self, pattern, flags=0):
-        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
-        super(Regex,self).__init__()
-
-        if isinstance(pattern, basestring):
-            if not pattern:
-                warnings.warn("null string passed to Regex; use Empty() instead",
-                        SyntaxWarning, stacklevel=2)
-
-            self.pattern = pattern
-            self.flags = flags
-
-            try:
-                self.re = re.compile(self.pattern, self.flags)
-                self.reString = self.pattern
-            except sre_constants.error:
-                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
-                    SyntaxWarning, stacklevel=2)
-                raise
-
-        elif isinstance(pattern, Regex.compiledREtype):
-            self.re = pattern
-            self.pattern = \
-            self.reString = str(pattern)
-            self.flags = flags
-            
-        else:
-            raise ValueError("Regex may only be constructed with a string or a compiled RE object")
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        result = self.re.match(instring,loc)
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        d = result.groupdict()
-        ret = ParseResults(result.group())
-        if d:
-            for k in d:
-                ret[k] = d[k]
-        return loc,ret
-
-    def __str__( self ):
-        try:
-            return super(Regex,self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-            self.strRepr = "Re:(%s)" % repr(self.pattern)
-
-        return self.strRepr
-
-
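A sketch showing how named groups in a Regex become named parse results (pyparsing 2.x assumed; the group names are illustrative).

    from pyparsing import Regex

    date = Regex(r"(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)")
    result = date.parseString("2020-03-07")
    print(result[0])        # -> '2020-03-07'
    print(result["year"])   # -> '2020'
    print(result["month"])  # -> '03'
    print(result["day"])    # -> '07'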
-class QuotedString(Token):
-    r"""
-    Token for matching strings that are delimited by quoting characters.
-    
-    Defined with the following parameters:
-        - quoteChar - string of one or more characters defining the quote delimiting string
-        - escChar - character to escape quotes, typically backslash (default=C{None})
-        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
-        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
-        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
-        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
-        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})
-
-    Example::
-        qs = QuotedString('"')
-        print(qs.searchString('lsjdf "This is the quote" sldjf'))
-        complex_qs = QuotedString('{{', endQuoteChar='}}')
-        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
-        sql_qs = QuotedString('"', escQuote='""')
-        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
-    prints::
-        [['This is the quote']]
-        [['This is the "quote"']]
-        [['This is the quote with "embedded" quotes']]
-    """
-    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
-        super(QuotedString,self).__init__()
-
-        # remove white space from quote chars - won't work anyway
-        quoteChar = quoteChar.strip()
-        if not quoteChar:
-            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
-            raise SyntaxError()
-
-        if endQuoteChar is None:
-            endQuoteChar = quoteChar
-        else:
-            endQuoteChar = endQuoteChar.strip()
-            if not endQuoteChar:
-                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
-                raise SyntaxError()
-
-        self.quoteChar = quoteChar
-        self.quoteCharLen = len(quoteChar)
-        self.firstQuoteChar = quoteChar[0]
-        self.endQuoteChar = endQuoteChar
-        self.endQuoteCharLen = len(endQuoteChar)
-        self.escChar = escChar
-        self.escQuote = escQuote
-        self.unquoteResults = unquoteResults
-        self.convertWhitespaceEscapes = convertWhitespaceEscapes
-
-        if multiline:
-            self.flags = re.MULTILINE | re.DOTALL
-            self.pattern = r'%s(?:[^%s%s]' % \
-                ( re.escape(self.quoteChar),
-                  _escapeRegexRangeChars(self.endQuoteChar[0]),
-                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
-        else:
-            self.flags = 0
-            self.pattern = r'%s(?:[^%s\n\r%s]' % \
-                ( re.escape(self.quoteChar),
-                  _escapeRegexRangeChars(self.endQuoteChar[0]),
-                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
-        if len(self.endQuoteChar) > 1:
-            self.pattern += (
-                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
-                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
-                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
-                )
-        if escQuote:
-            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
-        if escChar:
-            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
-            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
-        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
-
-        try:
-            self.re = re.compile(self.pattern, self.flags)
-            self.reString = self.pattern
-        except sre_constants.error:
-            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
-                SyntaxWarning, stacklevel=2)
-            raise
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        ret = result.group()
-
-        if self.unquoteResults:
-
-            # strip off quotes
-            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
-
-            if isinstance(ret,basestring):
-                # replace escaped whitespace
-                if '\\' in ret and self.convertWhitespaceEscapes:
-                    ws_map = {
-                        r'\t' : '\t',
-                        r'\n' : '\n',
-                        r'\f' : '\f',
-                        r'\r' : '\r',
-                    }
-                    for wslit,wschar in ws_map.items():
-                        ret = ret.replace(wslit, wschar)
-
-                # replace escaped characters
-                if self.escChar:
-                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
-
-                # replace escaped quotes
-                if self.escQuote:
-                    ret = ret.replace(self.escQuote, self.endQuoteChar)
-
-        return loc, ret
-
-    def __str__( self ):
-        try:
-            return super(QuotedString,self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
-
-        return self.strRepr
-
-
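A runnable version of two QuotedString variants from the docstring above: plain double quotes and SQL-style doubled-quote escapes (pyparsing 2.x assumed).

    from pyparsing import QuotedString

    qs = QuotedString('"')
    print(qs.searchString('lsjdf "This is the quote" sldjf'))
    # -> [['This is the quote']]

    sql_qs = QuotedString('"', escQuote='""')
    print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    # -> [['This is the quote with "embedded" quotes']]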
-class CharsNotIn(Token):
-    """
-    Token for matching words composed of characters I{not} in a given set (will
-    include whitespace in matched characters if not listed in the provided exclusion set - see example).
-    Defined with string containing all disallowed characters, and an optional
-    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
-    minimum value < 1 is not valid); the default values for C{max} and C{exact}
-    are 0, meaning no maximum or exact length restriction.
-
-    Example::
-        # define a comma-separated-value as anything that is not a ','
-        csv_value = CharsNotIn(',')
-        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
-    prints::
-        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
-    """
-    def __init__( self, notChars, min=1, max=0, exact=0 ):
-        super(CharsNotIn,self).__init__()
-        self.skipWhitespace = False
-        self.notChars = notChars
-
-        if min < 1:
-            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
-
-        self.minLen = min
-
-        if max > 0:
-            self.maxLen = max
-        else:
-            self.maxLen = _MAX_INT
-
-        if exact > 0:
-            self.maxLen = exact
-            self.minLen = exact
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayReturnEmpty = ( self.minLen == 0 )
-        self.mayIndexError = False
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if instring[loc] in self.notChars:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        start = loc
-        loc += 1
-        notchars = self.notChars
-        maxlen = min( start+self.maxLen, len(instring) )
-        while loc < maxlen and \
-              (instring[loc] not in notchars):
-            loc += 1
-
-        if loc - start < self.minLen:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        return loc, instring[start:loc]
-
-    def __str__( self ):
-        try:
-            return super(CharsNotIn, self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-            if len(self.notChars) > 4:
-                self.strRepr = "!W:(%s...)" % self.notChars[:4]
-            else:
-                self.strRepr = "!W:(%s)" % self.notChars
-
-        return self.strRepr
-
-class White(Token):
-    """
-    Special matching class for matching whitespace.  Normally, whitespace is ignored
-    by pyparsing grammars.  This class is included when some whitespace structures
-    are significant.  Define with a string containing the whitespace characters to be
-    matched; default is C{" \\t\\r\\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
-    as defined for the C{L{Word}} class.
-    """
-    whiteStrs = {
-        " " : "",
-        "\t": "",
-        "\n": "",
-        "\r": "",
-        "\f": "",
-        }
-    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
-        super(White,self).__init__()
-        self.matchWhite = ws
-        self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
-        #~ self.leaveWhitespace()
-        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
-        self.mayReturnEmpty = True
-        self.errmsg = "Expected " + self.name
-
-        self.minLen = min
-
-        if max > 0:
-            self.maxLen = max
-        else:
-            self.maxLen = _MAX_INT
-
-        if exact > 0:
-            self.maxLen = exact
-            self.minLen = exact
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if not(instring[ loc ] in self.matchWhite):
-            raise ParseException(instring, loc, self.errmsg, self)
-        start = loc
-        loc += 1
-        maxloc = start + self.maxLen
-        maxloc = min( maxloc, len(instring) )
-        while loc < maxloc and instring[loc] in self.matchWhite:
-            loc += 1
-
-        if loc - start < self.minLen:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        return loc, instring[start:loc]
-
-
-class _PositionToken(Token):
-    def __init__( self ):
-        super(_PositionToken,self).__init__()
-        self.name=self.__class__.__name__
-        self.mayReturnEmpty = True
-        self.mayIndexError = False
-
-class GoToColumn(_PositionToken):
-    """
-    Token to advance to a specific column of input text; useful for tabular report scraping.
-    """
-    def __init__( self, colno ):
-        super(GoToColumn,self).__init__()
-        self.col = colno
-
-    def preParse( self, instring, loc ):
-        if col(loc,instring) != self.col:
-            instrlen = len(instring)
-            if self.ignoreExprs:
-                loc = self._skipIgnorables( instring, loc )
-            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
-                loc += 1
-        return loc
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        thiscol = col( loc, instring )
-        if thiscol > self.col:
-            raise ParseException( instring, loc, "Text not in expected column", self )
-        newloc = loc + self.col - thiscol
-        ret = instring[ loc: newloc ]
-        return newloc, ret
-
-
-class LineStart(_PositionToken):
-    """
-    Matches if current position is at the beginning of a line within the parse string
-    
-    Example::
-    
-        test = '''\
-        AAA this line
-        AAA and this line
-          AAA but not this one
-        B AAA and definitely not this one
-        '''
-
-        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
-            print(t)
-    
-    Prints::
-        ['AAA', ' this line']
-        ['AAA', ' and this line']    
-
-    """
-    def __init__( self ):
-        super(LineStart,self).__init__()
-        self.errmsg = "Expected start of line"
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if col(loc, instring) == 1:
-            return loc, []
-        raise ParseException(instring, loc, self.errmsg, self)
-
-class LineEnd(_PositionToken):
-    """
-    Matches if current position is at the end of a line within the parse string
-    """
-    def __init__( self ):
-        super(LineEnd,self).__init__()
-        self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
-        self.errmsg = "Expected end of line"
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if loc<len(instring):
-            if instring[loc] == "\n":
-                return loc+1, "\n"
-            else:
-                raise ParseException(instring, loc, self.errmsg, self)
-        elif loc == len(instring):
-            return loc+1, []
-        else:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-class StringStart(_PositionToken):
-    """
-    Matches if current position is at the beginning of the parse string
-    """
-    def __init__( self ):
-        super(StringStart,self).__init__()
-        self.errmsg = "Expected start of text"
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if loc != 0:
-            # see if entire string up to here is just whitespace and ignoreables
-            if loc != self.preParse( instring, 0 ):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-class StringEnd(_PositionToken):
-    """
-    Matches if current position is at the end of the parse string
-    """
-    def __init__( self ):
-        super(StringEnd,self).__init__()
-        self.errmsg = "Expected end of text"
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if loc < len(instring):
-            raise ParseException(instring, loc, self.errmsg, self)
-        elif loc == len(instring):
-            return loc+1, []
-        elif loc > len(instring):
-            return loc, []
-        else:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-class WordStart(_PositionToken):
-    """
-    Matches if the current position is at the beginning of a Word, and
-    is not preceded by any character in a given set of C{wordChars}
-    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
-    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
-    the string being parsed, or at the beginning of a line.
-    """
-    def __init__(self, wordChars = printables):
-        super(WordStart,self).__init__()
-        self.wordChars = set(wordChars)
-        self.errmsg = "Not at the start of a word"
-
-    def parseImpl(self, instring, loc, doActions=True ):
-        if loc != 0:
-            if (instring[loc-1] in self.wordChars or
-                instring[loc] not in self.wordChars):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-class WordEnd(_PositionToken):
-    """
-    Matches if the current position is at the end of a Word, and
-    is not followed by any character in a given set of C{wordChars}
-    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
-    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
-    the string being parsed, or at the end of a line.
-    """
-    def __init__(self, wordChars = printables):
-        super(WordEnd,self).__init__()
-        self.wordChars = set(wordChars)
-        self.skipWhitespace = False
-        self.errmsg = "Not at the end of a word"
-
-    def parseImpl(self, instring, loc, doActions=True ):
-        instrlen = len(instring)
-        if instrlen>0 and loc<instrlen:
-            if (instring[loc] in self.wordChars or
-                instring[loc-1] not in self.wordChars):
-                raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-
-class Or(ParseExpression):
-    """
-    Requires that at least one C{ParseExpression} is found.
-    If two expressions match, the expression that matches the longest string will be used.
-    May be constructed using the C{'^'} operator.
-    """
-    def __init__( self, exprs, savelist = False ):
-        super(Or,self).__init__(exprs, savelist)
-        if self.exprs:
-            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
-        else:
-            self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        maxExcLoc = -1
-        maxException = None
-        matches = []
-        for e in self.exprs:
-            try:
-                loc2 = e.tryParse( instring, loc )
-            except ParseException as err:
-                err.__traceback__ = None
-                if err.loc > maxExcLoc:
-                    maxException = err
-                    maxExcLoc = err.loc
-            except IndexError:
-                if len(instring) > maxExcLoc:
-                    maxException = ParseException(instring,len(instring),e.errmsg,self)
-                    maxExcLoc = len(instring)
-            else:
-                # save match among all matches, to retry longest to shortest
-                matches.append((loc2, e))
-
-        if matches:
-            matches.sort(key=lambda x: -x[0])
-            for _,e in matches:
-                try:
-                    return e._parse( instring, loc, doActions )
-                except ParseException as err:
-                    err.__traceback__ = None
-                    if err.loc > maxExcLoc:
-                        maxException = err
-                        maxExcLoc = err.loc
-
-        if maxException is not None:
-            maxException.msg = self.errmsg
-            raise maxException
-        else:
-            raise ParseException(instring, loc, "no defined alternatives to match", self)
-
-
-    def __ixor__(self, other ):
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        return self.append( other ) #Or( [ self, other ] )
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
-
-        return self.strRepr
-
-    def checkRecursion( self, parseElementList ):
-        subRecCheckList = parseElementList[:] + [ self ]
-        for e in self.exprs:
-            e.checkRecursion( subRecCheckList )
-
-
-class MatchFirst(ParseExpression):
-    """
-    Requires that at least one C{ParseExpression} is found.
-    If two expressions match, the first one listed is the one that will match.
-    May be constructed using the C{'|'} operator.
-
-    Example::
-        # construct MatchFirst using '|' operator
-        
-        # watch the order of expressions to match
-        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
-        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
-
-        # put more selective expression first
-        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
-        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
-    """
-    def __init__( self, exprs, savelist = False ):
-        super(MatchFirst,self).__init__(exprs, savelist)
-        if self.exprs:
-            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
-        else:
-            self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        maxExcLoc = -1
-        maxException = None
-        for e in self.exprs:
-            try:
-                ret = e._parse( instring, loc, doActions )
-                return ret
-            except ParseException as err:
-                if err.loc > maxExcLoc:
-                    maxException = err
-                    maxExcLoc = err.loc
-            except IndexError:
-                if len(instring) > maxExcLoc:
-                    maxException = ParseException(instring,len(instring),e.errmsg,self)
-                    maxExcLoc = len(instring)
-
-        # only got here if no expression matched, raise exception for match that made it the furthest
-        else:
-            if maxException is not None:
-                maxException.msg = self.errmsg
-                raise maxException
-            else:
-                raise ParseException(instring, loc, "no defined alternatives to match", self)
-
-    def __ior__(self, other ):
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass( other )
-        return self.append( other ) #MatchFirst( [ self, other ] )
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
-
-        return self.strRepr
-
-    def checkRecursion( self, parseElementList ):
-        subRecCheckList = parseElementList[:] + [ self ]
-        for e in self.exprs:
-            e.checkRecursion( subRecCheckList )
-
-
-class Each(ParseExpression):
-    """
-    Requires all given C{ParseExpression}s to be found, but in any order.
-    Expressions may be separated by whitespace.
-    May be constructed using the C{'&'} operator.
-
-    Example::
-        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
-        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
-        integer = Word(nums)
-        shape_attr = "shape:" + shape_type("shape")
-        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
-        color_attr = "color:" + color("color")
-        size_attr = "size:" + integer("size")
-
-        # use Each (using operator '&') to accept attributes in any order 
-        # (shape and posn are required, color and size are optional)
-        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
-
-        shape_spec.runTests('''
-            shape: SQUARE color: BLACK posn: 100, 120
-            shape: CIRCLE size: 50 color: BLUE posn: 50,80
-            color:GREEN size:20 shape:TRIANGLE posn:20,40
-            '''
-            )
-    prints::
-        shape: SQUARE color: BLACK posn: 100, 120
-        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
-        - color: BLACK
-        - posn: ['100', ',', '120']
-          - x: 100
-          - y: 120
-        - shape: SQUARE
-
-
-        shape: CIRCLE size: 50 color: BLUE posn: 50,80
-        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
-        - color: BLUE
-        - posn: ['50', ',', '80']
-          - x: 50
-          - y: 80
-        - shape: CIRCLE
-        - size: 50
-
-
-        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
-        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
-        - color: GREEN
-        - posn: ['20', ',', '40']
-          - x: 20
-          - y: 40
-        - shape: TRIANGLE
-        - size: 20
-    """
-    def __init__( self, exprs, savelist = True ):
-        super(Each,self).__init__(exprs, savelist)
-        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
-        self.skipWhitespace = True
-        self.initExprGroups = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if self.initExprGroups:
-            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
-            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
-            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
-            self.optionals = opt1 + opt2
-            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
-            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
-            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
-            self.required += self.multirequired
-            self.initExprGroups = False
-        tmpLoc = loc
-        tmpReqd = self.required[:]
-        tmpOpt  = self.optionals[:]
-        matchOrder = []
-
-        keepMatching = True
-        while keepMatching:
-            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
-            failed = []
-            for e in tmpExprs:
-                try:
-                    tmpLoc = e.tryParse( instring, tmpLoc )
-                except ParseException:
-                    failed.append(e)
-                else:
-                    matchOrder.append(self.opt1map.get(id(e),e))
-                    if e in tmpReqd:
-                        tmpReqd.remove(e)
-                    elif e in tmpOpt:
-                        tmpOpt.remove(e)
-            if len(failed) == len(tmpExprs):
-                keepMatching = False
-
-        if tmpReqd:
-            missing = ", ".join(_ustr(e) for e in tmpReqd)
-            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
-
-        # add any unmatched Optionals, in case they have default values defined
-        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
-
-        resultlist = []
-        for e in matchOrder:
-            loc,results = e._parse(instring,loc,doActions)
-            resultlist.append(results)
-
-        finalResults = sum(resultlist, ParseResults([]))
-        return loc, finalResults
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
-
-        return self.strRepr
-
-    def checkRecursion( self, parseElementList ):
-        subRecCheckList = parseElementList[:] + [ self ]
-        for e in self.exprs:
-            e.checkRecursion( subRecCheckList )
-
-
-class ParseElementEnhance(ParserElement):
-    """
-    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
-    """
-    def __init__( self, expr, savelist=False ):
-        super(ParseElementEnhance,self).__init__(savelist)
-        if isinstance( expr, basestring ):
-            if issubclass(ParserElement._literalStringClass, Token):
-                expr = ParserElement._literalStringClass(expr)
-            else:
-                expr = ParserElement._literalStringClass(Literal(expr))
-        self.expr = expr
-        self.strRepr = None
-        if expr is not None:
-            self.mayIndexError = expr.mayIndexError
-            self.mayReturnEmpty = expr.mayReturnEmpty
-            self.setWhitespaceChars( expr.whiteChars )
-            self.skipWhitespace = expr.skipWhitespace
-            self.saveAsList = expr.saveAsList
-            self.callPreparse = expr.callPreparse
-            self.ignoreExprs.extend(expr.ignoreExprs)
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if self.expr is not None:
-            return self.expr._parse( instring, loc, doActions, callPreParse=False )
-        else:
-            raise ParseException("",loc,self.errmsg,self)
-
-    def leaveWhitespace( self ):
-        self.skipWhitespace = False
-        self.expr = self.expr.copy()
-        if self.expr is not None:
-            self.expr.leaveWhitespace()
-        return self
-
-    def ignore( self, other ):
-        if isinstance( other, Suppress ):
-            if other not in self.ignoreExprs:
-                super( ParseElementEnhance, self).ignore( other )
-                if self.expr is not None:
-                    self.expr.ignore( self.ignoreExprs[-1] )
-        else:
-            super( ParseElementEnhance, self).ignore( other )
-            if self.expr is not None:
-                self.expr.ignore( self.ignoreExprs[-1] )
-        return self
-
-    def streamline( self ):
-        super(ParseElementEnhance,self).streamline()
-        if self.expr is not None:
-            self.expr.streamline()
-        return self
-
-    def checkRecursion( self, parseElementList ):
-        if self in parseElementList:
-            raise RecursiveGrammarException( parseElementList+[self] )
-        subRecCheckList = parseElementList[:] + [ self ]
-        if self.expr is not None:
-            self.expr.checkRecursion( subRecCheckList )
-
-    def validate( self, validateTrace=[] ):
-        tmp = validateTrace[:]+[self]
-        if self.expr is not None:
-            self.expr.validate(tmp)
-        self.checkRecursion( [] )
-
-    def __str__( self ):
-        try:
-            return super(ParseElementEnhance,self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None and self.expr is not None:
-            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
-        return self.strRepr
-
-
-class FollowedBy(ParseElementEnhance):
-    """
-    Lookahead matching of the given parse expression.  C{FollowedBy}
-    does I{not} advance the parsing position within the input string, it only
-    verifies that the specified parse expression matches at the current
-    position.  C{FollowedBy} always returns a null token list.
-
-    Example::
-        # use FollowedBy to match a label only if it is followed by a ':'
-        data_word = Word(alphas)
-        label = data_word + FollowedBy(':')
-        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
-        
-        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
-    prints::
-        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
-    """
-    def __init__( self, expr ):
-        super(FollowedBy,self).__init__(expr)
-        self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        self.expr.tryParse( instring, loc )
-        return loc, []
-
-
-class NotAny(ParseElementEnhance):
-    """
-    Lookahead to disallow matching with the given parse expression.  C{NotAny}
-    does I{not} advance the parsing position within the input string, it only
-    verifies that the specified parse expression does I{not} match at the current
-    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
-    always returns a null token list.  May be constructed using the '~' operator.
-
-    Example::
-        
-    """
-    def __init__( self, expr ):
-        super(NotAny,self).__init__(expr)
-        #~ self.leaveWhitespace()
-        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
-        self.mayReturnEmpty = True
-        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        if self.expr.canParseNext(instring, loc):
-            raise ParseException(instring, loc, self.errmsg, self)
-        return loc, []
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "~{" + _ustr(self.expr) + "}"
-
-        return self.strRepr
-
-class _MultipleMatch(ParseElementEnhance):
-    def __init__( self, expr, stopOn=None):
-        super(_MultipleMatch, self).__init__(expr)
-        self.saveAsList = True
-        ender = stopOn
-        if isinstance(ender, basestring):
-            ender = ParserElement._literalStringClass(ender)
-        self.not_ender = ~ender if ender is not None else None
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        self_expr_parse = self.expr._parse
-        self_skip_ignorables = self._skipIgnorables
-        check_ender = self.not_ender is not None
-        if check_ender:
-            try_not_ender = self.not_ender.tryParse
-        
-        # must be at least one (but first see if we are the stopOn sentinel;
-        # if so, fail)
-        if check_ender:
-            try_not_ender(instring, loc)
-        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
-        try:
-            hasIgnoreExprs = (not not self.ignoreExprs)
-            while 1:
-                if check_ender:
-                    try_not_ender(instring, loc)
-                if hasIgnoreExprs:
-                    preloc = self_skip_ignorables( instring, loc )
-                else:
-                    preloc = loc
-                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
-                if tmptokens or tmptokens.haskeys():
-                    tokens += tmptokens
-        except (ParseException,IndexError):
-            pass
-
-        return loc, tokens
-        
-class OneOrMore(_MultipleMatch):
-    """
-    Repetition of one or more of the given expression.
-    
-    Parameters:
-     - expr - expression that must match one or more times
-     - stopOn - (default=C{None}) - expression for a terminating sentinel
-          (only required if the sentinel would ordinarily match the repetition 
-          expression)          
-
-    Example::
-        data_word = Word(alphas)
-        label = data_word + FollowedBy(':')
-        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
-
-        text = "shape: SQUARE posn: upper left color: BLACK"
-        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
-
-        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
-        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
-        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
-        
-        # could also be written as
-        (attr_expr * (1,)).parseString(text).pprint()
-    """
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "{" + _ustr(self.expr) + "}..."
-
-        return self.strRepr
-
-class ZeroOrMore(_MultipleMatch):
-    """
-    Optional repetition of zero or more of the given expression.
-    
-    Parameters:
-     - expr - expression that must match zero or more times
-     - stopOn - (default=C{None}) - expression for a terminating sentinel
-          (only required if the sentinel would ordinarily match the repetition 
-          expression)          
-
-    Example: similar to L{OneOrMore}
-    """
-    def __init__( self, expr, stopOn=None):
-        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
-        self.mayReturnEmpty = True
-        
-    def parseImpl( self, instring, loc, doActions=True ):
-        try:
-            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
-        except (ParseException,IndexError):
-            return loc, []
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "[" + _ustr(self.expr) + "]..."
-
-        return self.strRepr
-
-class _NullToken(object):
-    def __bool__(self):
-        return False
-    __nonzero__ = __bool__
-    def __str__(self):
-        return ""
-
-_optionalNotMatched = _NullToken()
-class Optional(ParseElementEnhance):
-    """
-    Optional matching of the given expression.
-
-    Parameters:
-     - expr - expression that must match zero or more times
-     - default (optional) - value to be returned if the optional expression is not found.
-
-    Example::
-        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
-        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
-        zip.runTests('''
-            # traditional ZIP code
-            12345
-            
-            # ZIP+4 form
-            12101-0001
-            
-            # invalid ZIP
-            98765-
-            ''')
-    prints::
-        # traditional ZIP code
-        12345
-        ['12345']
-
-        # ZIP+4 form
-        12101-0001
-        ['12101-0001']
-
-        # invalid ZIP
-        98765-
-             ^
-        FAIL: Expected end of text (at char 5), (line:1, col:6)
-    """
-    def __init__( self, expr, default=_optionalNotMatched ):
-        super(Optional,self).__init__( expr, savelist=False )
-        self.saveAsList = self.expr.saveAsList
-        self.defaultValue = default
-        self.mayReturnEmpty = True
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        try:
-            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
-        except (ParseException,IndexError):
-            if self.defaultValue is not _optionalNotMatched:
-                if self.expr.resultsName:
-                    tokens = ParseResults([ self.defaultValue ])
-                    tokens[self.expr.resultsName] = self.defaultValue
-                else:
-                    tokens = [ self.defaultValue ]
-            else:
-                tokens = []
-        return loc, tokens
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-
-        if self.strRepr is None:
-            self.strRepr = "[" + _ustr(self.expr) + "]"
-
-        return self.strRepr
-
-class SkipTo(ParseElementEnhance):
-    """
-    Token for skipping over all undefined text until the matched expression is found.
-
-    Parameters:
-     - expr - target expression marking the end of the data to be skipped
-     - include - (default=C{False}) if True, the target expression is also parsed 
-          (the skipped text and target expression are returned as a 2-element list).
-     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
-          comments) that might contain false matches to the target expression
-     - failOn - (default=C{None}) define expressions that are not allowed to be 
-          included in the skipped test; if found before the target expression is found, 
-          the SkipTo is not a match
-
-    Example::
-        report = '''
-            Outstanding Issues Report - 1 Jan 2000
-
-               # | Severity | Description                               |  Days Open
-            -----+----------+-------------------------------------------+-----------
-             101 | Critical | Intermittent system crash                 |          6
-              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
-              79 | Minor    | System slow when running too many reports |         47
-            '''
-        integer = Word(nums)
-        SEP = Suppress('|')
-        # use SkipTo to simply match everything up until the next SEP
-        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
-        # - parse action will call token.strip() for each matched token, i.e., the description body
-        string_data = SkipTo(SEP, ignore=quotedString)
-        string_data.setParseAction(tokenMap(str.strip))
-        ticket_expr = (integer("issue_num") + SEP 
-                      + string_data("sev") + SEP 
-                      + string_data("desc") + SEP 
-                      + integer("days_open"))
-        
-        for tkt in ticket_expr.searchString(report):
-            print(tkt.dump())
-    prints::
-        ['101', 'Critical', 'Intermittent system crash', '6']
-        - days_open: 6
-        - desc: Intermittent system crash
-        - issue_num: 101
-        - sev: Critical
-        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
-        - days_open: 14
-        - desc: Spelling error on Login ('log|n')
-        - issue_num: 94
-        - sev: Cosmetic
-        ['79', 'Minor', 'System slow when running too many reports', '47']
-        - days_open: 47
-        - desc: System slow when running too many reports
-        - issue_num: 79
-        - sev: Minor
-    """
-    def __init__( self, other, include=False, ignore=None, failOn=None ):
-        super( SkipTo, self ).__init__( other )
-        self.ignoreExpr = ignore
-        self.mayReturnEmpty = True
-        self.mayIndexError = False
-        self.includeMatch = include
-        self.asList = False
-        if isinstance(failOn, basestring):
-            self.failOn = ParserElement._literalStringClass(failOn)
-        else:
-            self.failOn = failOn
-        self.errmsg = "No match found for "+_ustr(self.expr)
-
-    def parseImpl( self, instring, loc, doActions=True ):
-        startloc = loc
-        instrlen = len(instring)
-        expr = self.expr
-        expr_parse = self.expr._parse
-        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
-        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
-        
-        tmploc = loc
-        while tmploc <= instrlen:
-            if self_failOn_canParseNext is not None:
-                # break if failOn expression matches
-                if self_failOn_canParseNext(instring, tmploc):
-                    break
-                    
-            if self_ignoreExpr_tryParse is not None:
-                # advance past ignore expressions
-                while 1:
-                    try:
-                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
-                    except ParseBaseException:
-                        break
-            
-            try:
-                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
-            except (ParseException, IndexError):
-                # no match, advance loc in string
-                tmploc += 1
-            else:
-                # matched skipto expr, done
-                break
-
-        else:
-            # ran off the end of the input string without matching skipto expr, fail
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        # build up return values
-        loc = tmploc
-        skiptext = instring[startloc:loc]
-        skipresult = ParseResults(skiptext)
-        
-        if self.includeMatch:
-            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
-            skipresult += mat
-
-        return loc, skipresult
-
-class Forward(ParseElementEnhance):
-    """
-    Forward declaration of an expression to be defined later -
-    used for recursive grammars, such as algebraic infix notation.
-    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
-
-    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
-    Specifically, '|' has a lower precedence than '<<', so that::
-        fwdExpr << a | b | c
-    will actually be evaluated as::
-        (fwdExpr << a) | b | c
-    thereby leaving b and c out as parseable alternatives.  It is recommended that you
-    explicitly group the values inserted into the C{Forward}::
-        fwdExpr << (a | b | c)
-    Converting to use the '<<=' operator instead will avoid this problem.
-
-    See L{ParseResults.pprint} for an example of a recursive parser created using
-    C{Forward}.
-    """
-    def __init__( self, other=None ):
-        super(Forward,self).__init__( other, savelist=False )
-
-    def __lshift__( self, other ):
-        if isinstance( other, basestring ):
-            other = ParserElement._literalStringClass(other)
-        self.expr = other
-        self.strRepr = None
-        self.mayIndexError = self.expr.mayIndexError
-        self.mayReturnEmpty = self.expr.mayReturnEmpty
-        self.setWhitespaceChars( self.expr.whiteChars )
-        self.skipWhitespace = self.expr.skipWhitespace
-        self.saveAsList = self.expr.saveAsList
-        self.ignoreExprs.extend(self.expr.ignoreExprs)
-        return self
-        
-    def __ilshift__(self, other):
-        return self << other
-    
-    def leaveWhitespace( self ):
-        self.skipWhitespace = False
-        return self
-
-    def streamline( self ):
-        if not self.streamlined:
-            self.streamlined = True
-            if self.expr is not None:
-                self.expr.streamline()
-        return self
-
-    def validate( self, validateTrace=[] ):
-        if self not in validateTrace:
-            tmp = validateTrace[:]+[self]
-            if self.expr is not None:
-                self.expr.validate(tmp)
-        self.checkRecursion([])
-
-    def __str__( self ):
-        if hasattr(self,"name"):
-            return self.name
-        return self.__class__.__name__ + ": ..."
-
-        # stubbed out for now - creates awful memory and perf issues
-        self._revertClass = self.__class__
-        self.__class__ = _ForwardNoRecurse
-        try:
-            if self.expr is not None:
-                retString = _ustr(self.expr)
-            else:
-                retString = "None"
-        finally:
-            self.__class__ = self._revertClass
-        return self.__class__.__name__ + ": " + retString
-
-    def copy(self):
-        if self.expr is not None:
-            return super(Forward,self).copy()
-        else:
-            ret = Forward()
-            ret <<= self
-            return ret
-
-class _ForwardNoRecurse(Forward):
-    def __str__( self ):
-        return "..."
-
-class TokenConverter(ParseElementEnhance):
-    """
-    Abstract subclass of C{ParseExpression}, for converting parsed results.
-    """
-    def __init__( self, expr, savelist=False ):
-        super(TokenConverter,self).__init__( expr )#, savelist )
-        self.saveAsList = False
-
-class Combine(TokenConverter):
-    """
-    Converter to concatenate all matching tokens to a single string.
-    By default, the matching patterns must also be contiguous in the input string;
-    this can be disabled by specifying C{'adjacent=False'} in the constructor.
-
-    Example::
-        real = Word(nums) + '.' + Word(nums)
-        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
-        # will also erroneously match the following
-        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
-
-        real = Combine(Word(nums) + '.' + Word(nums))
-        print(real.parseString('3.1416')) # -> ['3.1416']
-        # no match when there are internal spaces
-        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
-    """
-    def __init__( self, expr, joinString="", adjacent=True ):
-        super(Combine,self).__init__( expr )
-        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
-        if adjacent:
-            self.leaveWhitespace()
-        self.adjacent = adjacent
-        self.skipWhitespace = True
-        self.joinString = joinString
-        self.callPreparse = True
-
-    def ignore( self, other ):
-        if self.adjacent:
-            ParserElement.ignore(self, other)
-        else:
-            super( Combine, self).ignore( other )
-        return self
-
-    def postParse( self, instring, loc, tokenlist ):
-        retToks = tokenlist.copy()
-        del retToks[:]
-        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
-
-        if self.resultsName and retToks.haskeys():
-            return [ retToks ]
-        else:
-            return retToks
-
-class Group(TokenConverter):
-    """
-    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
-
-    Example::
-        ident = Word(alphas)
-        num = Word(nums)
-        term = ident | num
-        func = ident + Optional(delimitedList(term))
-        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']
-
-        func = ident + Group(Optional(delimitedList(term)))
-        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
-    """
-    def __init__( self, expr ):
-        super(Group,self).__init__( expr )
-        self.saveAsList = True
-
-    def postParse( self, instring, loc, tokenlist ):
-        return [ tokenlist ]
-
-class Dict(TokenConverter):
-    """
-    Converter to return a repetitive expression as a list, but also as a dictionary.
-    Each element can also be referenced using the first token in the expression as its key.
-    Useful for tabular report scraping when the first column can be used as an item key.
-
-    Example::
-        data_word = Word(alphas)
-        label = data_word + FollowedBy(':')
-        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
-
-        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
-        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
-        
-        # print attributes as plain groups
-        print(OneOrMore(attr_expr).parseString(text).dump())
-        
-        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
-        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
-        print(result.dump())
-        
-        # access named fields as dict entries, or output as dict
-        print(result['shape'])        
-        print(result.asDict())
-    prints::
-        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
-
-        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
-        - color: light blue
-        - posn: upper left
-        - shape: SQUARE
-        - texture: burlap
-        SQUARE
-        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
-    See more examples at L{ParseResults} of accessing fields by results name.
-    """
-    def __init__( self, expr ):
-        super(Dict,self).__init__( expr )
-        self.saveAsList = True
-
-    def postParse( self, instring, loc, tokenlist ):
-        for i,tok in enumerate(tokenlist):
-            if len(tok) == 0:
-                continue
-            ikey = tok[0]
-            if isinstance(ikey,int):
-                ikey = _ustr(tok[0]).strip()
-            if len(tok)==1:
-                tokenlist[ikey] = _ParseResultsWithOffset("",i)
-            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
-                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
-            else:
-                dictvalue = tok.copy() #ParseResults(i)
-                del dictvalue[0]
-                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
-                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
-                else:
-                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
-
-        if self.resultsName:
-            return [ tokenlist ]
-        else:
-            return tokenlist
-
-
-class Suppress(TokenConverter):
-    """
-    Converter for ignoring the results of a parsed expression.
-
-    Example::
-        source = "a, b, c,d"
-        wd = Word(alphas)
-        wd_list1 = wd + ZeroOrMore(',' + wd)
-        print(wd_list1.parseString(source))
-
-        # often, delimiters that are useful during parsing are just in the
-        # way afterward - use Suppress to keep them out of the parsed output
-        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
-        print(wd_list2.parseString(source))
-    prints::
-        ['a', ',', 'b', ',', 'c', ',', 'd']
-        ['a', 'b', 'c', 'd']
-    (See also L{delimitedList}.)
-    """
-    def postParse( self, instring, loc, tokenlist ):
-        return []
-
-    def suppress( self ):
-        return self
-
-
-class OnlyOnce(object):
-    """
-    Wrapper for parse actions, to ensure they are only called once.
-    """
-    def __init__(self, methodCall):
-        self.callable = _trim_arity(methodCall)
-        self.called = False
-    def __call__(self,s,l,t):
-        if not self.called:
-            results = self.callable(s,l,t)
-            self.called = True
-            return results
-        raise ParseException(s,l,"")
-    def reset(self):
-        self.called = False
-
-def traceParseAction(f):
-    """
-    Decorator for debugging parse actions. 
-    
-    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
-    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
-
-    Example::
-        wd = Word(alphas)
-
-        @traceParseAction
-        def remove_duplicate_chars(tokens):
-            return ''.join(sorted(set(''.join(tokens))))
-
-        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
-        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
-    prints::
-        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
-        <<leaving remove_duplicate_chars (ret: 'dfjkls')
-        ['dfjkls']
-    """
-    f = _trim_arity(f)
-    def z(*paArgs):
-        thisFunc = f.__name__
-        s,l,t = paArgs[-3:]
-        if len(paArgs)>3:
-            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
-        sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
-        try:
-            ret = f(*paArgs)
-        except Exception as exc:
-            sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
-            raise
-        sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
-        return ret
-    try:
-        z.__name__ = f.__name__
-    except AttributeError:
-        pass
-    return z
-
-#
-# global helpers
-#
-def delimitedList( expr, delim=",", combine=False ):
-    """
-    Helper to define a delimited list of expressions - the delimiter defaults to ','.
-    By default, the list elements and delimiters can have intervening whitespace, and
-    comments, but this can be overridden by passing 'combine=True' in the constructor.
-    If combine is set to True, the matching tokens are returned as a single token
-    string, with the delimiters included; otherwise, the matching tokens are returned
-    as a list of tokens, with the delimiters suppressed.
-
-    Example::
-        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
-        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
-    """
-    dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
-    if combine:
-        return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
-    else:
-        return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
-
-def countedArray( expr, intExpr=None ):
-    """
-    Helper to define a counted list of expressions.
-    This helper defines a pattern of the form::
-        integer expr expr expr...
-    where the leading integer tells how many expr expressions follow.
-    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
-    
-    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
-
-    Example::
-        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']
-
-        # in this parser, the leading integer value is given in binary,
-        # '10' indicating that 2 values are in the array
-        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
-        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
-    """
-    arrayExpr = Forward()
-    def countFieldParseAction(s,l,t):
-        n = t[0]
-        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
-        return []
-    if intExpr is None:
-        intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
-    else:
-        intExpr = intExpr.copy()
-    intExpr.setName("arrayLen")
-    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
-    return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
-
-def _flatten(L):
-    ret = []
-    for i in L:
-        if isinstance(i,list):
-            ret.extend(_flatten(i))
-        else:
-            ret.append(i)
-    return ret
-
-def matchPreviousLiteral(expr):
-    """
-    Helper to define an expression that is indirectly defined from
-    the tokens matched in a previous expression, that is, it looks
-    for a 'repeat' of a previous expression.  For example::
-        first = Word(nums)
-        second = matchPreviousLiteral(first)
-        matchExpr = first + ":" + second
-    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
-    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
-    If this is not desired, use C{matchPreviousExpr}.
-    Do I{not} use with packrat parsing enabled.
-    """
-    rep = Forward()
-    def copyTokenToRepeater(s,l,t):
-        if t:
-            if len(t) == 1:
-                rep << t[0]
-            else:
-                # flatten t tokens
-                tflat = _flatten(t.asList())
-                rep << And(Literal(tt) for tt in tflat)
-        else:
-            rep << Empty()
-    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
-    rep.setName('(prev) ' + _ustr(expr))
-    return rep
-
-def matchPreviousExpr(expr):
-    """
-    Helper to define an expression that is indirectly defined from
-    the tokens matched in a previous expression, that is, it looks
-    for a 'repeat' of a previous expression.  For example::
-        first = Word(nums)
-        second = matchPreviousExpr(first)
-        matchExpr = first + ":" + second
-    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
-    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
-    the expressions are evaluated first, and then compared, so
-    C{"1"} is compared with C{"10"}.
-    Do I{not} use with packrat parsing enabled.
-    """
-    rep = Forward()
-    e2 = expr.copy()
-    rep <<= e2
-    def copyTokenToRepeater(s,l,t):
-        matchTokens = _flatten(t.asList())
-        def mustMatchTheseTokens(s,l,t):
-            theseTokens = _flatten(t.asList())
-            if  theseTokens != matchTokens:
-                raise ParseException("",0,"")
-        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
-    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
-    rep.setName('(prev) ' + _ustr(expr))
-    return rep
-
-def _escapeRegexRangeChars(s):
-    #~  escape these chars: ^-]
-    for c in r"\^-]":
-        s = s.replace(c,_bslash+c)
-    s = s.replace("\n",r"\n")
-    s = s.replace("\t",r"\t")
-    return _ustr(s)
-
-def oneOf( strs, caseless=False, useRegex=True ):
-    """
-    Helper to quickly define a set of alternative Literals, and makes sure to do
-    longest-first testing when there is a conflict, regardless of the input order,
-    but returns a C{L{MatchFirst}} for best performance.
-
-    Parameters:
-     - strs - a string of space-delimited literals, or a collection of string literals
-     - caseless - (default=C{False}) - treat all literals as caseless
-     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
-          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
-          if creating a C{Regex} raises an exception)
-
-    Example::
-        comp_oper = oneOf("< = > <= >= !=")
-        var = Word(alphas)
-        number = Word(nums)
-        term = var | number
-        comparison_expr = term + comp_oper + term
-        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
-    prints::
-        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
-    """
-    if caseless:
-        isequal = ( lambda a,b: a.upper() == b.upper() )
-        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
-        parseElementClass = CaselessLiteral
-    else:
-        isequal = ( lambda a,b: a == b )
-        masks = ( lambda a,b: b.startswith(a) )
-        parseElementClass = Literal
-
-    symbols = []
-    if isinstance(strs,basestring):
-        symbols = strs.split()
-    elif isinstance(strs, Iterable):
-        symbols = list(strs)
-    else:
-        warnings.warn("Invalid argument to oneOf, expected string or iterable",
-                SyntaxWarning, stacklevel=2)
-    if not symbols:
-        return NoMatch()
-
-    i = 0
-    while i < len(symbols)-1:
-        cur = symbols[i]
-        for j,other in enumerate(symbols[i+1:]):
-            if ( isequal(other, cur) ):
-                del symbols[i+j+1]
-                break
-            elif ( masks(cur, other) ):
-                del symbols[i+j+1]
-                symbols.insert(i,other)
-                cur = other
-                break
-        else:
-            i += 1
-
-    if not caseless and useRegex:
-        #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
-        try:
-            if len(symbols)==len("".join(symbols)):
-                return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
-            else:
-                return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
-        except Exception:
-            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
-                    SyntaxWarning, stacklevel=2)
-
-
-    # last resort, just use MatchFirst
-    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
-
-def dictOf( key, value ):
-    """
-    Helper to easily and clearly define a dictionary by specifying the respective patterns
-    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
-    in the proper order.  The key pattern can include delimiting markers or punctuation,
-    as long as they are suppressed, thereby leaving the significant key text.  The value
-    pattern can include named results, so that the C{Dict} results can include named token
-    fields.
-
-    Example::
-        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
-        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
-        print(OneOrMore(attr_expr).parseString(text).dump())
-        
-        attr_label = label
-        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
-
-        # similar to Dict, but simpler call format
-        result = dictOf(attr_label, attr_value).parseString(text)
-        print(result.dump())
-        print(result['shape'])
-        print(result.shape)  # object attribute access works too
-        print(result.asDict())
-    prints::
-        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
-        - color: light blue
-        - posn: upper left
-        - shape: SQUARE
-        - texture: burlap
-        SQUARE
-        SQUARE
-        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
-    """
-    return Dict( ZeroOrMore( Group ( key + value ) ) )
-
-def originalTextFor(expr, asString=True):
-    """
-    Helper to return the original, untokenized text for a given expression.  Useful to
-    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
-    revert separate tokens with intervening whitespace back to the original matching
-    input text. By default, returns a string containing the original parsed text.
-       
-    If the optional C{asString} argument is passed as C{False}, then the return value is a 
-    C{L{ParseResults}} containing any results names that were originally matched, and a 
-    single token containing the original matched text from the input string.  So if 
-    the expression passed to C{L{originalTextFor}} contains expressions with defined
-    results names, you must set C{asString} to C{False} if you want to preserve those
-    results name values.
-
-    Example::
-        src = "this is test  bold text  normal text "
-        for tag in ("b","i"):
-            opener,closer = makeHTMLTags(tag)
-            patt = originalTextFor(opener + SkipTo(closer) + closer)
-            print(patt.searchString(src)[0])
-    prints::
-        ['<b> bold <i>text</i> </b>']
-        ['<i>text</i>']
-    """
-    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
-    endlocMarker = locMarker.copy()
-    endlocMarker.callPreparse = False
-    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
-    if asString:
-        extractText = lambda s,l,t: s[t._original_start:t._original_end]
-    else:
-        def extractText(s,l,t):
-            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
-    matchExpr.setParseAction(extractText)
-    matchExpr.ignoreExprs = expr.ignoreExprs
-    return matchExpr
-
-def ungroup(expr): 
-    """
-    Helper to undo pyparsing's default grouping of And expressions, even
-    if all but one are non-empty.
-    """
-    return TokenConverter(expr).setParseAction(lambda t:t[0])
-
-def locatedExpr(expr):
-    """
-    Helper to decorate a returned token with its starting and ending locations in the input string.
-    This helper adds the following results names:
-     - locn_start = location where matched expression begins
-     - locn_end = location where matched expression ends
-     - value = the actual parsed results
-
-    Be careful if the input text contains C{<TAB>} characters, you may want to call
-    C{L{ParserElement.parseWithTabs}}
-
-    Example::
-        wd = Word(alphas)
-        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
-            print(match)
-    prints::
-        [[0, 'ljsdf', 5]]
-        [[8, 'lksdjjf', 15]]
-        [[18, 'lkkjj', 23]]
-    """
-    locator = Empty().setParseAction(lambda s,l,t: l)
-    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
-
-
-# convenience constants for positional expressions
-empty       = Empty().setName("empty")
-lineStart   = LineStart().setName("lineStart")
-lineEnd     = LineEnd().setName("lineEnd")
-stringStart = StringStart().setName("stringStart")
-stringEnd   = StringEnd().setName("stringEnd")
-
-_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
-_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
-_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
-_charRange = Group(_singleChar + Suppress("-") + _singleChar)
-_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
-
-def srange(s):
-    r"""
-    Helper to easily define string ranges for use in Word construction.  Borrows
-    syntax from regexp '[]' string range definitions::
-        srange("[0-9]")   -> "0123456789"
-        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
-        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
-    The input string must be enclosed in []'s, and the returned string is the expanded
-    character set joined into a single string.
-    The values enclosed in the []'s may be:
-     - a single character
-     - an escaped character with a leading backslash (such as C{\-} or C{\]})
-     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
-         (C{\0x##} is also supported for backwards compatibility) 
-     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
-     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
-     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
-    """
-    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
-    try:
-        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
-    except Exception:
-        return ""
-
-def matchOnlyAtCol(n):
-    """
-    Helper method for defining parse actions that require matching at a specific
-    column in the input text.
-    """
-    def verifyCol(strg,locn,toks):
-        if col(locn,strg) != n:
-            raise ParseException(strg,locn,"matched token not at column %d" % n)
-    return verifyCol
-
-def replaceWith(replStr):
-    """
-    Helper method for common parse actions that simply return a literal value.  Especially
-    useful when used with C{L{transformString}()}.
-
-    Example::
-        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
-        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
-        term = na | num
-        
-        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
-    """
-    return lambda s,l,t: [replStr]
-
-def removeQuotes(s,l,t):
-    """
-    Helper parse action for removing quotation marks from parsed quoted strings.
-
-    Example::
-        # by default, quotation marks are included in parsed results
-        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
-
-        # use removeQuotes to strip quotation marks from parsed results
-        quotedString.setParseAction(removeQuotes)
-        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
-    """
-    return t[0][1:-1]
-
-def tokenMap(func, *args):
-    """
-    Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
-    args are passed, they are forwarded to the given function as additional arguments after
-    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
-    parsed data to an integer using base 16.
-
-    Example (compare the last example to the one in L{ParserElement.transformString})::
-        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
-        hex_ints.runTests('''
-            00 11 22 aa FF 0a 0d 1a
-            ''')
-        
-        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
-        OneOrMore(upperword).runTests('''
-            my kingdom for a horse
-            ''')
-
-        wd = Word(alphas).setParseAction(tokenMap(str.title))
-        OneOrMore(wd).setParseAction(' '.join).runTests('''
-            now is the winter of our discontent made glorious summer by this sun of york
-            ''')
-    prints::
-        00 11 22 aa FF 0a 0d 1a
-        [0, 17, 34, 170, 255, 10, 13, 26]
-
-        my kingdom for a horse
-        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
-
-        now is the winter of our discontent made glorious summer by this sun of york
-        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
-    """
-    def pa(s,l,t):
-        return [func(tokn, *args) for tokn in t]
-
-    try:
-        func_name = getattr(func, '__name__', 
-                            getattr(func, '__class__').__name__)
-    except Exception:
-        func_name = str(func)
-    pa.__name__ = func_name
-
-    return pa
-
-upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
-"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
-
-downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
-"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
-    
-def _makeTags(tagStr, xml):
-    """Internal helper to construct opening and closing tag expressions, given a tag name"""
-    if isinstance(tagStr,basestring):
-        resname = tagStr
-        tagStr = Keyword(tagStr, caseless=not xml)
-    else:
-        resname = tagStr.name
-
-    tagAttrName = Word(alphas,alphanums+"_-:")
-    if (xml):
-        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
-        openTag = Suppress("<") + tagStr("tag") + \
-                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
-                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
-    else:
-        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
-        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
-        openTag = Suppress("<") + tagStr("tag") + \
-                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
-                Optional( Suppress("=") + tagAttrValue ) ))) + \
-                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
-    closeTag = Combine(_L("")
-
-    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
-    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
-    openTag.tag = resname
-    closeTag.tag = resname
-    return openTag, closeTag
-
-def makeHTMLTags(tagStr):
-    """
-    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
-    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
-
-    Example::
-        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
-        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
-        a,a_end = makeHTMLTags("A")
-        link_expr = a + SkipTo(a_end)("link_text") + a_end
-        
-        for link in link_expr.searchString(text):
-            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
-            print(link.link_text, '->', link.href)
-    prints::
-        pyparsing -> http://pyparsing.wikispaces.com
-    """
-    return _makeTags( tagStr, False )
-
-def makeXMLTags(tagStr):
-    """
-    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
-    tags only in the given upper/lower case.
-
-    Example: similar to L{makeHTMLTags}
-    """
-    return _makeTags( tagStr, True )
-
-def withAttribute(*args,**attrDict):
-    """
-    Helper to create a validating parse action to be used with start tags created
-    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
-    with a required attribute value, to avoid false matches on common tags such as
-    C{} or C{
}. - - Call C{withAttribute} with a series of attribute names and values. Specify the list - of filter attributes names and values as: - - keyword arguments, as in C{(align="right")}, or - - as an explicit dict with C{**} operator, when an attribute name is also a Python - reserved word, as in C{**{"class":"Customer", "align":"right"}} - - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) - For attribute names with a namespace prefix, you must use the second form. Attribute - names are matched insensitive to upper/lower case. - - If just testing for C{class} (with or without a namespace), use C{L{withClass}}. - - To verify that the attribute exists, but without specifying a value, pass - C{withAttribute.ANY_VALUE} as the value. - - Example:: - html = ''' -
- Some text -
1 4 0 1 0
-
1,3 2,3 1,1
-
this has no type
-
- - ''' - div,div_end = makeHTMLTags("div") - - # only match div tag having a type attribute with value "grid" - div_grid = div().setParseAction(withAttribute(type="grid")) - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - # construct a match with any div tag having a type attribute, regardless of the value - div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - if args: - attrs = args[:] - else: - attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: - if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) - if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % - (attrName, tokens[attrName], attrValue)) - return pa -withAttribute.ANY_VALUE = object() - -def withClass(classname, namespace=''): - """ - Simplified version of C{L{withAttribute}} when matching on a div class - made - difficult because C{class} is a reserved word in Python. - - Example:: - html = ''' -
-            <div>
-            Some text
-            <div class="grid">1 4 0 1 0</div>
-            <div class="graph">1,3 2,3 1,1</div>
-            <div>this &lt;div&gt; has no class</div>
-            </div>
- - ''' - div,div_end = makeHTMLTags("div") - div_grid = div().setParseAction(withClass("grid")) - - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - prints:: - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) - -opAssoc = _Constants() -opAssoc.LEFT = object() -opAssoc.RIGHT = object() - -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """ - Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary or - binary, left- or right-associative. Parse actions can also be attached - to operator expressions. The generated parser will also recognize the use - of parentheses to override operator precedences (see example below). - - Note: if you define a deep operator list, you may see performance issues - when using infixNotation. See L{ParserElement.enablePackrat} for a - mechanism to potentially improve your parser performance. - - Parameters: - - baseExpr - expression representing the most basic element for the nested - - opList - list of tuples, one for each operator precedence level in the - expression grammar; each tuple is of the form - (opExpr, numTerms, rightLeftAssoc, parseAction), where: - - opExpr is the pyparsing expression for the operator; - may also be a string, which will be converted to a Literal; - if numTerms is 3, opExpr is a tuple of two expressions, for the - two operators separating the 3 terms - - numTerms is the number of terms for this operator (must - be 1, 2, or 3) - - rightLeftAssoc is the indicator whether the operator is - right or left associative, using the pyparsing-defined - constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
- - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) - - Example:: - # simple example of four-function arithmetic with ints and variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - prints:: - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") 
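# A runnable condensation of the infixNotation arithmetic example from the docstring
# above; it assumes pyparsing is importable and that pyparsing_common supplies the
# integer and identifier expressions used as operands.
from pyparsing import infixNotation, oneOf, opAssoc, pyparsing_common

integer = pyparsing_common.signed_integer
varname = pyparsing_common.identifier

arith_expr = infixNotation(integer | varname, [
    ('-', 1, opAssoc.RIGHT),                       # unary minus
    (oneOf('* /'), 2, opAssoc.LEFT),               # multiplication / division
    (oneOf('+ -'), 2, opAssoc.LEFT),               # addition / subtraction
])

print(arith_expr.parseString('5+3*6'))             # [[5, '+', [3, '*', 6]]]
print(arith_expr.parseString('(5+3)*6'))           # [[[5, '+', 3], '*', 6]]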
-quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. - - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. - - Example:: - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - prints:: - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = 
Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. - - Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - - A valid block must contain at least one C{blockStatement}. - - Example:: - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - prints:: - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = 
srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) -commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form C{/* ... */}" - -htmlComment = Regex(r"").setName("HTML comment") -"Comment of the form C{}" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form C{// ... (to end of line)}" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" - -javaStyleComment = cppStyleComment -"Same as C{L{cppStyleComment}}" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form C{# ... (to end of line)}" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
- This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """ - Here are some common low-level expressions that may be useful in jump-starting parser development: - - numeric forms (L{integers}, L{reals}, L{scientific notation}) - - common L{programming identifiers} - - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - - ISO8601 L{dates} and L{datetime} - - L{UUID} - - L{comma-separated list} - Parse actions: - - C{L{convertToInteger}} - - C{L{convertToFloat}} - - C{L{convertToDate}} - - C{L{convertToDatetime}} - - C{L{stripHTMLTags}} - - C{L{upcaseTokens}} - - C{L{downcaseTokens}} - - Example:: - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - prints:: - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) - """expression that parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating point number and returns a 
float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) - - Example:: - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - prints:: - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """ - Helper to create a parse action for converting parsed datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) - - Example:: - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - prints:: - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (C{yyyy-mm-dd})" - - iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """ - Parse action to remove HTML tags from web page HTML source - - Example:: - # strip HTML links from normal text - text = 'More info at the
pyparsing wiki page' - td,td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - - print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec = ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/venv/Lib/site-packages/setuptools/_vendor/typing_extensions.py b/venv/Lib/site-packages/setuptools/_vendor/typing_extensions.py deleted file mode 100644 index 9f1c7aa..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/typing_extensions.py +++ /dev/null @@ -1,2296 +0,0 @@ -import abc -import collections -import collections.abc -import operator -import sys -import typing - -# After PEP 560, internal typing API was substantially reworked. -# This is especially important for Protocol class which uses internal APIs -# quite extensively. -PEP_560 = sys.version_info[:3] >= (3, 7, 0) - -if PEP_560: - GenericMeta = type -else: - # 3.6 - from typing import GenericMeta, _type_vars # noqa - -# The two functions below are copies of typing internal helpers. 
-# They are needed by _ProtocolMeta - - -def _no_slots_copy(dct): - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -def _check_generic(cls, parameters): - if not cls.__parameters__: - raise TypeError(f"{cls} is not a generic class") - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};" - f" actual {alen}, expected {elen}") - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'ClassVar', - 'Concatenate', - 'Final', - 'ParamSpec', - 'Self', - 'Type', - - # ABCs (from collections.abc). - 'Awaitable', - 'AsyncIterator', - 'AsyncIterable', - 'Coroutine', - 'AsyncGenerator', - 'AsyncContextManager', - 'ChainMap', - - # Concrete collection types. - 'ContextManager', - 'Counter', - 'Deque', - 'DefaultDict', - 'OrderedDict', - 'TypedDict', - - # Structural checks, a.k.a. protocols. - 'SupportsIndex', - - # One-off things. - 'Annotated', - 'final', - 'IntVar', - 'Literal', - 'NewType', - 'overload', - 'Protocol', - 'runtime', - 'runtime_checkable', - 'Text', - 'TypeAlias', - 'TypeGuard', - 'TYPE_CHECKING', -] - -if PEP_560: - __all__.extend(["get_args", "get_origin", "get_type_hints"]) - -# 3.6.2+ -if hasattr(typing, 'NoReturn'): - NoReturn = typing.NoReturn -# 3.6.0-3.6.1 -else: - class _NoReturn(typing._FinalTypingBase, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - NoReturn = _NoReturn(_root=True) - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = typing.TypeVar('T') # Any type. -KT = typing.TypeVar('KT') # Key type. -VT = typing.TypeVar('VT') # Value type. -T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -ClassVar = typing.ClassVar - -# On older versions of typing there is an internal class named "Final". -# 3.8+ -if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): - Final = typing.Final -# 3.7 -elif sys.version_info[:2] >= (3, 7): - class _FinalForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only single type') - return typing._GenericAlias(self, (item,)) - - Final = _FinalForm('Final', - doc="""A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. 
- For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties.""") -# 3.6 -else: - class _Final(typing._FinalTypingBase, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - f'{cls.__name__[1:]} accepts only single type.'), - _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += f'[{typing._type_repr(self.__type__)}]' - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - Final = _Final(_root=True) - - -# 3.8+ -if hasattr(typing, 'final'): - final = typing.final -# 3.6-3.7 -else: - def final(f): - """This decorator can be used to indicate to type checkers that - the decorated method cannot be overridden, and decorated class - cannot be subclassed. For example: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. - """ - return f - - -def IntVar(name): - return typing.TypeVar(name) - - -# 3.8+: -if hasattr(typing, 'Literal'): - Literal = typing.Literal -# 3.7: -elif sys.version_info[:2] >= (3, 7): - class _LiteralForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - return typing._GenericAlias(self, parameters) - - Literal = _LiteralForm('Literal', - doc="""A type that can be used to indicate to type checkers - that the corresponding value has a value literally equivalent - to the provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to - the value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime - checking verifying that the parameter is actually a value - instead of a type.""") -# 3.6: -else: - class _Literal(typing._FinalTypingBase, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. 
There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - - __slots__ = ('__values__',) - - def __init__(self, values=None, **kwds): - self.__values__ = values - - def __getitem__(self, values): - cls = type(self) - if self.__values__ is None: - if not isinstance(values, tuple): - values = (values,) - return cls(values, _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += f'[{", ".join(map(typing._type_repr, self.__values__))}]' - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, _Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - - Literal = _Literal(_root=True) - - -_overload_dummy = typing._overload_dummy # noqa -overload = typing.overload - - -# This is not a real generic class. Don't use outside annotations. -Type = typing.Type - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - - -class _ExtensionsGenericMeta(GenericMeta): - def __subclasscheck__(self, subclass): - """This mimics a more modern GenericMeta.__subclasscheck__() logic - (that does not have problems with recursion) to work around interactions - between collections, typing, and typing_extensions on older - versions of Python, see https://github.com/python/typing/issues/501. - """ - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if not self.__extra__: - return super().__subclasscheck__(subclass) - res = self.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if self.__extra__ in subclass.__mro__: - return True - for scls in self.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return False - - -Awaitable = typing.Awaitable -Coroutine = typing.Coroutine -AsyncIterable = typing.AsyncIterable -AsyncIterator = typing.AsyncIterator - -# 3.6.1+ -if hasattr(typing, 'Deque'): - Deque = typing.Deque -# 3.6.0 -else: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Deque: - return collections.deque(*args, **kwds) - return typing._generic_new(collections.deque, cls, *args, **kwds) - -ContextManager = typing.ContextManager -# 3.6.2+ -if hasattr(typing, 'AsyncContextManager'): - AsyncContextManager = typing.AsyncContextManager -# 3.6.0-3.6.1 -else: - from _collections_abc import _check_methods as _check_methods_in_mro # noqa - - class AsyncContextManager(typing.Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - return _check_methods_in_mro(C, "__aenter__", "__aexit__") - return NotImplemented - -DefaultDict = typing.DefaultDict - -# 3.7.2+ -if hasattr(typing, 'OrderedDict'): - OrderedDict = typing.OrderedDict -# 3.7.0-3.7.2 -elif (3, 7, 0) <= 
sys.version_info[:3] < (3, 7, 2): - OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) -# 3.6 -else: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is OrderedDict: - return collections.OrderedDict(*args, **kwds) - return typing._generic_new(collections.OrderedDict, cls, *args, **kwds) - -# 3.6.2+ -if hasattr(typing, 'Counter'): - Counter = typing.Counter -# 3.6.0-3.6.1 -else: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Counter: - return collections.Counter(*args, **kwds) - return typing._generic_new(collections.Counter, cls, *args, **kwds) - -# 3.6.1+ -if hasattr(typing, 'ChainMap'): - ChainMap = typing.ChainMap -elif hasattr(collections, 'ChainMap'): - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return typing._generic_new(collections.ChainMap, cls, *args, **kwds) - -# 3.6.1+ -if hasattr(typing, 'AsyncGenerator'): - AsyncGenerator = typing.AsyncGenerator -# 3.6.0 -else: - class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], - metaclass=_ExtensionsGenericMeta, - extra=collections.abc.AsyncGenerator): - __slots__ = () - -NewType = typing.NewType -Text = typing.Text -TYPE_CHECKING = typing.TYPE_CHECKING - - -def _gorg(cls): - """This function exists for compatibility with old typing versions.""" - assert isinstance(cls, GenericMeta) - if hasattr(cls, '_gorg'): - return cls._gorg - while cls.__origin__ is not None: - cls = cls.__origin__ - return cls - - -_PROTO_WHITELIST = ['Callable', 'Awaitable', - 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', - 'ContextManager', 'AsyncContextManager'] - - -def _get_protocol_attrs(cls): - attrs = set() - for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): - continue - annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if (not attr.startswith('_abc_') and attr not in ( - '__abstractmethods__', '__annotations__', '__weakref__', - '_is_protocol', '_is_runtime_protocol', '__dict__', - '__args__', '__slots__', - '__next_in_mro__', '__parameters__', '__origin__', - '__orig_bases__', '__extra__', '__tree_hash__', - '__doc__', '__subclasshook__', '__init__', '__new__', - '__module__', '_MutableMapping__marker', '_gorg')): - attrs.add(attr) - return attrs - - -def _is_callable_members_only(cls): - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - - -# 3.8+ -if hasattr(typing, 'Protocol'): - Protocol = typing.Protocol -# 3.7 -elif PEP_560: - from typing import _collect_type_vars # noqa - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(abc.ABCMeta): - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. - def __instancecheck__(cls, instance): - # We need this method for situations where attributes are - # assigned in __init__. 
- if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): - return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): - return True - return super().__instancecheck__(instance) - - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") - return super().__new__(cls) - - @typing._tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not typing.Tuple: - raise TypeError( - f"Parameter list to {cls.__qualname__}[...] cannot be empty") - msg = "Parameters to generic types must be types." - params = tuple(typing._type_check(p, msg) for p in params) # noqa - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, typing.TypeVar) for p in params): - i = 0 - while isinstance(params[i], typing.TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - f" Parameter {i + 1} is {params[i]}") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. - _check_generic(cls, params) - return typing._GenericAlias(cls, params) - - def __init_subclass__(cls, *args, **kwargs): - tvars = [] - if '__orig_bases__' in cls.__dict__: - error = typing.Generic in cls.__orig_bases__ - else: - error = typing.Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - if '__orig_bases__' in cls.__dict__: - tvars = _collect_type_vars(cls.__orig_bases__) - # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...] and/or Protocol[...]. - gvars = None - for base in cls.__orig_bases__: - if (isinstance(base, typing._GenericAlias) and - base.__origin__ in (typing.Generic, Protocol)): - # for error messages - the_base = base.__origin__.__name__ - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...]" - " and/or Protocol[...] 
multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) - s_args = ', '.join(str(g) for g in gvars) - raise TypeError(f"Some type variables ({s_vars}) are" - f" not listed in {the_base}[{s_args}]") - tvars = gvars - cls.__parameters__ = tuple(tvars) - - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) - - # Set (or override) the protocol subclass hook. - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return - - # Check consistency of bases. - for base in cls.__bases__: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - f' protocols, got {repr(base)}') - cls.__init__ = _no_init -# 3.6 -else: - from typing import _next_in_mro, _type_check # noqa - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(GenericMeta): - """Internal metaclass for Protocol. - - This exists so Protocol classes can be generic without deriving - from Generic. - """ - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - # This is just a version copied from GenericMeta.__new__ that - # includes "Protocol" special treatment. (Comments removed for brevity.) - assert extra is None # Protocols should not have extra - if tvars is not None: - assert origin is not None - assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars - else: - tvars = _type_vars(bases) - gvars = None - for base in bases: - if base is typing.Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ in (typing.Generic, Protocol)): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] or" - " Protocol[...] 
multiple times.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) - s_args = ", ".join(str(g) for g in gvars) - cls_name = "Generic" if any(b.__origin__ is typing.Generic - for b in bases) else "Protocol" - raise TypeError(f"Some type variables ({s_vars}) are" - f" not listed in {cls_name}[{s_args}]") - tvars = gvars - - initial_bases = bases - if (extra is not None and type(extra) is abc.ABCMeta and - extra not in bases): - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b - for b in bases) - if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases): - bases = tuple(b for b in bases if b is not typing.Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, - _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else - _gorg(origin)) - self.__parameters__ = tvars - self.__args__ = tuple(... if a is typing._TypingEllipsis else - () if a is typing._TypingEmpty else - a for a in args) if args else None - self.__next_in_mro__ = _next_in_mro(self) - if orig_bases is None: - self.__orig_bases__ = initial_bases - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - def __init__(cls, *args, **kwargs): - super().__init__(*args, **kwargs) - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol or - isinstance(b, _ProtocolMeta) and - b.__origin__ is Protocol - for b in cls.__bases__) - if cls._is_protocol: - for base in cls.__mro__[1:]: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, typing.TypingMeta) and base._is_protocol or - isinstance(base, GenericMeta) and - base.__origin__ is typing.Generic): - raise TypeError(f'Protocols can only inherit from other' - f' protocols, got {repr(base)}') - - cls.__init__ = _no_init - - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - def __instancecheck__(self, instance): - # We need this method for situations where attributes are - # assigned in __init__. 
- if ((not getattr(self, '_is_protocol', False) or - _is_callable_members_only(self)) and - issubclass(instance.__class__, self)): - return True - if self._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(self, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(self)): - return True - return super(GenericMeta, self).__instancecheck__(instance) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if (self.__dict__.get('_is_protocol', None) and - not self.__dict__.get('_is_runtime_protocol', None)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return False - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if (self.__dict__.get('_is_runtime_protocol', None) and - not _is_callable_members_only(self)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return super(GenericMeta, self).__subclasscheck__(cls) - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - return super(GenericMeta, self).__subclasscheck__(cls) - - @typing._tp_cache - def __getitem__(self, params): - # We also need to copy this from GenericMeta.__getitem__ to get - # special treatment of "Protocol". (Comments removed for brevity.) - if not isinstance(params, tuple): - params = (params,) - if not params and _gorg(self) is not typing.Tuple: - raise TypeError( - f"Parameter list to {self.__qualname__}[...] cannot be empty") - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self in (typing.Generic, Protocol): - if not all(isinstance(p, typing.TypeVar) for p in params): - raise TypeError( - f"Parameters to {repr(self)}[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - f"Parameters to {repr(self)}[...] must all be unique") - tvars = params - args = params - elif self in (typing.Tuple, typing.Callable): - tvars = _type_vars(params) - args = params - elif self.__origin__ in (typing.Generic, Protocol): - raise TypeError(f"Cannot subscript already-subscripted {repr(self)}") - else: - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - class Protocol(metaclass=_ProtocolMeta): - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto(Protocol[T]): - def meth(self) -> T: - ... 
- """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if _gorg(cls) is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can be used only as a base class") - return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -# 3.8+ -if hasattr(typing, 'runtime_checkable'): - runtime_checkable = typing.runtime_checkable -# 3.6-3.7 -else: - def runtime_checkable(cls): - """Mark a protocol class as a runtime protocol, so that it - can be used with isinstance() and issubclass(). Raise TypeError - if applied to a non-protocol class. - - This allows a simple-minded structural check very similar to the - one-offs in collections.abc such as Hashable. - """ - if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: - raise TypeError('@runtime_checkable can be only applied to protocol classes,' - f' got {cls!r}') - cls._is_runtime_protocol = True - return cls - - -# Exists for backwards compatibility. -runtime = runtime_checkable - - -# 3.8+ -if hasattr(typing, 'SupportsIndex'): - SupportsIndex = typing.SupportsIndex -# 3.6-3.7 -else: - @runtime_checkable - class SupportsIndex(Protocol): - __slots__ = () - - @abc.abstractmethod - def __index__(self) -> int: - pass - - -if sys.version_info >= (3, 9, 2): - # The standard library TypedDict in Python 3.8 does not store runtime information - # about which (if any) keys are optional. See https://bugs.python.org/issue38834 - # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" - # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 - TypedDict = typing.TypedDict -else: - def _check_fails(cls, other): - try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', - 'functools', - 'typing']: - # Typed dicts are only for static structural subtyping. 
- raise TypeError('TypedDict does not support instance and class checks') - except (AttributeError, ValueError): - pass - return False - - def _dict_new(*args, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - return dict(*args, **kwargs) - - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' - - def _typeddict_new(*args, total=True, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') - import warnings - warnings.warn("Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - raise TypeError("TypedDict.__new__() missing 1 required positional " - "argument: '_typename'") - if args: - try: - fields, = args # allow the "_fields" keyword be passed - except ValueError: - raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - f'positional arguments but {len(args) + 2} ' - 'were given') - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') - import warnings - warnings.warn("Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - fields = None - - if fields is None: - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") - - ns = {'__annotations__': dict(fields)} - try: - # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return _TypedDictMeta(typename, (), ns, total=total) - - _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' - ' /, *, total=True, **kwargs)') - - class _TypedDictMeta(type): - def __init__(cls, name, bases, ns, total=True): - super().__init__(name, bases, ns) - - def __new__(cls, name, bases, ns, total=True): - # Create new typed dict class object. - # This method is called directly when TypedDict is subclassed, - # or via _typeddict_new when TypedDict is instantiated. This way - # TypedDict supports all three syntaxes described in its docstring. - # Subclasses and instances of TypedDict return actual dictionaries - # via _dict_new. 
- ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - tp_dict = super().__new__(cls, name, (dict,), ns) - - annotations = {} - own_annotations = ns.get('__annotations__', {}) - own_annotation_keys = set(own_annotations.keys()) - msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - own_annotations = { - n: typing._type_check(tp, msg) for n, tp in own_annotations.items() - } - required_keys = set() - optional_keys = set() - - for base in bases: - annotations.update(base.__dict__.get('__annotations__', {})) - required_keys.update(base.__dict__.get('__required_keys__', ())) - optional_keys.update(base.__dict__.get('__optional_keys__', ())) - - annotations.update(own_annotations) - if total: - required_keys.update(own_annotation_keys) - else: - optional_keys.update(own_annotation_keys) - - tp_dict.__annotations__ = annotations - tp_dict.__required_keys__ = frozenset(required_keys) - tp_dict.__optional_keys__ = frozenset(optional_keys) - if not hasattr(tp_dict, '__total__'): - tp_dict.__total__ = total - return tp_dict - - __instancecheck__ = __subclasscheck__ = _check_fails - - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) - TypedDict.__module__ = __name__ - TypedDict.__doc__ = \ - """A simple typed name space. At runtime it is equivalent to a plain dict. - - TypedDict creates a dictionary type that expects all of its - instances to have a certain set of keys, with each key - associated with a value of a consistent type. This expectation - is not checked at runtime but is only enforced by type checkers. - Usage:: - - class Point2D(TypedDict): - x: int - y: int - label: str - - a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK - b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check - - assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') - - The type info can be accessed via the Point2D.__annotations__ dict, and - the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports two additional equivalent forms:: - - Point2D = TypedDict('Point2D', x=int, y=int, label=str) - Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - - The class syntax is only supported in Python 3.6+, while two other - syntax forms work for Python 2.7 and 3.2+ - """ - - -# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints) -if hasattr(typing, 'Annotated'): - Annotated = typing.Annotated - get_type_hints = typing.get_type_hints - # Not exported and not a public API, but needed for get_origin() and get_args() - # to work. - _AnnotatedAlias = typing._AnnotatedAlias -# 3.7-3.8 -elif PEP_560: - class _AnnotatedAlias(typing._GenericAlias, _root=True): - """Runtime representation of an annotated type. - - At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' - with extra annotations. The alias behaves like a normal typing alias, - instantiating is the same as instantiating the underlying type, binding - it to types is also the same. 
- """ - def __init__(self, origin, metadata): - if isinstance(origin, _AnnotatedAlias): - metadata = origin.__metadata__ + metadata - origin = origin.__origin__ - super().__init__(origin, origin) - self.__metadata__ = metadata - - def copy_with(self, params): - assert len(params) == 1 - new_type = params[0] - return _AnnotatedAlias(new_type, self.__metadata__) - - def __repr__(self): - return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " - f"{', '.join(repr(a) for a in self.__metadata__)}]") - - def __reduce__(self): - return operator.getitem, ( - Annotated, (self.__origin__,) + self.__metadata__ - ) - - def __eq__(self, other): - if not isinstance(other, _AnnotatedAlias): - return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ - - def __hash__(self): - return hash((self.__origin__, self.__metadata__)) - - class Annotated: - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type (and will be in - the __origin__ field), the remaining arguments are kept as a tuple in - the __extra__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - - __slots__ = () - - def __new__(cls, *args, **kwargs): - raise TypeError("Type Annotated cannot be instantiated.") - - @typing._tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be used " - "with at least two arguments (a type and an " - "annotation).") - msg = "Annotated[t, ...]: t must be a type." - origin = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return _AnnotatedAlias(origin, metadata) - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - f"Cannot subclass {cls.__module__}.Annotated" - ) - - def _strip_annotations(t): - """Strips the annotations from a given type. - """ - if isinstance(t, _AnnotatedAlias): - return _strip_annotations(t.__origin__) - if isinstance(t, typing._GenericAlias): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - res = t.copy_with(stripped_args) - res._special = t._special - return res - return t - - def get_type_hints(obj, globalns=None, localns=None, include_extras=False): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. 
For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) - if include_extras: - return hint - return {k: _strip_annotations(t) for k, t in hint.items()} -# 3.6 -else: - - def _is_dunder(name): - """Returns True if name is a __dunder_variable_name__.""" - return len(name) > 4 and name.startswith('__') and name.endswith('__') - - # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality - # checks, argument expansion etc. are done on the _subs_tre. As a result we - # can't provide a get_type_hints function that strips out annotations. - - class AnnotatedMeta(typing.GenericMeta): - """Metaclass for Annotated""" - - def __new__(cls, name, bases, namespace, **kwargs): - if any(b is not object for b in bases): - raise TypeError("Cannot subclass " + str(Annotated)) - return super().__new__(cls, name, bases, namespace, **kwargs) - - @property - def __metadata__(self): - return self._subs_tree()[2] - - def _tree_repr(self, tree): - cls, origin, metadata = tree - if not isinstance(origin, tuple): - tp_repr = typing._type_repr(origin) - else: - tp_repr = origin[0]._tree_repr(origin) - metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return f'{cls}[{tp_repr}, {metadata_reprs}]' - - def _subs_tree(self, tvars=None, args=None): # noqa - if self is Annotated: - return Annotated - res = super()._subs_tree(tvars=tvars, args=args) - # Flatten nested Annotated - if isinstance(res[1], tuple) and res[1][0] is Annotated: - sub_tp = res[1][1] - sub_annot = res[1][2] - return (Annotated, sub_tp, sub_annot + res[2]) - return res - - def _get_cons(self): - """Return the class used to create instance of this type.""" - if self.__origin__ is None: - raise TypeError("Cannot get the underlying type of a " - "non-specialized Annotated type.") - tree = self._subs_tree() - while isinstance(tree, tuple) and tree[0] is Annotated: - tree = tree[1] - if isinstance(tree, tuple): - return tree[0] - else: - return tree - - @typing._tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if self.__origin__ is not None: # specializing an instantiated type - return super().__getitem__(params) - elif not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be instantiated " - "with at least two arguments (a type and an " - "annotation).") - else: - msg = "Annotated[t, ...]: t must be a type." - tp = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return self.__class__( - self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars((tp,)), - # Metadata is a tuple so it won't be touched by _replace_args et al. 
- args=(tp, metadata), - origin=self, - ) - - def __call__(self, *args, **kwargs): - cons = self._get_cons() - result = cons(*args, **kwargs) - try: - result.__orig_class__ = self - except AttributeError: - pass - return result - - def __getattr__(self, attr): - # For simplicity we just don't relay all dunder names - if self.__origin__ is not None and not _is_dunder(attr): - return getattr(self._get_cons(), attr) - raise AttributeError(attr) - - def __setattr__(self, attr, value): - if _is_dunder(attr) or attr.startswith('_abc_'): - super().__setattr__(attr, value) - elif self.__origin__ is None: - raise AttributeError(attr) - else: - setattr(self._get_cons(), attr, value) - - def __instancecheck__(self, obj): - raise TypeError("Annotated cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Annotated cannot be used with issubclass().") - - class Annotated(metaclass=AnnotatedMeta): - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type, the remaining - arguments are kept as a tuple in the __metadata__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - -# Python 3.8 has get_origin() and get_args() but those implementations aren't -# Annotated-aware, so we can't use those. Python 3.9's versions don't support -# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. -if sys.version_info[:2] >= (3, 10): - get_origin = typing.get_origin - get_args = typing.get_args -# 3.7-3.9 -elif PEP_560: - try: - # 3.9+ - from typing import _BaseGenericAlias - except ImportError: - _BaseGenericAlias = typing._GenericAlias - try: - # 3.9+ - from typing import GenericAlias - except ImportError: - GenericAlias = typing._GenericAlias - - def get_origin(tp): - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar - and Annotated. Return None for unsupported types. Examples:: - - get_origin(Literal[42]) is Literal - get_origin(int) is None - get_origin(ClassVar[int]) is ClassVar - get_origin(Generic) is Generic - get_origin(Generic[T]) is Generic - get_origin(Union[T, int]) is Union - get_origin(List[Tuple[T, T]][int]) == list - get_origin(P.args) is P - """ - if isinstance(tp, _AnnotatedAlias): - return Annotated - if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias, - ParamSpecArgs, ParamSpecKwargs)): - return tp.__origin__ - if tp is typing.Generic: - return typing.Generic - return None - - def get_args(tp): - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. 
- Examples:: - get_args(Dict[str, int]) == (str, int) - get_args(int) == () - get_args(Union[int, Union[T, int], str][int]) == (int, str) - get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - get_args(Callable[[], T][int]) == ([], int) - """ - if isinstance(tp, _AnnotatedAlias): - return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (typing._GenericAlias, GenericAlias)): - if getattr(tp, "_special", False): - return () - res = tp.__args__ - if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: - res = (list(res[:-1]), res[-1]) - return res - return () - - -# 3.10+ -if hasattr(typing, 'TypeAlias'): - TypeAlias = typing.TypeAlias -# 3.9 -elif sys.version_info[:2] >= (3, 9): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeAliasForm - def TypeAlias(self, parameters): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - raise TypeError(f"{self} is not subscriptable") -# 3.7-3.8 -elif sys.version_info[:2] >= (3, 7): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - TypeAlias = _TypeAliasForm('TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example - above.""") -# 3.6 -else: - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - TypeAlias = _TypeAliasBase(_root=True) - - -# Python 3.10+ has PEP 612 -if hasattr(typing, 'ParamSpecArgs'): - ParamSpecArgs = typing.ParamSpecArgs - ParamSpecKwargs = typing.ParamSpecKwargs -# 3.6-3.9 -else: - class _Immutable: - """Mixin to indicate that object should not be copied.""" - __slots__ = () - - def __copy__(self): - return self - - def __deepcopy__(self, memo): - return self - - class ParamSpecArgs(_Immutable): - """The args for a ParamSpec object. - - Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. - - ParamSpecArgs objects have a reference back to their ParamSpec: - - P.args.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return f"{self.__origin__.__name__}.args" - - class ParamSpecKwargs(_Immutable): - """The kwargs for a ParamSpec object. - - Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. 
- - ParamSpecKwargs objects have a reference back to their ParamSpec: - - P.kwargs.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return f"{self.__origin__.__name__}.kwargs" - -# 3.10+ -if hasattr(typing, 'ParamSpec'): - ParamSpec = typing.ParamSpec -# 3.6-3.9 -else: - - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class ParamSpec(list): - """Parameter specification variable. - - Usage:: - - P = ParamSpec('P') - - Parameter specification variables exist primarily for the benefit of static - type checkers. They are used to forward the parameter types of one - callable to another callable, a pattern commonly found in higher order - functions and decorators. They are only valid when used in ``Concatenate``, - or s the first argument to ``Callable``. In Python 3.10 and higher, - they are also supported in user-defined Generics at runtime. - See class Generic for more information on generic types. An - example for annotating a decorator:: - - T = TypeVar('T') - P = ParamSpec('P') - - def add_logging(f: Callable[P, T]) -> Callable[P, T]: - '''A type-safe decorator to add logging to a function.''' - def inner(*args: P.args, **kwargs: P.kwargs) -> T: - logging.info(f'{f.__name__} was called') - return f(*args, **kwargs) - return inner - - @add_logging - def add_two(x: float, y: float) -> float: - '''Add two numbers together.''' - return x + y - - Parameter specification variables defined with covariant=True or - contravariant=True can be used to declare covariant or contravariant - generic types. These keyword arguments are valid, but their actual semantics - are yet to be decided. See PEP 612 for details. - - Parameter specification variables can be introspected. e.g.: - - P.__name__ == 'T' - P.__bound__ == None - P.__covariant__ == False - P.__contravariant__ == False - - Note that only parameter specification variables defined in global scope can - be pickled. - """ - - # Trick Generic __parameters__. - __class__ = typing.TypeVar - - @property - def args(self): - return ParamSpecArgs(self) - - @property - def kwargs(self): - return ParamSpecKwargs(self) - - def __init__(self, name, *, bound=None, covariant=False, contravariant=False): - super().__init__([self]) - self.__name__ = name - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if bound: - self.__bound__ = typing._type_check(bound, 'Bound must be a type.') - else: - self.__bound__ = None - - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __hash__(self): - return object.__hash__(self) - - def __eq__(self, other): - return self is other - - def __reduce__(self): - return self.__name__ - - # Hack to get typing._type_check to pass. - def __call__(self, *args, **kwargs): - pass - - if not PEP_560: - # Only needed in 3.6. - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - -# 3.6-3.9 -if not hasattr(typing, 'Concatenate'): - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 
- class _ConcatenateGenericAlias(list): - - # Trick Generic into looking into this for __parameters__. - if PEP_560: - __class__ = typing._GenericAlias - else: - __class__ = typing._TypingBase - - # Flag in 3.8. - _special = False - # Attribute in 3.6 and earlier. - _gorg = typing.Generic - - def __init__(self, origin, args): - super().__init__(args) - self.__origin__ = origin - self.__args__ = args - - def __repr__(self): - _type_repr = typing._type_repr - return (f'{_type_repr(self.__origin__)}' - f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') - - def __hash__(self): - return hash((self.__origin__, self.__args__)) - - # Hack to get typing._type_check to pass in Generic. - def __call__(self, *args, **kwargs): - pass - - @property - def __parameters__(self): - return tuple( - tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) - ) - - if not PEP_560: - # Only required in 3.6. - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - typing._get_type_vars(self.__parameters__, tvars) - - -# 3.6-3.9 -@typing._tp_cache -def _concatenate_getitem(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Concatenate of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if not isinstance(parameters[-1], ParamSpec): - raise TypeError("The last parameter to Concatenate should be a " - "ParamSpec variable.") - msg = "Concatenate[arg, ...]: each arg must be a type." - parameters = tuple(typing._type_check(p, msg) for p in parameters) - return _ConcatenateGenericAlias(self, parameters) - - -# 3.10+ -if hasattr(typing, 'Concatenate'): - Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa -# 3.9 -elif sys.version_info[:2] >= (3, 9): - @_TypeAliasForm - def Concatenate(self, parameters): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - return _concatenate_getitem(self, parameters) -# 3.7-8 -elif sys.version_info[:2] >= (3, 7): - class _ConcatenateForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateForm( - 'Concatenate', - doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """) -# 3.6 -else: - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __repr__(self): - return 'typing_extensions.Concatenate' - - class _ConcatenateAliasBase(typing._FinalTypingBase, - metaclass=_ConcatenateAliasMeta, - _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. 
- """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Concatenate cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Concatenate cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.Concatenate' - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateAliasBase(_root=True) - -# 3.10+ -if hasattr(typing, 'TypeGuard'): - TypeGuard = typing.TypeGuard -# 3.9 -elif sys.version_info[:2] >= (3, 9): - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeGuardForm - def TypeGuard(self, parameters): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - item = typing._type_check(parameters, f'{self} accepts only single type.') - return typing._GenericAlias(self, (item,)) -# 3.7-3.8 -elif sys.version_info[:2] >= (3, 7): - class _TypeGuardForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type') - return typing._GenericAlias(self, (item,)) - - TypeGuard = _TypeGuardForm( - 'TypeGuard', - doc="""Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. 
The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """) -# 3.6 -else: - class _TypeGuard(typing._FinalTypingBase, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - f'{cls.__name__[1:]} accepts only a single type.'), - _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += f'[{typing._type_repr(self.__type__)}]' - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _TypeGuard): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - TypeGuard = _TypeGuard(_root=True) - -if hasattr(typing, "Self"): - Self = typing.Self -elif sys.version_info[:2] >= (3, 7): - # Vendored from cpython typing._SpecialFrom - class _SpecialForm(typing._Final, _root=True): - __slots__ = ('_name', '__doc__', '_getitem') - - def __init__(self, getitem): - self._getitem = getitem - self._name = getitem.__name__ - self.__doc__ = getitem.__doc__ - - def __getattr__(self, item): - if item in {'__name__', '__qualname__'}: - return self._name - - raise AttributeError(item) - - def __mro_entries__(self, bases): - raise TypeError(f"Cannot subclass {self!r}") - - def __repr__(self): - return f'typing_extensions.{self._name}' - - def __reduce__(self): - return self._name - - def __call__(self, *args, **kwds): - raise TypeError(f"Cannot instantiate {self!r}") - - def __or__(self, other): - return typing.Union[self, other] - - def __ror__(self, other): - return typing.Union[other, self] - - def __instancecheck__(self, obj): - raise TypeError(f"{self} cannot be used with isinstance()") - - def __subclasscheck__(self, cls): - raise TypeError(f"{self} cannot be used with issubclass()") - - @typing._tp_cache - def __getitem__(self, parameters): - return self._getitem(self, parameters) - - @_SpecialForm - def Self(self, params): - """Used to spell the type of "self" in classes. - - Example:: - - from typing import Self - - class ReturnsSelf: - def parse(self, data: bytes) -> Self: - ... - return self - - """ - - raise TypeError(f"{self} is not subscriptable") -else: - class _Self(typing._FinalTypingBase, _root=True): - """Used to spell the type of "self" in classes. - - Example:: - - from typing import Self - - class ReturnsSelf: - def parse(self, data: bytes) -> Self: - ... - return self - - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError(f"{self} cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError(f"{self} cannot be used with issubclass().") - - Self = _Self(_root=True) - - -if hasattr(typing, 'Required'): - Required = typing.Required - NotRequired = typing.NotRequired -elif sys.version_info[:2] >= (3, 9): - class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_ExtensionsSpecialForm - def Required(self, parameters): - """A special typing construct to mark a key of a total=False TypedDict - as required. 
For example: - - class Movie(TypedDict, total=False): - title: Required[str] - year: int - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - - There is no runtime checking that a required key is actually provided - when instantiating a related TypedDict. - """ - item = typing._type_check(parameters, f'{self._name} accepts only single type') - return typing._GenericAlias(self, (item,)) - - @_ExtensionsSpecialForm - def NotRequired(self, parameters): - """A special typing construct to mark a key of a TypedDict as - potentially missing. For example: - - class Movie(TypedDict): - title: str - year: NotRequired[int] - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - """ - item = typing._type_check(parameters, f'{self._name} accepts only single type') - return typing._GenericAlias(self, (item,)) - -elif sys.version_info[:2] >= (3, 7): - class _RequiredForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, - '{} accepts only single type'.format(self._name)) - return typing._GenericAlias(self, (item,)) - - Required = _RequiredForm( - 'Required', - doc="""A special typing construct to mark a key of a total=False TypedDict - as required. For example: - - class Movie(TypedDict, total=False): - title: Required[str] - year: int - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - - There is no runtime checking that a required key is actually provided - when instantiating a related TypedDict. - """) - NotRequired = _RequiredForm( - 'NotRequired', - doc="""A special typing construct to mark a key of a TypedDict as - potentially missing. For example: - - class Movie(TypedDict): - title: str - year: NotRequired[int] - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - """) -else: - # NOTE: Modeled after _Final's implementation when _FinalTypingBase available - class _MaybeRequired(typing._FinalTypingBase, _root=True): - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, type(self)): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class _Required(_MaybeRequired, _root=True): - """A special typing construct to mark a key of a total=False TypedDict - as required. For example: - - class Movie(TypedDict, total=False): - title: Required[str] - year: int - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - - There is no runtime checking that a required key is actually provided - when instantiating a related TypedDict. 
- """ - - class _NotRequired(_MaybeRequired, _root=True): - """A special typing construct to mark a key of a TypedDict as - potentially missing. For example: - - class Movie(TypedDict): - title: str - year: NotRequired[int] - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - """ - - Required = _Required(_root=True) - NotRequired = _NotRequired(_root=True) diff --git a/venv/Lib/site-packages/setuptools/_vendor/zipp.py b/venv/Lib/site-packages/setuptools/_vendor/zipp.py deleted file mode 100644 index 26b723c..0000000 --- a/venv/Lib/site-packages/setuptools/_vendor/zipp.py +++ /dev/null @@ -1,329 +0,0 @@ -import io -import posixpath -import zipfile -import itertools -import contextlib -import sys -import pathlib - -if sys.version_info < (3, 7): - from collections import OrderedDict -else: - OrderedDict = dict - - -__all__ = ['Path'] - - -def _parents(path): - """ - Given a path with elements separated by - posixpath.sep, generate all parents of that path. - - >>> list(_parents('b/d')) - ['b'] - >>> list(_parents('/b/d/')) - ['/b'] - >>> list(_parents('b/d/f/')) - ['b/d', 'b'] - >>> list(_parents('b')) - [] - >>> list(_parents('')) - [] - """ - return itertools.islice(_ancestry(path), 1, None) - - -def _ancestry(path): - """ - Given a path with elements separated by - posixpath.sep, generate all elements of that path - - >>> list(_ancestry('b/d')) - ['b/d', 'b'] - >>> list(_ancestry('/b/d/')) - ['/b/d', '/b'] - >>> list(_ancestry('b/d/f/')) - ['b/d/f', 'b/d', 'b'] - >>> list(_ancestry('b')) - ['b'] - >>> list(_ancestry('')) - [] - """ - path = path.rstrip(posixpath.sep) - while path and path != posixpath.sep: - yield path - path, tail = posixpath.split(path) - - -_dedupe = OrderedDict.fromkeys -"""Deduplicate an iterable in original order""" - - -def _difference(minuend, subtrahend): - """ - Return items in minuend not in subtrahend, retaining order - with O(1) lookup. - """ - return itertools.filterfalse(set(subtrahend).__contains__, minuend) - - -class CompleteDirs(zipfile.ZipFile): - """ - A ZipFile subclass that ensures that implied directories - are always included in the namelist. - """ - - @staticmethod - def _implied_dirs(names): - parents = itertools.chain.from_iterable(map(_parents, names)) - as_dirs = (p + posixpath.sep for p in parents) - return _dedupe(_difference(as_dirs, names)) - - def namelist(self): - names = super(CompleteDirs, self).namelist() - return names + list(self._implied_dirs(names)) - - def _name_set(self): - return set(self.namelist()) - - def resolve_dir(self, name): - """ - If the name represents a directory, return that name - as a directory (with the trailing slash). - """ - names = self._name_set() - dirname = name + '/' - dir_match = name not in names and dirname in names - return dirname if dir_match else name - - @classmethod - def make(cls, source): - """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ - if isinstance(source, CompleteDirs): - return source - - if not isinstance(source, zipfile.ZipFile): - return cls(_pathlib_compat(source)) - - # Only allow for FastLookup when supplied zipfile is read-only - if 'r' not in source.mode: - cls = CompleteDirs - - source.__class__ = cls - return source - - -class FastLookup(CompleteDirs): - """ - ZipFile subclass to ensure implicit - dirs exist and are resolved rapidly. 
- """ - - def namelist(self): - with contextlib.suppress(AttributeError): - return self.__names - self.__names = super(FastLookup, self).namelist() - return self.__names - - def _name_set(self): - with contextlib.suppress(AttributeError): - return self.__lookup - self.__lookup = super(FastLookup, self)._name_set() - return self.__lookup - - -def _pathlib_compat(path): - """ - For path-like objects, convert to a filename for compatibility - on Python 3.6.1 and earlier. - """ - try: - return path.__fspath__() - except AttributeError: - return str(path) - - -class Path: - """ - A pathlib-compatible interface for zip files. - - Consider a zip file with this structure:: - - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt - - >>> data = io.BytesIO() - >>> zf = zipfile.ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' - - Path accepts the zipfile object itself or a filename - - >>> root = Path(zf) - - From there, several path operations are available. - - Directory iteration (including the zip file itself): - - >>> a, b = root.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') - - name property: - - >>> b.name - 'b' - - join with divide operator: - - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' - - Read text: - - >>> c.read_text() - 'content of c' - - existence: - - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False - - Coercion to string: - - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' - - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. Note these attributes are not - valid and will raise a ``ValueError`` if the zipfile - has no filename. - - >>> root.name - 'abcde.zip' - >>> str(root.filename).replace(os.sep, posixpath.sep) - 'mem/abcde.zip' - >>> str(root.parent) - 'mem' - """ - - __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" - - def __init__(self, root, at=""): - """ - Construct a Path from a ZipFile or filename. - - Note: When the source is an existing ZipFile object, - its type (__class__) will be mutated to a - specialized type. If the caller wishes to retain the - original type, the caller should either create a - separate ZipFile object or pass a filename. - """ - self.root = FastLookup.make(root) - self.at = at - - def open(self, mode='r', *args, pwd=None, **kwargs): - """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). 
- """ - if self.is_dir(): - raise IsADirectoryError(self) - zip_mode = mode[0] - if not self.exists() and zip_mode == 'r': - raise FileNotFoundError(self) - stream = self.root.open(self.at, zip_mode, pwd=pwd) - if 'b' in mode: - if args or kwargs: - raise ValueError("encoding args invalid for binary operation") - return stream - return io.TextIOWrapper(stream, *args, **kwargs) - - @property - def name(self): - return pathlib.Path(self.at).name or self.filename.name - - @property - def suffix(self): - return pathlib.Path(self.at).suffix or self.filename.suffix - - @property - def suffixes(self): - return pathlib.Path(self.at).suffixes or self.filename.suffixes - - @property - def stem(self): - return pathlib.Path(self.at).stem or self.filename.stem - - @property - def filename(self): - return pathlib.Path(self.root.filename).joinpath(self.at) - - def read_text(self, *args, **kwargs): - with self.open('r', *args, **kwargs) as strm: - return strm.read() - - def read_bytes(self): - with self.open('rb') as strm: - return strm.read() - - def _is_child(self, path): - return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") - - def _next(self, at): - return self.__class__(self.root, at) - - def is_dir(self): - return not self.at or self.at.endswith("/") - - def is_file(self): - return self.exists() and not self.is_dir() - - def exists(self): - return self.at in self.root._name_set() - - def iterdir(self): - if not self.is_dir(): - raise ValueError("Can't listdir a file") - subs = map(self._next, self.root.namelist()) - return filter(self._is_child, subs) - - def __str__(self): - return posixpath.join(self.root.filename, self.at) - - def __repr__(self): - return self.__repr.format(self=self) - - def joinpath(self, *other): - next = posixpath.join(self.at, *map(_pathlib_compat, other)) - return self._next(self.root.resolve_dir(next)) - - __truediv__ = joinpath - - @property - def parent(self): - if not self.at: - return self.filename.parent - parent_at = posixpath.dirname(self.at.rstrip('/')) - if parent_at: - parent_at += '/' - return self._next(parent_at) diff --git a/venv/Lib/site-packages/setuptools/archive_util.py b/venv/Lib/site-packages/setuptools/archive_util.py deleted file mode 100644 index 73b2db7..0000000 --- a/venv/Lib/site-packages/setuptools/archive_util.py +++ /dev/null @@ -1,205 +0,0 @@ -"""Utilities for extracting common archive formats""" - -import zipfile -import tarfile -import os -import shutil -import posixpath -import contextlib -from distutils.errors import DistutilsError - -from ._path import ensure_directory - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - - -def default_filter(src, dst): - """The default progress/filter callback; returns True for all files""" - return dst - - -def unpack_archive( - filename, extract_dir, progress_filter=default_filter, - drivers=None): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. 
The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. - - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. - """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % filename) - - paths = { - filename: ('', extract_dir), - } - for base, dirs, files in os.walk(filename): - src, dst = paths[base] - for d in dirs: - paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) - for f in files: - target = os.path.join(dst, f) - target = progress_filter(src + f, target) - if not target: - # skip non-files - continue - ensure_directory(target) - f = os.path.join(base, f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - with zipfile.ZipFile(filename) as z: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' 
in name.split('/'): - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - with open(target, 'wb') as f: - f.write(data) - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) - - -def _resolve_tar_file_or_dir(tar_obj, tar_member_obj): - """Resolve any links and extract link targets as normal files.""" - while tar_member_obj is not None and ( - tar_member_obj.islnk() or tar_member_obj.issym()): - linkpath = tar_member_obj.linkname - if tar_member_obj.issym(): - base = posixpath.dirname(tar_member_obj.name) - linkpath = posixpath.join(base, linkpath) - linkpath = posixpath.normpath(linkpath) - tar_member_obj = tar_obj._getmember(linkpath) - - is_file_or_dir = ( - tar_member_obj is not None and - (tar_member_obj.isfile() or tar_member_obj.isdir()) - ) - if is_file_or_dir: - return tar_member_obj - - raise LookupError('Got unknown file type') - - -def _iter_open_tar(tar_obj, extract_dir, progress_filter): - """Emit member-destination pairs from a tar archive.""" - # don't do any chowning! - tar_obj.chown = lambda *args: None - - with contextlib.closing(tar_obj): - for member in tar_obj: - name = member.name - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name.split('/'): - continue - - prelim_dst = os.path.join(extract_dir, *name.split('/')) - - try: - member = _resolve_tar_file_or_dir(tar_obj, member) - except LookupError: - continue - - final_dst = progress_filter(name, prelim_dst) - if not final_dst: - continue - - if final_dst.endswith(os.sep): - final_dst = final_dst[:-1] - - yield member, final_dst - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError as e: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) from e - - for member, final_dst in _iter_open_tar( - tarobj, extract_dir, progress_filter, - ): - try: - # XXX Ugh - tarobj._extract_member(member, final_dst) - except tarfile.ExtractError: - # chown/chmod/mkfifo/mknode/makedev failed - pass - - return True - - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/venv/Lib/site-packages/setuptools/build_meta.py b/venv/Lib/site-packages/setuptools/build_meta.py deleted file mode 100644 index 5dc65e2..0000000 --- a/venv/Lib/site-packages/setuptools/build_meta.py +++ /dev/null @@ -1,304 +0,0 @@ -"""A PEP 517 interface to setuptools - -Previously, when a user or a command line tool (let's call it a "frontend") -needed to make a request of setuptools to take a certain action, for -example, generating a list of installation requirements, the frontend would -would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line. - -PEP 517 defines a different method of interfacing with setuptools. Rather -than calling "setup.py" directly, the frontend should: - - 1. Set the current directory to the directory with a setup.py file - 2. 
Import this module into a safe python interpreter (one in which - setuptools can potentially set global variables or crash hard). - 3. Call one of the functions defined in PEP 517. - -What each function does is defined in PEP 517. However, here is a "casual" -definition of the functions (this definition should not be relied on for -bug reports or API stability): - - - `build_wheel`: build a wheel in the folder and return the basename - - `get_requires_for_build_wheel`: get the `setup_requires` to build - - `prepare_metadata_for_build_wheel`: get the `install_requires` - - `build_sdist`: build an sdist in the folder and return the basename - - `get_requires_for_build_sdist`: get the `setup_requires` to build - -Again, this is not a formal definition! Just a "taste" of the module. -""" - -import io -import os -import sys -import tokenize -import shutil -import contextlib -import tempfile -import warnings - -import setuptools -import distutils -from ._reqs import parse_strings -from .extern.more_itertools import always_iterable - - -__all__ = ['get_requires_for_build_sdist', - 'get_requires_for_build_wheel', - 'prepare_metadata_for_build_wheel', - 'build_wheel', - 'build_sdist', - '__legacy__', - 'SetupRequirementsError'] - - -class SetupRequirementsError(BaseException): - def __init__(self, specifiers): - self.specifiers = specifiers - - -class Distribution(setuptools.dist.Distribution): - def fetch_build_eggs(self, specifiers): - specifier_list = list(parse_strings(specifiers)) - - raise SetupRequirementsError(specifier_list) - - @classmethod - @contextlib.contextmanager - def patch(cls): - """ - Replace - distutils.dist.Distribution with this class - for the duration of this context. - """ - orig = distutils.core.Distribution - distutils.core.Distribution = cls - try: - yield - finally: - distutils.core.Distribution = orig - - -@contextlib.contextmanager -def no_install_setup_requires(): - """Temporarily disable installing setup_requires - - Under PEP 517, the backend reports build dependencies to the frontend, - and the frontend is responsible for ensuring they're installed. - So setuptools (acting as a backend) should not try to install them. - """ - orig = setuptools._install_setup_requires - setuptools._install_setup_requires = lambda attrs: None - try: - yield - finally: - setuptools._install_setup_requires = orig - - -def _get_immediate_subdirectories(a_dir): - return [name for name in os.listdir(a_dir) - if os.path.isdir(os.path.join(a_dir, name))] - - -def _file_with_extension(directory, extension): - matching = ( - f for f in os.listdir(directory) - if f.endswith(extension) - ) - try: - file, = matching - except ValueError: - raise ValueError( - 'No distribution was found. Ensure that `setup.py` ' - 'is not empty and that it calls `setup()`.') - return file - - -def _open_setup_script(setup_script): - if not os.path.exists(setup_script): - # Supply a default setup.py - return io.StringIO(u"from setuptools import setup; setup()") - - return getattr(tokenize, 'open', open)(setup_script) - - -@contextlib.contextmanager -def suppress_known_deprecation(): - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', 'setup.py install is deprecated') - yield - - -class _BuildMetaBackend: - - @staticmethod - def _fix_config(config_settings): - """ - Ensure config settings meet certain expectations. 
- - >>> fc = _BuildMetaBackend._fix_config - >>> fc(None) - {'--global-option': []} - >>> fc({}) - {'--global-option': []} - >>> fc({'--global-option': 'foo'}) - {'--global-option': ['foo']} - >>> fc({'--global-option': ['foo']}) - {'--global-option': ['foo']} - """ - config_settings = config_settings or {} - config_settings['--global-option'] = list(always_iterable( - config_settings.get('--global-option'))) - return config_settings - - def _get_build_requires(self, config_settings, requirements): - config_settings = self._fix_config(config_settings) - - sys.argv = sys.argv[:1] + ['egg_info'] + \ - config_settings["--global-option"] - try: - with Distribution.patch(): - self.run_setup() - except SetupRequirementsError as e: - requirements += e.specifiers - - return requirements - - def run_setup(self, setup_script='setup.py'): - # Note that we can reuse our build directory between calls - # Correctness comes first, then optimization later - __file__ = setup_script - __name__ = '__main__' - - with _open_setup_script(__file__) as f: - code = f.read().replace(r'\r\n', r'\n') - - exec(compile(code, __file__, 'exec'), locals()) - - def get_requires_for_build_wheel(self, config_settings=None): - return self._get_build_requires( - config_settings, requirements=['wheel']) - - def get_requires_for_build_sdist(self, config_settings=None): - return self._get_build_requires(config_settings, requirements=[]) - - def prepare_metadata_for_build_wheel(self, metadata_directory, - config_settings=None): - sys.argv = sys.argv[:1] + [ - 'dist_info', '--egg-base', metadata_directory] - with no_install_setup_requires(): - self.run_setup() - - dist_info_directory = metadata_directory - while True: - dist_infos = [f for f in os.listdir(dist_info_directory) - if f.endswith('.dist-info')] - - if ( - len(dist_infos) == 0 and - len(_get_immediate_subdirectories(dist_info_directory)) == 1 - ): - - dist_info_directory = os.path.join( - dist_info_directory, os.listdir(dist_info_directory)[0]) - continue - - assert len(dist_infos) == 1 - break - - # PEP 517 requires that the .dist-info directory be placed in the - # metadata_directory. To comply, we MUST copy the directory to the root - if dist_info_directory != metadata_directory: - shutil.move( - os.path.join(dist_info_directory, dist_infos[0]), - metadata_directory) - shutil.rmtree(dist_info_directory, ignore_errors=True) - - return dist_infos[0] - - def _build_with_temp_dir(self, setup_command, result_extension, - result_directory, config_settings): - config_settings = self._fix_config(config_settings) - result_directory = os.path.abspath(result_directory) - - # Build in a temporary directory, then copy to the target. - os.makedirs(result_directory, exist_ok=True) - with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir: - sys.argv = (sys.argv[:1] + setup_command + - ['--dist-dir', tmp_dist_dir] + - config_settings["--global-option"]) - with no_install_setup_requires(): - self.run_setup() - - result_basename = _file_with_extension( - tmp_dist_dir, result_extension) - result_path = os.path.join(result_directory, result_basename) - if os.path.exists(result_path): - # os.rename will fail overwriting on non-Unix. 
- os.remove(result_path) - os.rename(os.path.join(tmp_dist_dir, result_basename), result_path) - - return result_basename - - def build_wheel(self, wheel_directory, config_settings=None, - metadata_directory=None): - with suppress_known_deprecation(): - return self._build_with_temp_dir(['bdist_wheel'], '.whl', - wheel_directory, config_settings) - - def build_sdist(self, sdist_directory, config_settings=None): - return self._build_with_temp_dir(['sdist', '--formats', 'gztar'], - '.tar.gz', sdist_directory, - config_settings) - - -class _BuildMetaLegacyBackend(_BuildMetaBackend): - """Compatibility backend for setuptools - - This is a version of setuptools.build_meta that endeavors - to maintain backwards - compatibility with pre-PEP 517 modes of invocation. It - exists as a temporary - bridge between the old packaging mechanism and the new - packaging mechanism, - and will eventually be removed. - """ - def run_setup(self, setup_script='setup.py'): - # In order to maintain compatibility with scripts assuming that - # the setup.py script is in a directory on the PYTHONPATH, inject - # '' into sys.path. (pypa/setuptools#1642) - sys_path = list(sys.path) # Save the original path - - script_dir = os.path.dirname(os.path.abspath(setup_script)) - if script_dir not in sys.path: - sys.path.insert(0, script_dir) - - # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to - # get the directory of the source code. They expect it to refer to the - # setup.py script. - sys_argv_0 = sys.argv[0] - sys.argv[0] = setup_script - - try: - super(_BuildMetaLegacyBackend, - self).run_setup(setup_script=setup_script) - finally: - # While PEP 517 frontends should be calling each hook in a fresh - # subprocess according to the standard (and thus it should not be - # strictly necessary to restore the old sys.path), we'll restore - # the original path so that the path manipulation does not persist - # within the hook after run_setup is called. 
- sys.path[:] = sys_path - sys.argv[0] = sys_argv_0 - - -# The primary backend -_BACKEND = _BuildMetaBackend() - -get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel -get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist -prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel -build_wheel = _BACKEND.build_wheel -build_sdist = _BACKEND.build_sdist - - -# The legacy backend -__legacy__ = _BuildMetaLegacyBackend() diff --git a/venv/Lib/site-packages/setuptools/cli-32.exe b/venv/Lib/site-packages/setuptools/cli-32.exe deleted file mode 100644 index b1487b7..0000000 Binary files a/venv/Lib/site-packages/setuptools/cli-32.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/cli-64.exe b/venv/Lib/site-packages/setuptools/cli-64.exe deleted file mode 100644 index 675e6bf..0000000 Binary files a/venv/Lib/site-packages/setuptools/cli-64.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/cli-arm64.exe b/venv/Lib/site-packages/setuptools/cli-arm64.exe deleted file mode 100644 index 7a87ce4..0000000 Binary files a/venv/Lib/site-packages/setuptools/cli-arm64.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/cli.exe b/venv/Lib/site-packages/setuptools/cli.exe deleted file mode 100644 index b1487b7..0000000 Binary files a/venv/Lib/site-packages/setuptools/cli.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__init__.py b/venv/Lib/site-packages/setuptools/command/__init__.py deleted file mode 100644 index b966dce..0000000 --- a/venv/Lib/site-packages/setuptools/command/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from distutils.command.bdist import bdist -import sys - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 1dc3b2f..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/alias.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/alias.cpython-39.pyc deleted file mode 100644 index b712987..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/alias.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-39.pyc deleted file mode 100644 index 5e4b473..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-39.pyc deleted file mode 100644 index 95eb2ef..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/build_clib.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/build_clib.cpython-39.pyc deleted file mode 100644 index 89aab03..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/build_clib.cpython-39.pyc and /dev/null differ 
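For context on the backend being removed here: the build_meta module above implements the PEP 517 hook functions, and a frontend drives them by importing the module and calling those hooks. What follows is a minimal, illustrative sketch, not part of this diff; the helper name build_wheel_with_backend and the temporary output directory are arbitrary choices, and real frontends such as pip run each hook in a fresh subprocess with an isolated build environment rather than in-process as shown.

import os
import tempfile

from setuptools import build_meta  # the backend whose vendored copy is deleted above


def build_wheel_with_backend(output_dir):
    """Query build requirements, then ask the backend to build a wheel."""
    # A real frontend would install these extra requirements into an isolated
    # environment before calling build_wheel; here they are only reported.
    print("build requirements:", build_meta.get_requires_for_build_wheel())
    os.makedirs(output_dir, exist_ok=True)
    # Per PEP 517, build_wheel returns the basename of the wheel it wrote
    # into output_dir.
    return build_meta.build_wheel(output_dir)


if __name__ == "__main__":
    print("built:", build_wheel_with_backend(tempfile.mkdtemp()))

Run from a directory containing a setup.py (or setup.cfg) project, this writes a wheel into the temporary directory and echoes its filename.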
diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-39.pyc deleted file mode 100644 index 956831c..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/build_ext.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-39.pyc deleted file mode 100644 index d749a4d..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/build_py.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/develop.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/develop.cpython-39.pyc deleted file mode 100644 index 4fd1f67..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/develop.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/dist_info.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/dist_info.cpython-39.pyc deleted file mode 100644 index a2d7f7d..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/dist_info.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/easy_install.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/easy_install.cpython-39.pyc deleted file mode 100644 index c1c359b..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/easy_install.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-39.pyc deleted file mode 100644 index deac943..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/egg_info.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-39.pyc deleted file mode 100644 index 27ea4e5..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/install.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-39.pyc deleted file mode 100644 index 29eb897..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-39.pyc deleted file mode 100644 index 3f3c56c..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/install_lib.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-39.pyc deleted file mode 100644 index 1856c04..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/install_scripts.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/py36compat.cpython-39.pyc 
b/venv/Lib/site-packages/setuptools/command/__pycache__/py36compat.cpython-39.pyc deleted file mode 100644 index 454f3ef..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/py36compat.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/register.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/register.cpython-39.pyc deleted file mode 100644 index 3112ce9..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/register.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/rotate.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/rotate.cpython-39.pyc deleted file mode 100644 index 5ec41fd..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/rotate.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/saveopts.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/saveopts.cpython-39.pyc deleted file mode 100644 index 9305219..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/saveopts.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-39.pyc deleted file mode 100644 index b4595a8..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/sdist.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-39.pyc deleted file mode 100644 index a099f50..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/setopt.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-39.pyc deleted file mode 100644 index 611b461..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/test.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/upload.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/upload.cpython-39.pyc deleted file mode 100644 index 7eb437d..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/upload.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/__pycache__/upload_docs.cpython-39.pyc b/venv/Lib/site-packages/setuptools/command/__pycache__/upload_docs.cpython-39.pyc deleted file mode 100644 index d5c7539..0000000 Binary files a/venv/Lib/site-packages/setuptools/command/__pycache__/upload_docs.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/command/alias.py b/venv/Lib/site-packages/setuptools/command/alias.py deleted file mode 100644 index 452a924..0000000 --- a/venv/Lib/site-packages/setuptools/command/alias.py +++ /dev/null @@ -1,78 +0,0 @@ -from distutils.errors import DistutilsOptionError - -from setuptools.command.setopt import edit_config, option_base, config_file - - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: - return repr(arg) - if arg.split() != [arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more 
commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.remove and len(self.args) != 1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print("Command Aliases") - print("---------------") - for alias in aliases: - print("setup.py alias", format_alias(alias, aliases)) - return - - elif len(self.args) == 1: - alias, = self.args - if self.remove: - command = None - elif alias in aliases: - print("setup.py alias", format_alias(alias, aliases)) - return - else: - print("No alias definition found for %r" % alias) - return - else: - alias = self.args[0] - command = ' '.join(map(shquote, self.args[1:])) - - edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source + name + ' ' + command diff --git a/venv/Lib/site-packages/setuptools/command/bdist_egg.py b/venv/Lib/site-packages/setuptools/command/bdist_egg.py deleted file mode 100644 index 11a1c6b..0000000 --- a/venv/Lib/site-packages/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,457 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -from distutils.dir_util import remove_tree, mkpath -from distutils import log -from types import CodeType -import sys -import os -import re -import textwrap -import marshal - -from pkg_resources import get_build_platform, Distribution -from setuptools.extension import Library -from setuptools import Command -from .._path import ensure_directory - -from sysconfig import get_path, get_python_version - - -def _get_purelib(): - return get_path("purelib") - - -def strip_module(filename): - if '.' 
in filename: - filename = os.path.splitext(filename)[0] - if filename.endswith('module'): - filename = filename[:-6] - return filename - - -def sorted_walk(dir): - """Do os.walk in a reproducible way, - independent of indeterministic filesystem readdir order - """ - for base, dirs, files in os.walk(dir): - dirs.sort() - files.sort() - yield base, dirs, files - - -def write_stub(resource, pyfile): - _stub_template = textwrap.dedent(""" - def __bootstrap__(): - global __bootstrap__, __loader__, __file__ - import sys, pkg_resources, importlib.util - __file__ = pkg_resources.resource_filename(__name__, %r) - __loader__ = None; del __bootstrap__, __loader__ - spec = importlib.util.spec_from_file_location(__name__,__file__) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - __bootstrap__() - """).lstrip() - with open(pyfile, 'w') as f: - f.write(_stub_template % resource) - - -class bdist_egg(Command): - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - def finalize_options(self): - ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = get_build_platform() - - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename + '.egg') - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - site_packages = os.path.normcase(os.path.realpath(_get_purelib())) - old, self.distribution.data_files = self.distribution.data_files, [] - - for item in old: - if isinstance(item, tuple) and len(item) == 2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized == site_packages or normalized.startswith( - site_packages + os.sep - ): - item = realpath[len(site_packages) + 1:], item[1] - # XXX else: raise ??? 
- self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s", self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - def get_outputs(self): - return [self.egg_output] - - def call_command(self, cmdname, **kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname, self.bdist_dir) - kw.setdefault('skip_build', self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - def run(self): # noqa: C901 # is too complex (14) # FIXME - # Generate metadata first - self.run_command("egg_info") - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. - log.info("installing library code to %s", self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root - instcmd.root = None - if self.distribution.has_c_libraries() and not self.skip_build: - self.run_command('build_clib') - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p, ext_name) in enumerate(ext_outputs): - filename, ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, strip_module(filename) + - '.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s", ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep, '/') - - if to_compile: - cmd.byte_compile(to_compile) - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root, 'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s", script_dir) - self.call_command('install_scripts', install_dir=script_dir, - no_ep=1) - - self.copy_metadata_to(egg_info) - native_libs = os.path.join(egg_info, "native_libs.txt") - if all_outputs: - log.info("writing %s", native_libs) - if not self.dry_run: - ensure_directory(native_libs) - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s", native_libs) - if not self.dry_run: - os.unlink(native_libs) - - write_safety_flag( - os.path.join(archive_root, 'EGG-INFO'), self.zip_safe() - ) - - if os.path.exists(os.path.join(self.egg_info, 'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, mode=self.gen_header()) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution, 'dist_files', []).append( - ('bdist_egg', get_python_version(), self.egg_output)) - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base, dirs, files in walk_egg(self.bdist_dir): - for name in files: - path = os.path.join(base, name) - - if name.endswith('.py'): - log.debug("Deleting %s", path) - os.unlink(path) - - if base.endswith('__pycache__'): - path_old = path - - pattern = r'(?P.+)\.(?P[^.]+)\.pyc' - m = re.match(pattern, name) - path_new = os.path.join( - base, os.pardir, m.group('name') + '.pyc') - log.info( - "Renaming file from [%s] to [%s]" - % (path_old, path_new)) - try: - os.remove(path_new) - except OSError: - pass - os.rename(path_old, path_new) - - def zip_safe(self): - safe = getattr(self.distribution, 'zip_safe', None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def gen_header(self): - return 'w' - - def copy_metadata_to(self, target_dir): - "Copy metadata (egg info) to the target_dir" - # normalize the path (so that a forward-slash in egg_info will - # match using startswith below) - norm_egg_info = os.path.normpath(self.egg_info) - prefix = os.path.join(norm_egg_info, '') - for path in self.ei_cmd.filelist.files: - if path.startswith(prefix): - target = os.path.join(target_dir, path[len(prefix):]) - ensure_directory(target) - self.copy_file(path, target) - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir: ''} - for base, dirs, files in sorted_walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base] + filename) - for filename in dirs: - paths[os.path.join(base, filename)] = (paths[base] + - filename + '/') - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext, Library): - continue - fullname = build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir, filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - -def walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = sorted_walk(egg_dir) - base, dirs, files = next(walker) - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base, dirs, files - for bdf in walker: - yield bdf - - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag, fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)): - return flag - if not can_scan(): - return False - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if 
we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and safe - return safe - - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag, fn in safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe) != flag: - os.unlink(fn) - elif safe is not None and bool(safe) == flag: - f = open(fn, 'wt') - f.write('\n') - f.close() - - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base, name) - if filename[:-1] in stubs: - return True # Extension module - pkg = base[len(egg_dir) + 1:].replace(os.sep, '.') - module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0] - if sys.version_info < (3, 7): - skip = 12 # skip magic & date & file size - else: - skip = 16 # skip magic & reserved? & date & file size - f = open(filename, 'rb') - f.read(skip) - code = marshal.load(f) - f.close() - safe = True - symbols = dict.fromkeys(iter_symbols(code)) - for bad in ['__file__', '__path__']: - if bad in symbols: - log.warn("%s: module references %s", module, bad) - safe = False - if 'inspect' in symbols: - for bad in [ - 'getsource', 'getabsfile', 'getsourcefile', 'getfile' - 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', - 'getinnerframes', 'getouterframes', 'stack', 'trace' - ]: - if bad in symbols: - log.warn("%s: module MAY be using inspect.%s", module, bad) - safe = False - return safe - - -def iter_symbols(code): - """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: - yield name - for const in code.co_consts: - if isinstance(const, str): - yield const - elif isinstance(const, CodeType): - for name in iter_symbols(const): - yield name - - -def can_scan(): - if not sys.platform.startswith('java') and sys.platform != 'cli': - # CPython, PyPy, etc. - return True - log.warn("Unable to analyze compiled code on this platform.") - log.warn("Please ask the author to include a 'zip_safe'" - " setting (either True or False) in the package's setup.py") - - -# Attribute names of options for commands that might need to be convinced to -# install to the egg build directory - -INSTALL_DIRECTORY_ATTRS = [ - 'install_lib', 'install_dir', 'install_data', 'install_base' -] - - -def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, - mode='w'): - """Create a zip file from all the files under 'base_dir'. The output - zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" - Python module (if available) or the InfoZIP "zip" utility (if installed - and found on the default search path). If neither tool is available, - raises DistutilsExecError. Returns the name of the output zip file. 
- """ - import zipfile - - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit(z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir) + 1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'", p) - - compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED - if not dry_run: - z = zipfile.ZipFile(zip_filename, mode, compression=compression) - for dirname, dirs, files in sorted_walk(base_dir): - visit(z, dirname, files) - z.close() - else: - for dirname, dirs, files in sorted_walk(base_dir): - visit(None, dirname, files) - return zip_filename diff --git a/venv/Lib/site-packages/setuptools/command/bdist_rpm.py b/venv/Lib/site-packages/setuptools/command/bdist_rpm.py deleted file mode 100644 index 98bf5de..0000000 --- a/venv/Lib/site-packages/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,40 +0,0 @@ -import distutils.command.bdist_rpm as orig -import warnings - -from setuptools import SetuptoolsDeprecationWarning - - -class bdist_rpm(orig.bdist_rpm): - """ - Override the default bdist_rpm behavior to do the following: - - 1. Run egg_info to ensure the name and version are properly calculated. - 2. Always run 'install' using --single-version-externally-managed to - disable eggs in RPM distributions. - """ - - def run(self): - warnings.warn( - "bdist_rpm is deprecated and will be removed in a future " - "version. Use bdist_wheel (wheel packages) instead.", - SetuptoolsDeprecationWarning, - ) - - # ensure distro name is up-to-date - self.run_command('egg_info') - - orig.bdist_rpm.run(self) - - def _make_spec_file(self): - spec = orig.bdist_rpm._make_spec_file(self) - spec = [ - line.replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ) - for line in spec - ] - return spec diff --git a/venv/Lib/site-packages/setuptools/command/build_clib.py b/venv/Lib/site-packages/setuptools/command/build_clib.py deleted file mode 100644 index 67ce244..0000000 --- a/venv/Lib/site-packages/setuptools/command/build_clib.py +++ /dev/null @@ -1,101 +0,0 @@ -import distutils.command.build_clib as orig -from distutils.errors import DistutilsSetupError -from distutils import log -from setuptools.dep_util import newer_pairwise_group - - -class build_clib(orig.build_clib): - """ - Override the default build_clib behaviour to do the following: - - 1. Implement a rudimentary timestamp-based dependency system - so 'compile()' doesn't run every time. - 2. Add more keys to the 'build_info' dictionary: - * obj_deps - specify dependencies for each object compiled. - this should be a dictionary mapping a key - with the source filename to a list of - dependencies. Use an empty string for global - dependencies. - * cflags - specify a list of additional flags to pass to - the compiler. - """ - - def build_libraries(self, libraries): - for (lib_name, build_info) in libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) - sources = list(sources) - - log.info("building '%s' library", lib_name) - - # Make sure everything is the correct type. 
- # obj_deps should be a dictionary of keys as sources - # and a list/tuple of files that are its dependencies. - obj_deps = build_info.get('obj_deps', dict()) - if not isinstance(obj_deps, dict): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'obj_deps' must be a dictionary of " - "type 'source: list'" % lib_name) - dependencies = [] - - # Get the global dependencies that are specified by the '' key. - # These will go into every source's dependency list. - global_deps = obj_deps.get('', list()) - if not isinstance(global_deps, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'obj_deps' must be a dictionary of " - "type 'source: list'" % lib_name) - - # Build the list to be used by newer_pairwise_group - # each source will be auto-added to its dependencies. - for source in sources: - src_deps = [source] - src_deps.extend(global_deps) - extra_deps = obj_deps.get(source, list()) - if not isinstance(extra_deps, (list, tuple)): - raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'obj_deps' must be a dictionary of " - "type 'source: list'" % lib_name) - src_deps.extend(extra_deps) - dependencies.append(src_deps) - - expected_objects = self.compiler.object_filenames( - sources, - output_dir=self.build_temp, - ) - - if ( - newer_pairwise_group(dependencies, expected_objects) - != ([], []) - ): - # First, compile the source code to object files in the library - # directory. (This should probably change to putting object - # files in a temporary build directory.) - macros = build_info.get('macros') - include_dirs = build_info.get('include_dirs') - cflags = build_info.get('cflags') - self.compiler.compile( - sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - extra_postargs=cflags, - debug=self.debug - ) - - # Now "link" the object files together into a static library. - # (On Unix at least, this isn't really linking -- it just - # builds an archive. Whatever.) - self.compiler.create_static_lib( - expected_objects, - lib_name, - output_dir=self.build_clib, - debug=self.debug - ) diff --git a/venv/Lib/site-packages/setuptools/command/build_ext.py b/venv/Lib/site-packages/setuptools/command/build_ext.py deleted file mode 100644 index c59eff8..0000000 --- a/venv/Lib/site-packages/setuptools/command/build_ext.py +++ /dev/null @@ -1,328 +0,0 @@ -import os -import sys -import itertools -from importlib.machinery import EXTENSION_SUFFIXES -from distutils.command.build_ext import build_ext as _du_build_ext -from distutils.file_util import copy_file -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler, get_config_var -from distutils.errors import DistutilsError -from distutils import log - -from setuptools.extension import Library - -try: - # Attempt to use Cython for building extensions, if available - from Cython.Distutils.build_ext import build_ext as _build_ext - # Additionally, assert that the compiler module will load - # also. Ref #1229. 
- __import__('Cython.Compiler.Main') -except ImportError: - _build_ext = _du_build_ext - -# make sure _config_vars is initialized -get_config_var("LDSHARED") -from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa - - -def _customize_compiler_for_shlib(compiler): - if sys.platform == "darwin": - # building .dylib requires additional compiler flags on OSX; here we - # temporarily substitute the pyconfig.h variables so that distutils' - # 'customize_compiler' uses them before we build the shared libraries. - tmp = _CONFIG_VARS.copy() - try: - # XXX Help! I don't have any idea whether these are right... - _CONFIG_VARS['LDSHARED'] = ( - "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") - _CONFIG_VARS['CCSHARED'] = " -dynamiclib" - _CONFIG_VARS['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _CONFIG_VARS.clear() - _CONFIG_VARS.update(tmp) - else: - customize_compiler(compiler) - - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - import dl - use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') - except ImportError: - pass - - -def if_dl(s): - return s if have_rtld else '' - - -def get_abi3_suffix(): - """Return the file extension for an abi3-compliant Extension()""" - for suffix in EXTENSION_SUFFIXES: - if '.abi3' in suffix: # Unix - return suffix - elif suffix == '.pyd': # Windows - return suffix - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir, - os.path.basename(filename)) - src_filename = os.path.join(self.build_lib, filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. 
- copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - def get_ext_filename(self, fullname): - so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX') - if so_ext: - filename = os.path.join(*fullname.split('.')) + so_ext - else: - filename = _build_ext.get_ext_filename(self, fullname) - so_ext = get_config_var('EXT_SUFFIX') - - if fullname in self.ext_map: - ext = self.ext_map[fullname] - use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix() - if use_abi3: - filename = filename[:-len(so_ext)] - so_ext = get_abi3_suffix() - filename = filename + so_ext - if isinstance(ext, Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn, libtype) - elif use_stubs and ext._links_to_dynamic: - d, fn = os.path.split(filename) - return os.path.join(d, 'dl-' + fn) - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext, Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - - # distutils 3.1 will also ask for module names - # XXX what to do with conflicts? - self.ext_map[fullname.split('.')[-1]] = ext - - ltd = self.shlibs and self.links_to_dynamic(ext) or False - ns = ltd and use_stubs and not isinstance(ext, Library) - ext._links_to_dynamic = ltd - ext._needs_stub = ns - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib, filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - _customize_compiler_for_shlib(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - def get_export_symbols(self, ext): - if isinstance(ext, Library): - return ext.export_symbols - return _build_ext.get_export_symbols(self, ext) - - def build_extension(self, ext): - ext._convert_pyx_sources_to_lang() - _compiler = self.compiler - try: - if isinstance(ext, Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self, ext) - if ext._needs_stub: - cmd = 
self.get_finalized_command('build_py').build_lib - self.write_stub(cmd, ext) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) - return any(pkg + libname in libnames for libname in ext.libraries) - - def get_outputs(self): - return _build_ext.get_outputs(self) + self.__get_stubs_outputs() - - def __get_stubs_outputs(self): - # assemble the base name for each extension that needs a stub - ns_ext_bases = ( - os.path.join(self.build_lib, *ext._full_name.split('.')) - for ext in self.extensions - if ext._needs_stub - ) - # pair each base with the extension - pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) - return list(base + fnext for base, fnext in pairs) - - def __get_output_extensions(self): - yield '.py' - yield '.pyc' - if self.get_finalized_command('build_py').optimize: - yield '.pyo' - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s", ext._full_name, - output_dir) - stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + - '.py') - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file + " already exists! Please delete.") - if not self.dry_run: - f = open(stub_file, 'w') - f.write( - '\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, importlib.util" + - if_dl(", dl"), - " __file__ = pkg_resources.resource_filename" - "(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " spec = importlib.util.spec_from_file_location(", - " __name__, __file__)", - " mod = importlib.util.module_from_spec(spec)", - " spec.loader.exec_module(mod)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ]) - ) - f.close() - if compile: - from distutils.util import byte_compile - - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name == 'nt': - # Build shared libraries - # - def link_shared_object( - self, objects, output_libname, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, - target_lang=None): - self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object( - self, objects, output_libname, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, 
export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, - target_lang=None): - # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... - # libraries=None, library_dirs=None, runtime_library_dirs=None, - # export_symbols=None, extra_preargs=None, extra_postargs=None, - # build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir, filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) diff --git a/venv/Lib/site-packages/setuptools/command/build_py.py b/venv/Lib/site-packages/setuptools/command/build_py.py deleted file mode 100644 index c3fdc09..0000000 --- a/venv/Lib/site-packages/setuptools/command/build_py.py +++ /dev/null @@ -1,242 +0,0 @@ -from glob import glob -from distutils.util import convert_path -import distutils.command.build_py as orig -import os -import fnmatch -import textwrap -import io -import distutils.errors -import itertools -import stat -from setuptools.extern.more_itertools import unique_everseen - - -def make_writable(target): - os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE) - - -class build_py(orig.build_py): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. - See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. - """ - - def finalize_options(self): - orig.build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: - del self.__dict__['data_files'] - self.__updated_files = [] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self, attr): - "lazily compute data files" - if attr == 'data_files': - self.data_files = self._get_data_files() - return self.data_files - return orig.build_py.__getattr__(self, attr) - - def build_module(self, module, module_file, package): - outfile, copied = orig.build_py.build_module(self, module, module_file, package) - if copied: - self.__updated_files.append(outfile) - return outfile, copied - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - return list(map(self._get_pkg_data_files, self.packages or ())) - - def get_data_files_without_manifest(self): - """ - Generate list of ``(package,src_dir,build_dir,filenames)`` tuples, - but without triggering any attempt to analyze or build the manifest. 
- """ - # Prevent eventual errors from unset `manifest_files` - # (that would otherwise be set by `analyze_manifest`) - self.__dict__.setdefault('manifest_files', {}) - return list(map(self._get_pkg_data_files, self.packages or ())) - - def _get_pkg_data_files(self, package): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Strip directory from globbed filenames - filenames = [ - os.path.relpath(file, src_dir) - for file in self.find_data_files(package, src_dir) - ] - return package, src_dir, build_dir, filenames - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - patterns = self._get_platform_patterns( - self.package_data, - package, - src_dir, - ) - globs_expanded = map(glob, patterns) - # flatten the expanded globs into an iterable of matches - globs_matches = itertools.chain.from_iterable(globs_expanded) - glob_files = filter(os.path.isfile, globs_matches) - files = itertools.chain( - self.manifest_files.get(package, []), - glob_files, - ) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - make_writable(target) - srcfile = os.path.abspath(srcfile) - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d, f = os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d != prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f == oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d], []).append(path) - - def get_data_files(self): - pass # Lazily compute data files in _get_data_files() function. - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = orig.build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg == package or pkg.startswith(package + '.'): - break - else: - return init_py - - with io.open(init_py, 'rb') as f: - contents = f.read() - if b'declare_namespace' not in contents: - raise distutils.errors.DistutilsError( - "Namespace package problem: %s is a namespace package, but " - "its\n__init__.py does not call declare_namespace()! 
Please " - 'fix it.\n(See the setuptools manual under ' - '"Namespace Packages" for details.)\n"' % (package,) - ) - return init_py - - def initialize_options(self): - self.packages_checked = {} - orig.build_py.initialize_options(self) - - def get_package_dir(self, package): - res = orig.build_py.get_package_dir(self, package) - if self.distribution.src_root is not None: - return os.path.join(self.distribution.src_root, res) - return res - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - files = list(files) - patterns = self._get_platform_patterns( - self.exclude_package_data, - package, - src_dir, - ) - match_groups = (fnmatch.filter(files, pattern) for pattern in patterns) - # flatten the groups of matches into an iterable of matches - matches = itertools.chain.from_iterable(match_groups) - bad = set(matches) - keepers = (fn for fn in files if fn not in bad) - # ditch dupes - return list(unique_everseen(keepers)) - - @staticmethod - def _get_platform_patterns(spec, package, src_dir): - """ - yield platform-specific path patterns (suitable for glob - or fn_match) from a glob-based spec (such as - self.package_data or self.exclude_package_data) - matching package in src_dir. - """ - raw_patterns = itertools.chain( - spec.get('', []), - spec.get(package, []), - ) - return ( - # Each pattern has to be converted to a platform-specific path - os.path.join(src_dir, convert_path(pattern)) - for pattern in raw_patterns - ) - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - - msg = ( - textwrap.dedent( - """ - Error: setup script specifies an absolute path: - - %s - - setup() arguments must *always* be /-separated paths relative to the - setup.py directory, *never* absolute paths. - """ - ).lstrip() - % path - ) - raise DistutilsSetupError(msg) diff --git a/venv/Lib/site-packages/setuptools/command/develop.py b/venv/Lib/site-packages/setuptools/command/develop.py deleted file mode 100644 index 24fb0a7..0000000 --- a/venv/Lib/site-packages/setuptools/command/develop.py +++ /dev/null @@ -1,193 +0,0 @@ -from distutils.util import convert_path -from distutils import log -from distutils.errors import DistutilsError, DistutilsOptionError -import os -import glob -import io - -import pkg_resources -from setuptools.command.easy_install import easy_install -from setuptools import namespaces -import setuptools - - -class develop(namespaces.DevelopInstaller, easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ("egg-path=", None, "Set the path to be used in the .egg-link file"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - self.uninstall_namespaces() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - self.egg_path = None - easy_install.initialize_options(self) - self.setup_path = None - self.always_copy_from = '.' 
# always copy eggs installed in curdir - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - template = "Please rename %r to %r before using 'develop'" - args = ei.egg_info, ei.broken_egg_info - raise DistutilsError(template % args) - self.args = [ei.egg_name] - - easy_install.finalize_options(self) - self.expand_basedirs() - self.expand_dirs() - # pick up setup-dir .egg files only: no .egg-info - self.package_index.scan(glob.glob('*.egg')) - - egg_link_fn = ei.egg_name + '.egg-link' - self.egg_link = os.path.join(self.install_dir, egg_link_fn) - self.egg_base = ei.egg_base - if self.egg_path is None: - self.egg_path = os.path.abspath(ei.egg_base) - - target = pkg_resources.normalize_path(self.egg_base) - egg_path = pkg_resources.normalize_path( - os.path.join(self.install_dir, self.egg_path) - ) - if egg_path != target: - raise DistutilsOptionError( - "--egg-path must be a relative path from the install" - " directory to " + target - ) - - # Make a distribution for the package's source - self.dist = pkg_resources.Distribution( - target, - pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name=ei.egg_name, - ) - - self.setup_path = self._resolve_setup_path( - self.egg_base, - self.install_dir, - self.egg_path, - ) - - @staticmethod - def _resolve_setup_path(egg_base, install_dir, egg_path): - """ - Generate a path from egg_base back to '.' where the - setup script resides and ensure that path points to the - setup path from $install_dir/$egg_path. - """ - path_to_setup = egg_base.replace(os.sep, '/').rstrip('/') - if path_to_setup != os.curdir: - path_to_setup = '../' * (path_to_setup.count('/') + 1) - resolved = pkg_resources.normalize_path( - os.path.join(install_dir, egg_path, path_to_setup) - ) - if resolved != pkg_resources.normalize_path(os.curdir): - raise DistutilsOptionError( - "Can't get a consistent path to setup script from" - " installation directory", - resolved, - pkg_resources.normalize_path(os.curdir), - ) - return path_to_setup - - def install_for_development(self): - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - if setuptools.bootstrap_install_from: - self.easy_install(setuptools.bootstrap_install_from) - setuptools.bootstrap_install_from = None - - self.install_namespaces() - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - with open(self.egg_link, "w") as f: - f.write(self.egg_path + "\n" + self.setup_path) - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist, not self.no_deps) - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - egg_link_file = open(self.egg_link) - contents = [line.rstrip() for line in egg_link_file] - egg_link_file.close() - if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! 
- log.warn("Note: you must uninstall or replace scripts manually!") - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self, dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... - self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - with io.open(script_path) as strm: - script_text = strm.read() - self.install_script(dist, script_name, script_text, script_path) - - def install_wrapper_scripts(self, dist): - dist = VersionlessRequirement(dist) - return easy_install.install_wrapper_scripts(self, dist) - - -class VersionlessRequirement: - """ - Adapt a pkg_resources.Distribution to simply return the project - name as the 'requirement' so that scripts will work across - multiple versions. - - >>> from pkg_resources import Distribution - >>> dist = Distribution(project_name='foo', version='1.0') - >>> str(dist.as_requirement()) - 'foo==1.0' - >>> adapted_dist = VersionlessRequirement(dist) - >>> str(adapted_dist.as_requirement()) - 'foo' - """ - - def __init__(self, dist): - self.__dist = dist - - def __getattr__(self, name): - return getattr(self.__dist, name) - - def as_requirement(self): - return self.project_name diff --git a/venv/Lib/site-packages/setuptools/command/dist_info.py b/venv/Lib/site-packages/setuptools/command/dist_info.py deleted file mode 100644 index c45258f..0000000 --- a/venv/Lib/site-packages/setuptools/command/dist_info.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -Create a dist_info directory -As defined in the wheel specification -""" - -import os - -from distutils.core import Command -from distutils import log - - -class dist_info(Command): - - description = 'create a .dist-info directory' - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ] - - def initialize_options(self): - self.egg_base = None - - def finalize_options(self): - pass - - def run(self): - egg_info = self.get_finalized_command('egg_info') - egg_info.egg_base = self.egg_base - egg_info.finalize_options() - egg_info.run() - dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info' - log.info("creating '{}'".format(os.path.abspath(dist_info_dir))) - - bdist_wheel = self.get_finalized_command('bdist_wheel') - bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir) diff --git a/venv/Lib/site-packages/setuptools/command/easy_install.py b/venv/Lib/site-packages/setuptools/command/easy_install.py deleted file mode 100644 index 5b73e6e..0000000 --- a/venv/Lib/site-packages/setuptools/command/easy_install.py +++ /dev/null @@ -1,2320 +0,0 @@ -""" -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. 
- -__ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html - -""" - -from glob import glob -from distutils.util import get_platform -from distutils.util import convert_path, subst_vars -from distutils.errors import ( - DistutilsArgError, DistutilsOptionError, - DistutilsError, DistutilsPlatformError, -) -from distutils import log, dir_util -from distutils.command.build_scripts import first_line_re -from distutils.spawn import find_executable -from distutils.command import install -import sys -import os -import zipimport -import shutil -import tempfile -import zipfile -import re -import stat -import random -import textwrap -import warnings -import site -import struct -import contextlib -import subprocess -import shlex -import io -import configparser -import sysconfig - - -from sysconfig import get_path - -from setuptools import SetuptoolsDeprecationWarning - -from setuptools import Command -from setuptools.sandbox import run_setup -from setuptools.command import setopt -from setuptools.archive_util import unpack_archive -from setuptools.package_index import ( - PackageIndex, parse_requirement_arg, URL_SCHEME, -) -from setuptools.command import bdist_egg, egg_info -from setuptools.wheel import Wheel -from pkg_resources import ( - normalize_path, resource_string, - get_distribution, find_distributions, Environment, Requirement, - Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, - VersionConflict, DEVELOP_DIST, -) -import pkg_resources -from .._path import ensure_directory -from ..extern.jaraco.text import yield_lines - - -# Turn on PEP440Warnings -warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'get_exe_prefixes', -] - - -def is_64bit(): - return struct.calcsize("P") == 8 - - -def samefile(p1, p2): - """ - Determine if two paths reference the same file. - - Augments os.path.samefile to work on Windows and - suppresses errors if the path doesn't exist. 
- """ - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - - -def _to_bytes(s): - return s.encode('utf8') - - -def isascii(s): - try: - s.encode('ascii') - return True - except UnicodeError: - return False - - -def _one_liner(text): - return textwrap.dedent(text).strip().replace('\n', '; ') - - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=', 'S', "list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', - "allow building eggs from local checkouts"), - ('version', None, "print version information and exit"), - ('no-find-links', None, - "Don't load find-links defined in packages being installed"), - ('user', None, "install in user site-package '%s'" % site.USER_SITE) - ] - boolean_options = [ - 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', - 'editable', - 'no-deps', 'local-snapshots-ok', 'version', - 'user' - ] - - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - warnings.warn( - "easy_install command is deprecated. " - "Use build and pip and other standards-based tools.", - EasyInstallDeprecationWarning, - ) - - # the --user option seems to be an opt-in one, - # so the default should be False. 
- self.user = 0 - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - self.version = None - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_base = None - self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None - self.no_find_links = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.site_dirs = None - self.installed_projects = {} - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. - self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - extant_blockers = ( - filename for filename in blockers - if os.path.exists(filename) or os.path.islink(filename) - ) - list(map(self._delete_path, extant_blockers)) - - def _delete_path(self, path): - log.info("Deleting %s", path) - if self.dry_run: - return - - is_tree = os.path.isdir(path) and not os.path.islink(path) - remover = rmtree if is_tree else os.unlink - remover(path) - - @staticmethod - def _render_version(): - """ - Render the Setuptools version and installation details, then exit. 
- """ - ver = '{}.{}'.format(*sys.version_info) - dist = get_distribution('setuptools') - tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})' - print(tmpl.format(**locals())) - raise SystemExit() - - def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME - self.version and self._render_version() - - py_version = sys.version.split()[0] - - self.config_vars = dict(sysconfig.get_config_vars()) - - self.config_vars.update({ - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}', - 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}', - 'sys_prefix': self.config_vars['prefix'], - 'sys_exec_prefix': self.config_vars['exec_prefix'], - # Only python 3.2+ has abiflags - 'abiflags': getattr(sys, 'abiflags', ''), - 'platlibdir': getattr(sys, 'platlibdir', 'lib'), - }) - with contextlib.suppress(AttributeError): - # only for distutils outside stdlib - self.config_vars.update({ - 'implementation_lower': install._get_implementation().lower(), - 'implementation': install._get_implementation(), - }) - - # pypa/distutils#113 Python 3.9 compat - self.config_vars.setdefault( - 'py_version_nodot_plat', - getattr(sys, 'windir', '').replace('.', ''), - ) - - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - elif self.user: - log.warn("WARNING: The user site-packages directory is disabled.") - - self._fix_install_dir_for_user_site() - - self.expand_basedirs() - self.expand_dirs() - - self._expand( - 'install_dir', 'script_dir', 'build_directory', - 'site_dirs', - ) - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - if self.no_find_links is None: - self.no_find_links = False - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. - self.set_undefined_options( - 'install_lib', ('install_dir', 'install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options( - 'install_scripts', ('install_dir', 'script_dir') - ) - - if self.user and self.install_purelib: - self.install_dir = self.install_purelib - self.script_dir = self.install_scripts - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? 
It's not used - # elsewhere - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in - self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d + " (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: - self.check_site_dir() - self.index_url = self.index_url or "https://pypi.org/simple/" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path=self.shadow_path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path + sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, str): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path + sys.path) - if not self.no_find_links: - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize', 'optimize')) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): - raise ValueError - except ValueError as e: - raise DistutilsOptionError( - "--optimize must be 0, 1, or 2" - ) from e - - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified (see --help)") - - self.outputs = [] - - def _fix_install_dir_for_user_site(self): - """ - Fix the install_dir if "--user" was used. 
- """ - if not self.user or not site.ENABLE_USER_SITE: - return - - self.create_home_path() - if self.install_userbase is None: - msg = "User base directory is not specified" - raise DistutilsPlatformError(msg) - self.install_base = self.install_platbase = self.install_userbase - scheme_name = f'{os.name}_user' - self.select_scheme(scheme_name) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - dirs = [ - 'install_purelib', - 'install_platlib', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - ] - self._expand_attrs(dirs) - - def run(self, show_deprecation=True): - if show_deprecation: - self.announce( - "WARNING: The easy_install command is deprecated " - "and will be removed in a future version.", - log.WARN, - ) - if self.verbose != self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except Exception: - pid = random.randint(0, sys.maxsize) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - pass - - def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME - """Verify that self.install_dir is .pth-capable dir, if needed""" - - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir, 'easy-install.pth') - - if not os.path.exists(instdir): - try: - os.makedirs(instdir) - except (OSError, IOError): - self.cant_write_to_target() - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? - is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir and not self.multi_version: - # No? 
Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname() + '.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: - os.unlink(testfile) - open(testfile, 'w').close() - os.unlink(testfile) - except (OSError, IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir with easy_install - pythonpath = os.environ.get('PYTHONPATH', '') - log.warn(self.__no_default_msg, self.install_dir, pythonpath) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - if self.multi_version and not os.path.exists(pth_file): - self.pth_file = None # don't create a .pth file - self.install_dir = instdir - - __cant_write_msg = textwrap.dedent(""" - can't create or remove files in install directory - - The following error occurred while trying to add or remove files in the - installation directory: - - %s - - The installation directory you specified (via --install-dir, --prefix, or - the distutils default setting) was: - - %s - """).lstrip() # noqa - - __not_exists_id = textwrap.dedent(""" - This directory does not currently exist. Please create it and try again, or - choose a different installation directory (using the -d or --install-dir - option). - """).lstrip() # noqa - - __access_msg = textwrap.dedent(""" - Perhaps your account does not have write access to this directory? If the - installation directory is a system-owned directory, you may need to sign in - as the administrator or "root" account. If you do not have administrative - access to this machine, you may wish to choose a different installation - directory, preferably one that is listed in your PYTHONPATH environment - variable. - - For information on other options, you may wish to consult the - documentation at: - - https://setuptools.pypa.io/en/latest/deprecated/easy_install.html - - Please make the appropriate changes for your system and try again. - """).lstrip() # noqa - - def cant_write_to_target(self): - msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += '\n' + self.__not_exists_id - else: - msg += '\n' + self.__access_msg - raise DistutilsError(msg) - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. 
dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname() + ".pth" - ok_file = pth_file + '.ok' - ok_exists = os.path.exists(ok_file) - tmpl = _one_liner(""" - import os - f = open({ok_file!r}, 'w') - f.write('OK') - f.close() - """) + '\n' - try: - if ok_exists: - os.unlink(ok_file) - dirname = os.path.dirname(ok_file) - os.makedirs(dirname, exist_ok=True) - f = open(pth_file, 'w') - except (OSError, IOError): - self.cant_write_to_target() - else: - try: - f.write(tmpl.format(**locals())) - f.close() - f = None - executable = sys.executable - if os.name == 'nt': - dirname, basename = os.path.split(executable) - alt = os.path.join(dirname, 'pythonw.exe') - use_alt = ( - basename.lower() == 'python.exe' and - os.path.exists(alt) - ) - if use_alt: - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - - spawn([executable, '-E', '-c', 'pass'], 0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: - f.close() - if os.path.exists(ok_file): - os.unlink(ok_file) - if os.path.exists(pth_file): - os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for `dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - if dist.metadata_isdir('scripts/' + script_name): - # The "script" is a directory, likely a Python 3 - # __pycache__ directory, so skip it. - continue - self.install_script( - dist, script_name, - dist.get_metadata('scripts/' + script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base, filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." 
- % (spec,) - ) - - def check_editable(self, spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - @contextlib.contextmanager - def _tmpdir(self): - tmpdir = tempfile.mkdtemp(prefix=u"easy_install-") - try: - # cast to str as workaround for #709 and #710 and #712 - yield str(tmpdir) - finally: - os.path.exists(tmpdir) and rmtree(tmpdir) - - def easy_install(self, spec, deps=False): - with self._tmpdir() as tmpdir: - if not isinstance(spec, Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - dl = self.package_index.download(spec, tmpdir) - return self.install_item(None, dl, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, - not self.always_copy, self.local_index - ) - if dist is None: - msg = "Could not find suitable distribution for %r" % spec - if self.always_copy: - msg += " (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence == DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. 
- for dist in self.local_index[spec.project_name]: - if dist.location == download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.egg_distribution(download)] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - def select_scheme(self, name): - try: - install._select_scheme(self, name) - except AttributeError: - # stdlib distutils - install.install.select_scheme(self, name.replace('posix', 'unix')) - - # FIXME: 'easy_install.process_distribution' is too complex (12) - def process_distribution( # noqa: C901 - self, requirement, dist, deps=True, *info, - ): - self.update_pth(dist) - self.package_index.add(dist) - if dist in self.local_index[dist.key]: - self.local_index.remove(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.info(self.installation_report(requirement, dist, *info)) - if (dist.has_metadata('dependency_links.txt') and - not self.no_find_links): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = Requirement(str(distreq)) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound as e: - raise DistutilsError(str(e)) from e - except VersionConflict as e: - raise DistutilsError(e.report()) from e - if self.always_copy or self.always_copy_from: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - log.info("Finished processing dependencies for %s", requirement) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - msg = ( - "%r already exists in %s; build directory %s will not be kept" - ) - log.warn(msg, spec.key, self.build_directory, setup_base) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename) == setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents) == 1: - dist_filename = os.path.join(setup_base, contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst) - shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if self.exclude_scripts: - return - for args in ScriptWriter.best().get_args(dist): - self.write_script(*args) - - def 
install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - is_script = is_python_script(script_text, script_name) - - if is_script: - body = self._load_template(dev_path) % locals() - script_text = ScriptWriter.get_header(script_text) + body - self.write_script(script_name, _to_bytes(script_text), 'b') - - @staticmethod - def _load_template(dev_path): - """ - There are a couple of template scripts in the package. This - function loads one of them and prepares it for use. - """ - # See https://github.com/pypa/setuptools/issues/134 for info - # on script file naming and downstream issues with SVR4 - name = 'script.tmpl' - if dev_path: - name = name.replace('.tmpl', ' (dev).tmpl') - - raw_bytes = resource_string('setuptools', name) - return raw_bytes.decode('utf-8') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir, x) for x in blockers] - ) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - if self.dry_run: - return - - mask = current_umask() - ensure_directory(target) - if os.path.exists(target): - os.unlink(target) - with open(target, "w" + mode) as f: - f.write(contents) - chmod(target, 0o777 - mask) - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - installer_map = { - '.egg': self.install_egg, - '.exe': self.install_exe, - '.whl': self.install_wheel, - } - try: - install_dist = installer_map[ - dist_filename.lower()[-4:] - ] - except KeyError: - pass - else: - return [install_dist(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups = glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % - os.path.abspath(dist_filename) - ) - if len(setups) > 1: - raise DistutilsError( - "Multiple setup scripts in %s" % - os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path, os.path.join(egg_path, - 'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path, metadata=metadata) - - # FIXME: 'easy_install.install_egg' is too complex (11) - def install_egg(self, egg_path, tmpdir): # noqa: C901 - destination = os.path.join( - self.install_dir, - os.path.basename(egg_path), - ) - destination = os.path.abspath(destination) - if not self.dry_run: - 
ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute( - os.unlink, - (destination,), - "Removing " + destination, - ) - try: - new_dist_is_zipped = False - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f, m = shutil.move, "Moving" - else: - f, m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f, m = self.unpack_and_compile, "Extracting" - else: - new_dist_is_zipped = True - if egg_path.startswith(tmpdir): - f, m = shutil.move, "Moving" - else: - f, m = shutil.copy2, "Copying" - self.execute( - f, - (egg_path, destination), - (m + " %s to %s") % ( - os.path.basename(egg_path), - os.path.dirname(destination) - ), - ) - update_dist_caches( - destination, - fix_zipimporter_caches=new_dist_is_zipped, - ) - except Exception: - update_dist_caches(destination, fix_zipimporter_caches=False) - raise - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution( - None, - project_name=cfg.get('metadata', 'name'), - version=cfg.get('metadata', 'version'), platform=get_platform(), - ) - - # Convert the .exe to an unpacked egg - egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg') - dist.location = egg_path - egg_tmp = egg_path + '.tmp' - _egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(_egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf, 'w') - f.write('Metadata-Version: 1.0\n') - for k, v in cfg.items('metadata'): - if k != 'target_version': - f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) - f.close() - script_dir = os.path.join(_egg_info, 'scripts') - # delete entry-point scripts to avoid duping - self.delete_blockers([ - os.path.join(script_dir, args[0]) - for args in ScriptWriter.get_args(dist) - ]) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run, - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - # FIXME: 'easy_install.exe_to_egg' is too complex (12) - def exe_to_egg(self, dist_filename, egg_tmp): # noqa: C901 - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - - def process(src, dst): - s = src.lower() - for old, new in prefixes: - if s.startswith(old): - src = new + src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old != 'SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - 
if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile) - stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag( - os.path.join(egg_tmp, 'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level', 'native_libs': - if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') - if not os.path.exists(txt): - f = open(txt, 'w') - f.write('\n'.join(locals()[name]) + '\n') - f.close() - - def install_wheel(self, wheel_path, tmpdir): - wheel = Wheel(wheel_path) - assert wheel.is_compatible() - destination = os.path.join(self.install_dir, wheel.egg_name()) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute( - os.unlink, - (destination,), - "Removing " + destination, - ) - try: - self.execute( - wheel.install_as_egg, - (destination,), - ("Installing %s to %s") % ( - os.path.basename(wheel_path), - os.path.dirname(destination) - ), - ) - finally: - update_dist_caches(destination, fix_zipimporter_caches=False) - self.add_output(destination) - return self.egg_distribution(destination) - - __mv_warning = textwrap.dedent(""" - Because this distribution was installed --multi-version, before you can - import modules from this package in an application, you will need to - 'import pkg_resources' and then use a 'require()' call similar to one of - these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher - """).lstrip() # noqa - - __id_warning = textwrap.dedent(""" - Note also that the installation directory must be on sys.path at runtime for - this to work. (e.g. by being the application's script directory, by being on - PYTHONPATH, or by being added to sys.path by your code.) - """) # noqa - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += '\n' + self.__mv_warning - if self.install_dir not in map(normalize_path, sys.path): - msg += '\n' + self.__id_warning - - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - __editable_msg = textwrap.dedent(""" - Extracted editable version of %(spec)s to %(dirname)s - - If it uses setuptools in its setup script, you can activate it in - "development" mode by going to that directory and running:: - - %(python)s setup.py develop - - See the setuptools documentation for the "develop" command for more info. 
- """).lstrip() # noqa - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return '\n' + self.__editable_msg % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose > 2: - v = 'v' * (self.verbose - 1) - args.insert(0, '-' + v) - elif self.verbose < 2: - args.insert(0, '-q') - if self.dry_run: - args.insert(0, '-n') - log.info( - "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit as v: - raise DistutilsError( - "Setup script exited with %s" % (v.args[0],) - ) from v - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - self._set_fetcher_options(os.path.dirname(setup_script)) - args.append(dist_dir) - - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def _set_fetcher_options(self, base): - """ - When easy_install is about to run bdist_egg on a source dist, that - source dist might have 'setup_requires' directives, requiring - additional fetching. Ensure the fetcher options given to easy_install - are available to that command as well. - """ - # find the fetch options from easy_install and write them out - # to the setup.cfg file. - ei_opts = self.distribution.get_option_dict('easy_install').copy() - fetch_directives = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts', - ) - fetch_options = {} - for key, val in ei_opts.items(): - if key not in fetch_directives: - continue - fetch_options[key] = val[1] - # create a settings dictionary suitable for `edit_config` - settings = dict(easy_install=fetch_options) - cfg_filename = os.path.join(base, 'setup.cfg') - setopt.edit_config(cfg_filename, settings) - - def update_pth(self, dist): # noqa: C901 # is too complex (11) # FIXME - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if not self.multi_version and d.location == dist.location: - continue - - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist, - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if self.dry_run: - return - - self.pth_file.save() - - if dist.key != 'setuptools': - return - - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? 
- filename = os.path.join(self.install_dir, 'setuptools.pth') - if os.path.islink(filename): - os.unlink(filename) - with open(filename, 'wt') as f: - f.write(self.pth_file.make_relative(dist.location) + '\n') - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = [] - to_chmod = [] - - def pf(src, dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src, dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if sys.dont_write_bytecode: - return - - from distutils.util import byte_compile - - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run, - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - __no_default_msg = textwrap.dedent(""" - bad install directory or PYTHONPATH - - You are attempting to install a package to a directory that is not - on PYTHONPATH and which Python does not read ".pth" files from. The - installation directory you specified (via --install-dir, --prefix, or - the distutils default setting) was: - - %s - - and your PYTHONPATH environment variable currently contains: - - %r - - Here are some of your options for correcting the problem: - - * You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - - * You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - - * You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - https://setuptools.pypa.io/en/latest/deprecated/easy_install.html#custom-installation-locations - - - Please make the appropriate changes for your system and try again. 
- """).strip() - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for path in only_strs(self.config_vars.values()): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0o700)" % path) - os.makedirs(path, 0o700) - - INSTALL_SCHEMES = dict( - posix=dict( - install_dir='$base/lib/python$py_version_short/site-packages', - script_dir='$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir='$base/Lib/site-packages', - script_dir='$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = dict(config_vars) - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) - for attr, val in scheme.items(): - if getattr(self, attr, None) is None: - setattr(self, attr, val) - - from distutils.util import subst_vars - - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - - -def _pythonpath(): - items = os.environ.get('PYTHONPATH', '').split(os.pathsep) - return filter(None, items) - - -def get_site_dirs(): - """ - Return a list of 'site' dirs - """ - - sitedirs = [] - - # start with PYTHONPATH - sitedirs.extend(_pythonpath()) - - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if not prefix: - continue - - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([ - os.path.join( - prefix, - "lib", - "python{}.{}".format(*sys.version_info), - "site-packages", - ), - os.path.join(prefix, "lib", "site-python"), - ]) - else: - sitedirs.extend([ - prefix, - os.path.join(prefix, "lib", "site-packages"), - ]) - if sys.platform != 'darwin': - continue - - # for framework builds *only* we add the standard Apple - # locations. 
Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' not in prefix: - continue - - home = os.environ.get('HOME') - if not home: - continue - - home_sp = os.path.join( - home, - 'Library', - 'Python', - '{}.{}'.format(*sys.version_info), - 'site-packages', - ) - sitedirs.append(home_sp) - lib_paths = get_path('purelib'), get_path('platlib') - - sitedirs.extend(s for s in lib_paths if s not in sitedirs) - - if site.ENABLE_USER_SITE: - sitedirs.append(site.USER_SITE) - - with contextlib.suppress(AttributeError): - sitedirs.extend(site.getsitepackages()) - - sitedirs = list(map(normalize_path, sitedirs)) - - return sitedirs - - -def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth', 'setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname, name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if line.startswith("import"): - continue - - line = normalize_path(line.rstrip()) - if line in seen: - continue - - seen[line] = 1 - if not os.path.isdir(line): - continue - - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a configparser.RawConfigParser, or None - """ - f = open(dist_filename, 'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] - if prepended < 12: # no wininst data here - return None - f.seek(prepended - 12) - - tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) - if tag not in (0x1234567A, 0x1234567B): - return None # not a valid tag - - f.seek(prepended - (12 + cfglen)) - init = {'version': '', 'target_version': ''} - cfg = configparser.RawConfigParser(init) - try: - part = f.read(cfglen) - # Read up to the first null byte. - config = part.split(b'\0', 1)[0] - # Now the config is in unicode, but initially not sure what the - # encoding is. - config = config.decode(sys.getfilesystemencoding()) - cfg.read_string(config) - except configparser.Error: - return None - if not cfg.has_section('metadata') or not cfg.has_section('Setup'): - return None - return cfg - finally: - f.close() - - -def get_exe_prefixes(exe_filename): - """Get exe->egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), - ('PLATLIB/pywin32_system32', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/'), - ('DATA/lib/site-packages', ''), - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts) == 3 and parts[2] == 'PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts) != 2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0].upper() in ('PURELIB', 'PLATLIB'): - contents = z.read(name).decode() - for pth in yield_lines(contents): - pth = pth.strip().replace('\\', '/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0], pth)), '')) - finally: - z.close() - prefixes = [(x.lower(), y) for x, y in prefixes] - prefixes.sort() - prefixes.reverse() - return prefixes - - -class PthDistributions(Environment): - """A .pth file with Distribution paths in it""" - - dirty = False - - def __init__(self, filename, sitedirs=()): - self.filename = filename - self.sitedirs = list(map(normalize_path, sitedirs)) - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load() - super().__init__([], None, None) - for path in yield_lines(self.paths): - list(map(self.add, find_distributions(path, True))) - - def _load(self): -
self.paths = [] - saw_import = False - seen = dict.fromkeys(self.sitedirs) - if os.path.isfile(self.filename): - f = open(self.filename, 'rt') - for line in f: - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir, path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - f.close() - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - rel_paths = list(map(self.make_relative, self.paths)) - if rel_paths: - log.debug("Saving %s", self.filename) - lines = self._wrap_lines(rel_paths) - data = '\n'.join(lines) + '\n' - - if os.path.islink(self.filename): - os.unlink(self.filename) - with open(self.filename, 'wt') as f: - f.write(data) - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - @staticmethod - def _wrap_lines(lines): - return lines - - def add(self, dist): - """Add `dist` to the distribution map""" - new_path = ( - dist.location not in self.paths and ( - dist.location not in self.sitedirs or - # account for '.' being in PYTHONPATH - dist.location == os.getcwd() - ) - ) - if new_path: - self.paths.append(dist.location) - self.dirty = True - Environment.add(self, dist) - - def remove(self, dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location) - self.dirty = True - Environment.remove(self, dist) - - def make_relative(self, path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep == '/' and '/' or os.sep - while len(npath) >= baselen: - if npath == self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - - -class RewritePthDistributions(PthDistributions): - @classmethod - def _wrap_lines(cls, lines): - yield cls.prelude - for line in lines: - yield line - yield cls.postlude - - prelude = _one_liner(""" - import sys - sys.__plen = len(sys.path) - """) - postlude = _one_liner(""" - import sys - new = sys.path[sys.__plen:] - del sys.path[sys.__plen:] - p = getattr(sys, '__egginsert', 0) - sys.path[p:p] = new - sys.__egginsert = p + len(new) - """) - - -if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': - PthDistributions = RewritePthDistributions - - -def _first_line_re(): - """ - Return a regular expression based on first_line_re suitable for matching - strings. - """ - if isinstance(first_line_re.pattern, str): - return first_line_re - - # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. - return re.compile(first_line_re.pattern.decode()) - - -def auto_chmod(func, arg, exc): - if func in [os.unlink, os.remove] and os.name == 'nt': - chmod(arg, stat.S_IWRITE) - return func(arg) - et, ev, _ = sys.exc_info() - # TODO: This code doesn't make sense. What is it trying to do? 
- raise (ev[0], ev[1] + (" %s %s" % (func, arg))) - - -def update_dist_caches(dist_path, fix_zipimporter_caches): - """ - Fix any globally cached `dist_path` related data - - `dist_path` should be a path of a newly installed egg distribution (zipped - or unzipped). - - sys.path_importer_cache contains finder objects that have been cached when - importing data from the original distribution. Any such finders need to be - cleared since the replacement distribution might be packaged differently, - e.g. a zipped egg distribution might get replaced with an unzipped egg - folder or vice versa. Having the old finders cached may then cause Python - to attempt loading modules from the replacement distribution using an - incorrect loader. - - zipimport.zipimporter objects are Python loaders charged with importing - data packaged inside zip archives. If stale loaders referencing the - original distribution, are left behind, they can fail to load modules from - the replacement distribution. E.g. if an old zipimport.zipimporter instance - is used to load data from a new zipped egg archive, it may cause the - operation to attempt to locate the requested data in the wrong location - - one indicated by the original distribution's zip archive directory - information. Such an operation may then fail outright, e.g. report having - read a 'bad local file header', or even worse, it may fail silently & - return invalid data. - - zipimport._zip_directory_cache contains cached zip archive directory - information for all existing zipimport.zipimporter instances and all such - instances connected to the same archive share the same cached directory - information. - - If asked, and the underlying Python implementation allows it, we can fix - all existing zipimport.zipimporter instances instead of having to track - them down and remove them one by one, by updating their shared cached zip - archive directory information. This, of course, assumes that the - replacement distribution is packaged as a zipped egg. - - If not asked to fix existing zipimport.zipimporter instances, we still do - our best to clear any remaining zipimport.zipimporter related cached data - that might somehow later get used when attempting to load data from the new - distribution and thus cause such load operations to fail. Note that when - tracking down such remaining stale data, we can not catch every conceivable - usage from here, and we clear only those that we know of and have found to - cause problems if left alive. Any remaining caches should be updated by - whomever is in charge of maintaining them, i.e. they should be ready to - handle us replacing their zip archives with new distributions at runtime. - - """ - # There are several other known sources of stale zipimport.zipimporter - # instances that we do not clear here, but might if ever given a reason to - # do so: - # * Global setuptools pkg_resources.working_set (a.k.a. 'master working - # set') may contain distributions which may in turn contain their - # zipimport.zipimporter loaders. - # * Several zipimport.zipimporter loaders held by local variables further - # up the function call stack when running the setuptools installation. - # * Already loaded modules may have their __loader__ attribute set to the - # exact loader instance used when importing them. Python 3.4 docs state - # that this information is intended mostly for introspection and so is - # not expected to cause us problems. 
- normalized_path = normalize_path(dist_path) - _uncache(normalized_path, sys.path_importer_cache) - if fix_zipimporter_caches: - _replace_zip_directory_cache_data(normalized_path) - else: - # Here, even though we do not want to fix existing and now stale - # zipimporter cache information, we still want to remove it. Related to - # Python's zip archive directory information cache, we clear each of - # its stale entries in two phases: - # 1. Clear the entry so attempting to access zip archive information - # via any existing stale zipimport.zipimporter instances fails. - # 2. Remove the entry from the cache so any newly constructed - # zipimport.zipimporter instances do not end up using old stale - # zip archive directory information. - # This whole stale data removal step does not seem strictly necessary, - # but has been left in because it was done before we started replacing - # the zip archive directory information cache content if possible, and - # there are no relevant unit tests that we can depend on to tell us if - # this is really needed. - _remove_and_clear_zip_directory_cache_data(normalized_path) - - -def _collect_zipimporter_cache_entries(normalized_path, cache): - """ - Return zipimporter cache entry keys related to a given normalized path. - - Alternative path spellings (e.g. those using different character case or - those using alternative path separators) related to the same path are - included. Any sub-path entries are included as well, i.e. those - corresponding to zip archives embedded in other zip archives. - - """ - result = [] - prefix_len = len(normalized_path) - for p in cache: - np = normalize_path(p) - if (np.startswith(normalized_path) and - np[prefix_len:prefix_len + 1] in (os.sep, '')): - result.append(p) - return result - - -def _update_zipimporter_cache(normalized_path, cache, updater=None): - """ - Update zipimporter cache data for a given normalized path. - - Any sub-path entries are processed as well, i.e. those corresponding to zip - archives embedded in other zip archives. - - Given updater is a callable taking a cache entry key and the original entry - (after already removing the entry from the cache), and expected to update - the entry and possibly return a new one to be inserted in its place. - Returning None indicates that the entry should not be replaced with a new - one. If no updater is given, the cache entries are simply removed without - any additional processing, the same as if the updater simply returned None. - - """ - for p in _collect_zipimporter_cache_entries(normalized_path, cache): - # N.B. pypy's custom zipimport._zip_directory_cache implementation does - # not support the complete dict interface: - # * Does not support item assignment, thus not allowing this function - # to be used only for removing existing cache entries. - # * Does not support the dict.pop() method, forcing us to use the - # get/del patterns instead. 
For more detailed information see the - # following links: - # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 - # http://bit.ly/2h9itJX - old_entry = cache[p] - del cache[p] - new_entry = updater and updater(p, old_entry) - if new_entry is not None: - cache[p] = new_entry - - -def _uncache(normalized_path, cache): - _update_zipimporter_cache(normalized_path, cache) - - -def _remove_and_clear_zip_directory_cache_data(normalized_path): - def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): - old_entry.clear() - - _update_zipimporter_cache( - normalized_path, zipimport._zip_directory_cache, - updater=clear_and_remove_cached_zip_archive_directory_data) - - -# PyPy Python implementation does not allow directly writing to the -# zipimport._zip_directory_cache and so prevents us from attempting to correct -# its content. The best we can do there is clear the problematic cache content -# and have PyPy repopulate it as needed. The downside is that if there are any -# stale zipimport.zipimporter instances laying around, attempting to use them -# will fail due to not having its zip archive directory information available -# instead of being automatically corrected to use the new correct zip archive -# directory information. -if '__pypy__' in sys.builtin_module_names: - _replace_zip_directory_cache_data = \ - _remove_and_clear_zip_directory_cache_data -else: - - def _replace_zip_directory_cache_data(normalized_path): - def replace_cached_zip_archive_directory_data(path, old_entry): - # N.B. In theory, we could load the zip directory information just - # once for all updated path spellings, and then copy it locally and - # update its contained path strings to contain the correct - # spelling, but that seems like a way too invasive move (this cache - # structure is not officially documented anywhere and could in - # theory change with new Python releases) for no significant - # benefit. - old_entry.clear() - zipimport.zipimporter(path) - old_entry.update(zipimport._zip_directory_cache[path]) - return old_entry - - _update_zipimporter_cache( - normalized_path, zipimport._zip_directory_cache, - updater=replace_cached_zip_archive_directory_data) - - -def is_python(text, filename=''): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! line)""" - try: - with io.open(executable, encoding='latin-1') as fp: - magic = fp.read(2) - except (OSError, IOError): - return executable - return magic == '#!' - - -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - return subprocess.list2cmdline([arg]) - - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' 
line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): - pass - - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error as e: - log.debug("chmod failed: %s", e) - - -class CommandSpec(list): - """ - A command spec for a #! header, specified as a list of arguments akin to - those passed to Popen. - """ - - options = [] - split_args = dict() - - @classmethod - def best(cls): - """ - Choose the best CommandSpec class based on environmental conditions. - """ - return cls - - @classmethod - def _sys_executable(cls): - _default = os.path.normpath(sys.executable) - return os.environ.get('__PYVENV_LAUNCHER__', _default) - - @classmethod - def from_param(cls, param): - """ - Construct a CommandSpec from a parameter to build_scripts, which may - be None. - """ - if isinstance(param, cls): - return param - if isinstance(param, list): - return cls(param) - if param is None: - return cls.from_environment() - # otherwise, assume it's a string. - return cls.from_string(param) - - @classmethod - def from_environment(cls): - return cls([cls._sys_executable()]) - - @classmethod - def from_string(cls, string): - """ - Construct a command spec from a simple string representing a command - line parseable by shlex.split. - """ - items = shlex.split(string, **cls.split_args) - return cls(items) - - def install_options(self, script_text): - self.options = shlex.split(self._extract_options(script_text)) - cmdline = subprocess.list2cmdline(self) - if not isascii(cmdline): - self.options[:0] = ['-x'] - - @staticmethod - def _extract_options(orig_script): - """ - Extract any options from the first line of the script. - """ - first = (orig_script + '\n').splitlines()[0] - match = _first_line_re().match(first) - options = match.group(1) or '' if match else '' - return options.strip() - - def as_header(self): - return self._render(self + list(self.options)) - - @staticmethod - def _strip_quotes(item): - _QUOTES = '"\'' - for q in _QUOTES: - if item.startswith(q) and item.endswith(q): - return item[1:-1] - return item - - @staticmethod - def _render(items): - cmdline = subprocess.list2cmdline( - CommandSpec._strip_quotes(item.strip()) for item in items) - return '#!' + cmdline + '\n' - - -# For pbr compat; will be removed in a future version. -sys_executable = CommandSpec._sys_executable() - - -class WindowsCommandSpec(CommandSpec): - split_args = dict(posix=False) - - -class ScriptWriter: - """ - Encapsulates behavior around writing entry point scripts for console and - gui apps. 
- """ - - template = textwrap.dedent(r""" - # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r - import re - import sys - - # for compatibility with easy_install; see #2198 - __requires__ = %(spec)r - - try: - from importlib.metadata import distribution - except ImportError: - try: - from importlib_metadata import distribution - except ImportError: - from pkg_resources import load_entry_point - - - def importlib_load_entry_point(spec, group, name): - dist_name, _, _ = spec.partition('==') - matches = ( - entry_point - for entry_point in distribution(dist_name).entry_points - if entry_point.group == group and entry_point.name == name - ) - return next(matches).load() - - - globals().setdefault('load_entry_point', importlib_load_entry_point) - - - if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)()) - """).lstrip() - - command_spec_class = CommandSpec - - @classmethod - def get_script_args(cls, dist, executable=None, wininst=False): - # for backward compatibility - warnings.warn("Use get_args", EasyInstallDeprecationWarning) - writer = (WindowsScriptWriter if wininst else ScriptWriter).best() - header = cls.get_script_header("", executable, wininst) - return writer.get_args(dist, header) - - @classmethod - def get_script_header(cls, script_text, executable=None, wininst=False): - # for backward compatibility - warnings.warn( - "Use get_header", EasyInstallDeprecationWarning, stacklevel=2) - if wininst: - executable = "python.exe" - return cls.get_header(script_text, executable) - - @classmethod - def get_args(cls, dist, header=None): - """ - Yield write_script() argument tuples for a distribution's - console_scripts and gui_scripts entry points. - """ - if header is None: - header = cls.get_header() - spec = str(dist.as_requirement()) - for type_ in 'console', 'gui': - group = type_ + '_scripts' - for name, ep in dist.get_entry_map(group).items(): - cls._ensure_safe_name(name) - script_text = cls.template % locals() - args = cls._get_script_args(type_, name, header, script_text) - for res in args: - yield res - - @staticmethod - def _ensure_safe_name(name): - """ - Prevent paths in *_scripts entry point names. - """ - has_path_sep = re.search(r'[\\/]', name) - if has_path_sep: - raise ValueError("Path separators not allowed in script names") - - @classmethod - def get_writer(cls, force_windows): - # for backward compatibility - warnings.warn("Use best", EasyInstallDeprecationWarning) - return WindowsScriptWriter.best() if force_windows else cls.best() - - @classmethod - def best(cls): - """ - Select the best ScriptWriter for this environment. - """ - if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): - return WindowsScriptWriter.best() - else: - return cls - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - # Simply write the stub with no extension. - yield (name, header + script_text) - - @classmethod - def get_header(cls, script_text="", executable=None): - """Create a #! 
line, getting options (if any) from script_text""" - cmd = cls.command_spec_class.best().from_param(executable) - cmd.install_options(script_text) - return cmd.as_header() - - -class WindowsScriptWriter(ScriptWriter): - command_spec_class = WindowsCommandSpec - - @classmethod - def get_writer(cls): - # for backward compatibility - warnings.warn("Use best", EasyInstallDeprecationWarning) - return cls.best() - - @classmethod - def best(cls): - """ - Select the best ScriptWriter suitable for Windows - """ - writer_lookup = dict( - executable=WindowsExecutableLauncherWriter, - natural=cls, - ) - # for compatibility, use the executable launcher by default - launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') - return writer_lookup[launcher] - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - "For Windows, add a .py extension" - ext = dict(console='.pya', gui='.pyw')[type_] - if ext not in os.environ['PATHEXT'].lower().split(';'): - msg = ( - "{ext} not listed in PATHEXT; scripts will not be " - "recognized as executables." - ).format(**locals()) - warnings.warn(msg, UserWarning) - old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] - old.remove(ext) - header = cls._adjust_header(type_, header) - blockers = [name + x for x in old] - yield name + ext, header + script_text, 't', blockers - - @classmethod - def _adjust_header(cls, type_, orig_header): - """ - Make sure 'pythonw' is used for gui and 'python' is used for - console (regardless of what sys.executable is). - """ - pattern = 'pythonw.exe' - repl = 'python.exe' - if type_ == 'gui': - pattern, repl = repl, pattern - pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) - new_header = pattern_ob.sub(string=orig_header, repl=repl) - return new_header if cls._use_header(new_header) else orig_header - - @staticmethod - def _use_header(new_header): - """ - Should _adjust_header use the replaced header? - - On non-windows systems, always use. On - Windows systems, only use the replaced header if it resolves - to an executable on the system. - """ - clean_header = new_header[2:-1].strip('"') - return sys.platform != 'win32' or find_executable(clean_header) - - -class WindowsExecutableLauncherWriter(WindowsScriptWriter): - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - """ - For Windows, add a .py extension and an .exe launcher - """ - if type_ == 'gui': - launcher_type = 'gui' - ext = '-script.pyw' - old = ['.pyw'] - else: - launcher_type = 'cli' - ext = '-script.py' - old = ['.py', '.pyc', '.pyo'] - hdr = cls._adjust_header(type_, header) - blockers = [name + x for x in old] - yield (name + ext, hdr + script_text, 't', blockers) - yield ( - name + '.exe', get_win_launcher(launcher_type), - 'b' # write in binary mode - ) - if not is_64bit(): - # install a manifest for the launcher to prevent Windows - # from detecting it as an installer (which it will for - # launchers like easy_install.exe). Consider only - # adding a manifest for launchers detected as installers. - # See Distribute #143 for details. - m_name = name + '.exe.manifest' - yield (m_name, load_launcher_manifest(name), 't') - - -# for backward-compatibility -get_script_args = ScriptWriter.get_script_args -get_script_header = ScriptWriter.get_script_header - - -def get_win_launcher(type): - """ - Load the Windows launcher (executable) suitable for launching a script. - - `type` should be either 'cli' or 'gui' - - Returns the executable as a byte string. 
- """ - launcher_fn = '%s.exe' % type - if is_64bit(): - if get_platform() == "win-arm64": - launcher_fn = launcher_fn.replace(".", "-arm64.") - else: - launcher_fn = launcher_fn.replace(".", "-64.") - else: - launcher_fn = launcher_fn.replace(".", "-32.") - return resource_string('setuptools', launcher_fn) - - -def load_launcher_manifest(name): - manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - return manifest.decode('utf-8') % vars() - - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - return shutil.rmtree(path, ignore_errors, onerror) - - -def current_umask(): - tmp = os.umask(0o022) - os.umask(tmp) - return tmp - - -def only_strs(values): - """ - Exclude non-str values. Ref #3063. - """ - return filter(lambda val: isinstance(val, str), values) - - -class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): - """ - Warning for EasyInstall deprecations, bypassing suppression. - """ diff --git a/venv/Lib/site-packages/setuptools/command/egg_info.py b/venv/Lib/site-packages/setuptools/command/egg_info.py deleted file mode 100644 index 6338965..0000000 --- a/venv/Lib/site-packages/setuptools/command/egg_info.py +++ /dev/null @@ -1,743 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -from distutils.filelist import FileList as _FileList -from distutils.errors import DistutilsInternalError -from distutils.util import convert_path -from distutils import log -import distutils.errors -import distutils.filelist -import functools -import os -import re -import sys -import io -import warnings -import time -import collections - -from .._importlib import metadata -from .. import _entry_points - -from setuptools import Command -from setuptools.command.sdist import sdist -from setuptools.command.sdist import walk_revctrl -from setuptools.command.setopt import edit_config -from setuptools.command import bdist_egg -from pkg_resources import ( - Requirement, safe_name, parse_version, - safe_version, to_filename) -import setuptools.unicode_utils as unicode_utils -from setuptools.glob import glob - -from setuptools.extern import packaging -from setuptools.extern.jaraco.text import yield_lines -from setuptools import SetuptoolsDeprecationWarning - - -def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME - """ - Translate a file path glob like '*.txt' in to a regular expression. - This differs from fnmatch.translate which allows wildcards to match - directory separators. It also knows about '**/' which matches any number of - directories. - """ - pat = '' - - # This will split on '/' within [character classes]. This is deliberate. - chunks = glob.split(os.path.sep) - - sep = re.escape(os.sep) - valid_char = '[^%s]' % (sep,) - - for c, chunk in enumerate(chunks): - last_chunk = c == len(chunks) - 1 - - # Chunks that are a literal ** are globstars. They match anything. 
- if chunk == '**': - if last_chunk: - # Match anything if this is the last component - pat += '.*' - else: - # Match '(name/)*' - pat += '(?:%s+%s)*' % (valid_char, sep) - continue # Break here as the whole path component has been handled - - # Find any special characters in the remainder - i = 0 - chunk_len = len(chunk) - while i < chunk_len: - char = chunk[i] - if char == '*': - # Match any number of name characters - pat += valid_char + '*' - elif char == '?': - # Match a name character - pat += valid_char - elif char == '[': - # Character class - inner_i = i + 1 - # Skip initial !/] chars - if inner_i < chunk_len and chunk[inner_i] == '!': - inner_i = inner_i + 1 - if inner_i < chunk_len and chunk[inner_i] == ']': - inner_i = inner_i + 1 - - # Loop till the closing ] is found - while inner_i < chunk_len and chunk[inner_i] != ']': - inner_i = inner_i + 1 - - if inner_i >= chunk_len: - # Got to the end of the string without finding a closing ] - # Do not treat this as a matching group, but as a literal [ - pat += re.escape(char) - else: - # Grab the insides of the [brackets] - inner = chunk[i + 1:inner_i] - char_class = '' - - # Class negation - if inner[0] == '!': - char_class = '^' - inner = inner[1:] - - char_class += re.escape(inner) - pat += '[%s]' % (char_class,) - - # Skip to the end ] - i = inner_i - else: - pat += re.escape(char) - i += 1 - - # Join each chunk with the dir separator - if not last_chunk: - pat += sep - - pat += r'\Z' - return re.compile(pat, flags=re.MULTILINE | re.DOTALL) - - -class InfoCommon: - tag_build = None - tag_date = None - - @property - def name(self): - return safe_name(self.distribution.get_name()) - - def tagged_version(self): - return safe_version(self._maybe_tag(self.distribution.get_version())) - - def _maybe_tag(self, version): - """ - egg_info may be called more than once for a distribution, - in which case the version string already contains all tags. - """ - return ( - version if self.vtags and version.endswith(self.vtags) - else version + self.vtags - ) - - def tags(self): - version = '' - if self.tag_build: - version += self.tag_build - if self.tag_date: - version += time.strftime("-%Y%m%d") - return version - vtags = property(tags) - - -class egg_info(InfoCommon, Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date'] - negative_opt = { - 'no-date': 'tag-date', - } - - def initialize_options(self): - self.egg_base = None - self.egg_name = None - self.egg_info = None - self.egg_version = None - self.broken_egg_info = False - - #################################### - # allow the 'tag_svn_revision' to be detected and - # set, supporting sdists built on older Setuptools. - @property - def tag_svn_revision(self): - pass - - @tag_svn_revision.setter - def tag_svn_revision(self, value): - pass - #################################### - - def save_version_info(self, filename): - """ - Materialize the value of date into the - build tag. Install build keys in a deterministic order - to avoid arbitrary reordering on subsequent builds. 
- """ - egg_info = collections.OrderedDict() - # follow the order these keys would have been added - # when PYTHONHASHSEED=0 - egg_info['tag_build'] = self.tags() - egg_info['tag_date'] = 0 - edit_config(filename, dict(egg_info=egg_info)) - - def finalize_options(self): - # Note: we need to capture the current value returned - # by `self.tagged_version()`, so we can later update - # `self.distribution.metadata.version` without - # repercussions. - self.egg_name = self.name - self.egg_version = self.tagged_version() - parsed_version = parse_version(self.egg_version) - - try: - is_version = isinstance(parsed_version, packaging.version.Version) - spec = "%s==%s" if is_version else "%s===%s" - Requirement(spec % (self.egg_name, self.egg_version)) - except ValueError as e: - raise distutils.errors.DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name, self.egg_version) - ) from e - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('', os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name) + '.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: - self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) - # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key == self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. 
- """ - log.info("writing %s to %s", what, filename) - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def run(self): - self.mkpath(self.egg_info) - os.utime(self.egg_info, None) - for ep in metadata.entry_points(group='egg_info.writers'): - self.distribution._install_dependencies(ep) - writer = ep.load() - writer(self, ep.name, os.path.join(self.egg_info, ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name + '.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-" * 78 + '\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n' + '-' * 78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - - -class FileList(_FileList): - # Implementations of the various MANIFEST.in commands - - def process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). - (action, patterns, dir, dir_pattern) = self._parse_template_line(line) - - action_map = { - 'include': self.include, - 'exclude': self.exclude, - 'global-include': self.global_include, - 'global-exclude': self.global_exclude, - 'recursive-include': functools.partial( - self.recursive_include, dir, - ), - 'recursive-exclude': functools.partial( - self.recursive_exclude, dir, - ), - 'graft': self.graft, - 'prune': self.prune, - } - log_map = { - 'include': "warning: no files found matching '%s'", - 'exclude': ( - "warning: no previously-included files found " - "matching '%s'" - ), - 'global-include': ( - "warning: no files found matching '%s' " - "anywhere in distribution" - ), - 'global-exclude': ( - "warning: no previously-included files matching " - "'%s' found anywhere in distribution" - ), - 'recursive-include': ( - "warning: no files found matching '%s' " - "under directory '%s'" - ), - 'recursive-exclude': ( - "warning: no previously-included files matching " - "'%s' found under directory '%s'" - ), - 'graft': "warning: no directories found matching '%s'", - 'prune': "no previously-included directories found matching '%s'", - } - - try: - process_action = action_map[action] - except KeyError: - raise DistutilsInternalError( - "this cannot happen: invalid action '{action!s}'". - format(action=action), - ) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. 
- - action_is_recursive = action.startswith('recursive-') - if action in {'graft', 'prune'}: - patterns = [dir_pattern] - extra_log_args = (dir, ) if action_is_recursive else () - log_tmpl = log_map[action] - - self.debug_print( - ' '.join( - [action] + - ([dir] if action_is_recursive else []) + - patterns, - ) - ) - for pattern in patterns: - if not process_action(pattern): - log.warn(log_tmpl, pattern, *extra_log_args) - - def _remove_files(self, predicate): - """ - Remove all files from the file list that match the predicate. - Return True if any matching files were removed - """ - found = False - for i in range(len(self.files) - 1, -1, -1): - if predicate(self.files[i]): - self.debug_print(" removing " + self.files[i]) - del self.files[i] - found = True - return found - - def include(self, pattern): - """Include files that match 'pattern'.""" - found = [f for f in glob(pattern) if not os.path.isdir(f)] - self.extend(found) - return bool(found) - - def exclude(self, pattern): - """Exclude files that match 'pattern'.""" - match = translate_pattern(pattern) - return self._remove_files(match.match) - - def recursive_include(self, dir, pattern): - """ - Include all files anywhere in 'dir/' that match the pattern. - """ - full_pattern = os.path.join(dir, '**', pattern) - found = [f for f in glob(full_pattern, recursive=True) - if not os.path.isdir(f)] - self.extend(found) - return bool(found) - - def recursive_exclude(self, dir, pattern): - """ - Exclude any file anywhere in 'dir/' that match the pattern. - """ - match = translate_pattern(os.path.join(dir, '**', pattern)) - return self._remove_files(match.match) - - def graft(self, dir): - """Include all files from 'dir/'.""" - found = [ - item - for match_dir in glob(dir) - for item in distutils.filelist.findall(match_dir) - ] - self.extend(found) - return bool(found) - - def prune(self, dir): - """Filter out files from 'dir/'.""" - match = translate_pattern(os.path.join(dir, '**')) - return self._remove_files(match.match) - - def global_include(self, pattern): - """ - Include all files anywhere in the current directory that match the - pattern. This is very inefficient on large file trees. - """ - if self.allfiles is None: - self.findall() - match = translate_pattern(os.path.join('**', pattern)) - found = [f for f in self.allfiles if match.match(f)] - self.extend(found) - return bool(found) - - def global_exclude(self, pattern): - """ - Exclude all files anywhere that match the pattern. - """ - match = translate_pattern(os.path.join('**', pattern)) - return self._remove_files(match.match) - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - - if self._safe_path(path): - self.files.append(path) - - def extend(self, paths): - self.files.extend(filter(self._safe_path, paths)) - - def _repair(self): - """ - Replace self.files with only safe paths - - Because some owners of FileList manipulate the underlying - ``files`` attribute directly, this method must be called to - repair those paths. 
- """ - self.files = list(filter(self._safe_path, self.files)) - - def _safe_path(self, path): - enc_warn = "'%s' not %s encodable -- skipping" - - # To avoid accidental trans-codings errors, first to unicode - u_path = unicode_utils.filesys_decode(path) - if u_path is None: - log.warn("'%s' in unexpected encoding -- skipping" % path) - return False - - # Must ensure utf-8 encodability - utf8_path = unicode_utils.try_encode(u_path, "utf-8") - if utf8_path is None: - log.warn(enc_warn, path, 'utf-8') - return False - - try: - # accept is either way checks out - if os.path.exists(u_path) or os.path.exists(utf8_path): - return True - # this will catch any encode errors decoding u_path - except UnicodeEncodeError: - log.warn(enc_warn, path, sys.getfilesystemencoding()) - - -class manifest_maker(sdist): - template = "MANIFEST.in" - - def initialize_options(self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.add_license_files() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def _manifest_normalize(self, path): - path = unicode_utils.filesys_decode(path) - return path.replace(os.sep, '/') - - def write_manifest(self): - """ - Write the file list in 'self.filelist' to the manifest file - named by 'self.manifest'. - """ - self.filelist._repair() - - # Now _repairs should encodability, but not unicode - files = [self._manifest_normalize(f) for f in self.filelist.files] - msg = "writing manifest file '%s'" % self.manifest - self.execute(write_file, (self.manifest, files), msg) - - def warn(self, msg): - if not self._should_suppress_warning(msg): - sdist.warn(self, msg) - - @staticmethod - def _should_suppress_warning(msg): - """ - suppress missing-file warnings from sdist - """ - return re.match(r"standard file .*not found", msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - - if os.path.exists("setup.py"): - # setup.py should be included by default, even if it's not - # the script called to create the sdist - self.filelist.append("setup.py") - - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.graft(ei_cmd.egg_info) - - def add_license_files(self): - license_files = self.distribution.metadata.license_files or [] - for lf in license_files: - log.info("adding license file '%s'", lf) - pass - self.filelist.extend(license_files) - - def prune_file_list(self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.prune(build.build_base) - self.filelist.prune(base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, - is_regex=1) - - def _safe_data_files(self, build_py): - """ - The parent class implementation of this method - (``sdist``) will try to include data files, which - might cause recursion problems when - ``include_package_data=True``. - - Therefore, avoid triggering any attempt of - analyzing/building the manifest again. 
- """ - if hasattr(build_py, 'get_data_files_without_manifest'): - return build_py.get_data_files_without_manifest() - - warnings.warn( - "Custom 'build_py' does not implement " - "'get_data_files_without_manifest'.\nPlease extend command classes" - " from setuptools instead of distutils.", - SetuptoolsDeprecationWarning - ) - return build_py.get_data_files() - - -def write_file(filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - contents = "\n".join(contents) - - # assuming the contents has been vetted for utf-8 encoding - contents = contents.encode("utf-8") - - with open(filename, "wb") as f: # always write POSIX-style manifest - f.write(contents) - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution, 'zip_safe', None) - - bdist_egg.write_safety_flag(cmd.egg_info, safe) - - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." - ) - - -def _write_requirements(stream, reqs): - lines = yield_lines(reqs or ()) - - def append_cr(line): - return line + '\n' - lines = map(append_cr, lines) - stream.writelines(lines) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = io.StringIO() - _write_requirements(data, dist.install_requires) - extras_require = dist.extras_require or {} - for extra in sorted(extras_require): - data.write('\n[{extra}]\n'.format(**vars())) - _write_requirements(data, extras_require[extra]) - cmd.write_or_delete_file("requirements", filename, data.getvalue()) - - -def write_setup_requirements(cmd, basename, filename): - data = io.StringIO() - _write_requirements(data, cmd.distribution.setup_requires) - cmd.write_or_delete_file("setup-requirements", filename, data.getvalue()) - - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [ - k.split('.', 1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value) + '\n' - cmd.write_or_delete_file(argname, filename, value, force) - - -def write_entries(cmd, basename, filename): - eps = _entry_points.load(cmd.distribution.entry_points) - defn = _entry_points.render(eps) - cmd.write_or_delete_file('entry points', filename, defn, True) - - -def get_pkg_info_revision(): - """ - Get a -r### off of PKG-INFO Version in case this is an sdist of - a subversion revision. 
- """ - warnings.warn( - "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning) - if os.path.exists('PKG-INFO'): - with io.open('PKG-INFO') as f: - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - return 0 - - -class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): - """Deprecated behavior warning for EggInfo, bypassing suppression.""" diff --git a/venv/Lib/site-packages/setuptools/command/install.py b/venv/Lib/site-packages/setuptools/command/install.py deleted file mode 100644 index 35e54d2..0000000 --- a/venv/Lib/site-packages/setuptools/command/install.py +++ /dev/null @@ -1,132 +0,0 @@ -from distutils.errors import DistutilsArgError -import inspect -import glob -import warnings -import platform -import distutils.command.install as orig - -import setuptools - -# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for -# now. See https://github.com/pypa/setuptools/issues/199/ -_install = orig.install - - -class install(orig.install): - """Use easy_install to install the package, w/dependencies""" - - user_options = orig.install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = orig.install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - - def initialize_options(self): - - warnings.warn( - "setup.py install is deprecated. " - "Use build and pip and other standards-based tools.", - setuptools.SetuptoolsDeprecationWarning, - ) - - orig.install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - - def finalize_options(self): - orig.install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return orig.install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - def run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return orig.install.run(self) - - if not self._called_from_setup(inspect.currentframe()): - # Run in backward-compatibility mode to support bdist_* commands. - orig.install.run(self) - else: - self.do_egg_install() - - @staticmethod - def _called_from_setup(run_frame): - """ - Attempt to detect whether run() was called from setup() or by another - command. If called by setup(), the parent caller will be the - 'run_command' method in 'distutils.dist', and *its* caller will be - the 'run_commands' method. If called any other way, the - immediate caller *might* be 'run_command', but it won't have been - called by 'run_commands'. Return True in that case or if a call stack - is unavailable. Return False otherwise. - """ - if run_frame is None: - msg = "Call stack not available. 
bdist_* commands may fail." - warnings.warn(msg) - if platform.python_implementation() == 'IronPython': - msg = "For best results, pass -X:Frames to enable call stack." - warnings.warn(msg) - return True - res = inspect.getouterframes(run_frame)[2] - caller, = res[:1] - info = inspect.getframeinfo(caller) - caller_module = caller.f_globals.get('__name__', '') - return ( - caller_module == 'distutils.dist' - and info.function == 'run_commands' - ) - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' # make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run(show_deprecation=False) - setuptools.bootstrap_install_from = None - - -# XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = ( - [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] + - install.new_commands -) diff --git a/venv/Lib/site-packages/setuptools/command/install_egg_info.py b/venv/Lib/site-packages/setuptools/command/install_egg_info.py deleted file mode 100644 index 65ede40..0000000 --- a/venv/Lib/site-packages/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,63 +0,0 @@ -from distutils import log, dir_util -import os - -from setuptools import Command -from setuptools import namespaces -from setuptools.archive_util import unpack_archive -from .._path import ensure_directory -import pkg_resources - - -class install_egg_info(namespaces.Installer, Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib', - ('install_dir', 'install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name() + '.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [] - - def run(self): - self.run_command('egg_info') - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif os.path.exists(self.target): - self.execute(os.unlink, (self.target,), "Removing " + self.target) - if not self.dry_run: - ensure_directory(self.target) - self.execute( - self.copytree, (), "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src, dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. 
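A small editorial restatement of the skip rule described above, using hypothetical paths; the real skimmer() additionally records and logs each copied destination.

    def _skips_vcs(src):
        # mirrors the check in skimmer(): src is always a '/'-separated path
        return any(src.startswith(skip) or '/' + skip in src
                   for skip in ('.svn/', 'CVS/'))

    assert _skips_vcs('.svn/entries')
    assert _skips_vcs('pkg/CVS/Root')
    assert not _skips_vcs('pkg/__init__.py')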
- for skip in '.svn/', 'CVS/': - if src.startswith(skip) or '/' + skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - - unpack_archive(self.source, self.target, skimmer) diff --git a/venv/Lib/site-packages/setuptools/command/install_lib.py b/venv/Lib/site-packages/setuptools/command/install_lib.py deleted file mode 100644 index 2e9d875..0000000 --- a/venv/Lib/site-packages/setuptools/command/install_lib.py +++ /dev/null @@ -1,122 +0,0 @@ -import os -import sys -from itertools import product, starmap -import distutils.command.install_lib as orig - - -class install_lib(orig.install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - """ - Return a collections.Sized collections.Container of paths to be - excluded for single_version_externally_managed installations. - """ - all_packages = ( - pkg - for ns_pkg in self._get_SVEM_NSPs() - for pkg in self._all_packages(ns_pkg) - ) - - excl_specs = product(all_packages, self._gen_exclusion_paths()) - return set(starmap(self._exclude_pkg_path, excl_specs)) - - def _exclude_pkg_path(self, pkg, exclusion_path): - """ - Given a package name and exclusion path within that package, - compute the full exclusion path. - """ - parts = pkg.split('.') + [exclusion_path] - return os.path.join(self.install_dir, *parts) - - @staticmethod - def _all_packages(pkg_name): - """ - >>> list(install_lib._all_packages('foo.bar.baz')) - ['foo.bar.baz', 'foo.bar', 'foo'] - """ - while pkg_name: - yield pkg_name - pkg_name, sep, child = pkg_name.rpartition('.') - - def _get_SVEM_NSPs(self): - """ - Get namespace packages (list) but only for - single_version_externally_managed installations and empty otherwise. - """ - # TODO: is it necessary to short-circuit here? i.e. what's the cost - # if get_finalized_command is called even when namespace_packages is - # False? - if not self.distribution.namespace_packages: - return [] - - install_cmd = self.get_finalized_command('install') - svem = install_cmd.single_version_externally_managed - - return self.distribution.namespace_packages if svem else [] - - @staticmethod - def _gen_exclusion_paths(): - """ - Generate file paths to be excluded for namespace packages (bytecode - cache files). - """ - # always exclude the package module itself - yield '__init__.py' - - yield '__init__.pyc' - yield '__init__.pyo' - - if not hasattr(sys, 'implementation'): - return - - base = os.path.join( - '__pycache__', '__init__.' 
+ sys.implementation.cache_tag) - yield base + '.pyc' - yield base + '.pyo' - yield base + '.opt-1.pyc' - yield base + '.opt-2.pyc' - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return orig.install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)", - dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = orig.install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs diff --git a/venv/Lib/site-packages/setuptools/command/install_scripts.py b/venv/Lib/site-packages/setuptools/command/install_scripts.py deleted file mode 100644 index aeb0e42..0000000 --- a/venv/Lib/site-packages/setuptools/command/install_scripts.py +++ /dev/null @@ -1,70 +0,0 @@ -from distutils import log -import distutils.command.install_scripts as orig -from distutils.errors import DistutilsModuleError -import os -import sys - -from pkg_resources import Distribution, PathMetadata -from .._path import ensure_directory - - -class install_scripts(orig.install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - orig.install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - import setuptools.command.easy_install as ei - - self.run_command("egg_info") - if self.distribution.scripts: - orig.install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - exec_param = getattr(bs_cmd, 'executable', None) - try: - bw_cmd = self.get_finalized_command("bdist_wininst") - is_wininst = getattr(bw_cmd, '_is_running', False) - except (ImportError, DistutilsModuleError): - is_wininst = False - writer = ei.ScriptWriter - if is_wininst: - exec_param = "python.exe" - writer = ei.WindowsScriptWriter - if exec_param == sys.executable: - # In case the path to the Python executable contains a space, wrap - # it so it's not split up. 
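An editorial sketch (with a hypothetical interpreter path) of why the executable gets wrapped in a list just below: CommandSpec.from_param() shlex-splits a bare string but keeps a list as a single argument, and re-quotes it when rendering the #! header.

    from setuptools.command.easy_install import CommandSpec

    exe = '/opt/my python/bin/python3'            # contains a space

    CommandSpec.from_param(exe).as_header()       # '#!/opt/my python/bin/python3\n'  (split into two arguments)
    CommandSpec.from_param([exe]).as_header()     # '#!"/opt/my python/bin/python3"\n' (kept as one argument)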
- exec_param = [exec_param] - # resolve the writer to the environment - writer = writer.best() - cmd = writer.command_spec_class.best().from_param(exec_param) - for args in writer.get_args(dist, cmd.as_header()): - self.write_script(*args) - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - from setuptools.command.easy_install import chmod, current_umask - - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - f = open(target, "w" + mode) - f.write(contents) - f.close() - chmod(target, 0o777 - mask) diff --git a/venv/Lib/site-packages/setuptools/command/launcher manifest.xml b/venv/Lib/site-packages/setuptools/command/launcher manifest.xml deleted file mode 100644 index 5972a96..0000000 --- a/venv/Lib/site-packages/setuptools/command/launcher manifest.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - diff --git a/venv/Lib/site-packages/setuptools/command/py36compat.py b/venv/Lib/site-packages/setuptools/command/py36compat.py deleted file mode 100644 index 343547a..0000000 --- a/venv/Lib/site-packages/setuptools/command/py36compat.py +++ /dev/null @@ -1,134 +0,0 @@ -import os -from glob import glob -from distutils.util import convert_path -from distutils.command import sdist - - -class sdist_add_defaults: - """ - Mix-in providing forward-compatibility for functionality as found in - distutils on Python 3.7. - - Do not edit the code in this class except to update functionality - as implemented in distutils. Instead, override in the subclass. - """ - - def add_defaults(self): - """Add all the default files to self.filelist: - - README or README.txt - - setup.py - - test/test*.py - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. - - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. 
- """ - self._add_defaults_standards() - self._add_defaults_optional() - self._add_defaults_python() - self._add_defaults_data_files() - self._add_defaults_ext() - self._add_defaults_c_libs() - self._add_defaults_scripts() - - @staticmethod - def _cs_path_exists(fspath): - """ - Case-sensitive path existence check - - >>> sdist_add_defaults._cs_path_exists(__file__) - True - >>> sdist_add_defaults._cs_path_exists(__file__.upper()) - False - """ - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - return filename in os.listdir(directory) - - def _add_defaults_standards(self): - standards = [self.READMES, self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = False - for fn in alts: - if self._cs_path_exists(fn): - got_it = True - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if self._cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - def _add_defaults_optional(self): - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = filter(os.path.isfile, glob(pattern)) - self.filelist.extend(files) - - def _add_defaults_python(self): - # build_py is used to get: - # - python modules - # - files defined in package_data - build_py = self.get_finalized_command('build_py') - - # getting python files - if self.distribution.has_pure_modules(): - self.filelist.extend(build_py.get_source_files()) - - # getting package_data files - # (computed in build_py.data_files by build_py.finalize_options) - for pkg, src_dir, build_dir, filenames in build_py.data_files: - for filename in filenames: - self.filelist.append(os.path.join(src_dir, filename)) - - def _add_defaults_data_files(self): - # getting distribution.data_files - if self.distribution.has_data_files(): - for item in self.distribution.data_files: - if isinstance(item, str): - # plain file - item = convert_path(item) - if os.path.isfile(item): - self.filelist.append(item) - else: - # a (dirname, filenames) tuple - dirname, filenames = item - for f in filenames: - f = convert_path(f) - if os.path.isfile(f): - self.filelist.append(f) - - def _add_defaults_ext(self): - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - def _add_defaults_c_libs(self): - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - def _add_defaults_scripts(self): - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - -if hasattr(sdist.sdist, '_add_defaults_standards'): - # disable the functionality already available upstream - class sdist_add_defaults: # noqa - pass diff --git a/venv/Lib/site-packages/setuptools/command/register.py b/venv/Lib/site-packages/setuptools/command/register.py deleted file mode 100644 index b8266b9..0000000 --- a/venv/Lib/site-packages/setuptools/command/register.py +++ /dev/null @@ -1,18 +0,0 @@ -from distutils import log -import distutils.command.register as orig - -from setuptools.errors import RemovedCommandError - - -class register(orig.register): - """Formerly used to register packages on PyPI.""" - - 
def run(self): - msg = ( - "The register command has been removed, use twine to upload " - + "instead (https://pypi.org/p/twine)" - ) - - self.announce("ERROR: " + msg, log.ERROR) - - raise RemovedCommandError(msg) diff --git a/venv/Lib/site-packages/setuptools/command/rotate.py b/venv/Lib/site-packages/setuptools/command/rotate.py deleted file mode 100644 index 74795ba..0000000 --- a/venv/Lib/site-packages/setuptools/command/rotate.py +++ /dev/null @@ -1,64 +0,0 @@ -from distutils.util import convert_path -from distutils import log -from distutils.errors import DistutilsOptionError -import os -import shutil - -from setuptools import Command - - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. '.zip' or '.egg')" - ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError as e: - raise DistutilsOptionError("--keep must be an integer") from e - if isinstance(self.match, str): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - - for pattern in self.match: - pattern = self.distribution.get_name() + '*' + pattern - files = glob(os.path.join(self.dist_dir, pattern)) - files = [(os.path.getmtime(f), f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t, f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - if os.path.isdir(f): - shutil.rmtree(f) - else: - os.unlink(f) diff --git a/venv/Lib/site-packages/setuptools/command/saveopts.py b/venv/Lib/site-packages/setuptools/command/saveopts.py deleted file mode 100644 index 611cec5..0000000 --- a/venv/Lib/site-packages/setuptools/command/saveopts.py +++ /dev/null @@ -1,22 +0,0 @@ -from setuptools.command.setopt import edit_config, option_base - - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - settings = {} - - for cmd in dist.command_options: - - if cmd == 'saveopts': - continue # don't save our own options! 
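An editorial sketch of what the collected settings end up doing; edit_config() is defined in setopt.py further down in this diff, and the section and value used here are hypothetical.

    from setuptools.command.setopt import edit_config

    edit_config('setup.cfg', {'global': {'verbose': '0'}})
    # setup.cfg now contains (created or updated in place):
    #   [global]
    #   verbose = 0
    #
    # A value of None deletes that option; a section mapped to None is removed.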
- - for opt, (src, val) in dist.get_option_dict(cmd).items(): - if src == "command line": - settings.setdefault(cmd, {})[opt] = val - - edit_config(self.filename, settings, self.dry_run) diff --git a/venv/Lib/site-packages/setuptools/command/sdist.py b/venv/Lib/site-packages/setuptools/command/sdist.py deleted file mode 100644 index 0ffeacf..0000000 --- a/venv/Lib/site-packages/setuptools/command/sdist.py +++ /dev/null @@ -1,196 +0,0 @@ -from distutils import log -import distutils.command.sdist as orig -import os -import sys -import io -import contextlib - -from .py36compat import sdist_add_defaults - -from .._importlib import metadata - -_default_revctrl = list - - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in metadata.entry_points(group='setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - - -class sdist(sdist_add_defaults, orig.sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ] - - negative_opt = {} - - README_EXTENSIONS = ['', '.rst', '.txt', '.md'] - READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS) - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) - self.check_readme() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - self.make_distribution() - - dist_files = getattr(self.distribution, 'dist_files', []) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def initialize_options(self): - orig.sdist.initialize_options(self) - - self._default_to_gztar() - - def _default_to_gztar(self): - # only needed on Python prior to 3.6. 
- if sys.version_info >= (3, 6, 0, 'beta', 1): - return - self.formats = ['gztar'] - - def make_distribution(self): - """ - Workaround for #516 - """ - with self._remove_os_link(): - orig.sdist.make_distribution(self) - - @staticmethod - @contextlib.contextmanager - def _remove_os_link(): - """ - In a context, remove and restore os.link if it exists - """ - - class NoValue: - pass - - orig_val = getattr(os, 'link', NoValue) - try: - del os.link - except Exception: - pass - try: - yield - finally: - if orig_val is not NoValue: - setattr(os, 'link', orig_val) - - def _add_defaults_optional(self): - super()._add_defaults_optional() - if os.path.isfile('pyproject.toml'): - self.filelist.append('pyproject.toml') - - def _add_defaults_python(self): - """getting python files""" - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - self._add_data_files(self._safe_data_files(build_py)) - - def _safe_data_files(self, build_py): - """ - Since the ``sdist`` class is also used to compute the MANIFEST - (via :obj:`setuptools.command.egg_info.manifest_maker`), - there might be recursion problems when trying to obtain the list of - data_files and ``include_package_data=True`` (which in turn depends on - the files included in the MANIFEST). - - To avoid that, ``manifest_maker`` should be able to overwrite this - method and avoid recursive attempts to build/analyze the MANIFEST. - """ - return build_py.data_files - - def _add_data_files(self, data_files): - """ - Add data files as found in build_py.data_files. - """ - self.filelist.extend( - os.path.join(src_dir, name) - for _, src_dir, _, filenames in data_files - for name in filenames - ) - - def _add_defaults_data_files(self): - try: - super()._add_defaults_data_files() - except TypeError: - log.warn("data_files contains unexpected objects") - - def check_readme(self): - for f in self.READMES: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " + - ', '.join(self.READMES) - ) - - def make_release_tree(self, base_dir, files): - orig.sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os, 'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - def _manifest_is_not_generated(self): - # check for special comment used in 2.7.1 and higher - if not os.path.isfile(self.manifest): - return False - - with io.open(self.manifest, 'rb') as fp: - first_line = fp.readline() - return (first_line != - '# file GENERATED by distutils, do NOT edit\n'.encode()) - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rb') - for line in manifest: - # The manifest must contain UTF-8. See #303. 
- try: - line = line.decode('UTF-8') - except UnicodeDecodeError: - log.warn("%r not UTF-8 decodable -- skipping" % line) - continue - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - manifest.close() diff --git a/venv/Lib/site-packages/setuptools/command/setopt.py b/venv/Lib/site-packages/setuptools/command/setopt.py deleted file mode 100644 index 6358c04..0000000 --- a/venv/Lib/site-packages/setuptools/command/setopt.py +++ /dev/null @@ -1,149 +0,0 @@ -from distutils.util import convert_path -from distutils import log -from distutils.errors import DistutilsOptionError -import distutils -import os -import configparser - -from setuptools import Command - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind == 'local': - return 'setup.cfg' - if kind == 'global': - return os.path.join( - os.path.dirname(distutils.__file__), 'distutils.cfg' - ) - if kind == 'user': - dot = os.name == 'posix' and '.' or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. - """ - log.debug("Reading configuration from %s", filename) - opts = configparser.RawConfigParser() - opts.optionxform = lambda x: x - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option, value in options.items(): - if value is None: - log.debug( - "Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section, option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section, option, value) - - log.info("Writing %s", filename) - if not dry_run: - with open(filename, 'w') as f: - opts.write(f) - - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - 
filenames.append(config_file('local')) - if len(filenames) > 1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - -class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: {self.option.replace('-', '_'): self.set_value} - }, - self.dry_run - ) diff --git a/venv/Lib/site-packages/setuptools/command/test.py b/venv/Lib/site-packages/setuptools/command/test.py deleted file mode 100644 index 652f3e4..0000000 --- a/venv/Lib/site-packages/setuptools/command/test.py +++ /dev/null @@ -1,251 +0,0 @@ -import os -import operator -import sys -import contextlib -import itertools -import unittest -from distutils.errors import DistutilsError, DistutilsOptionError -from distutils import log -from unittest import TestLoader - -from pkg_resources import ( - resource_listdir, - resource_exists, - normalize_path, - working_set, - evaluate_marker, - add_activation_listener, - require, -) -from .._importlib import metadata -from setuptools import Command -from setuptools.extern.more_itertools import unique_everseen -from setuptools.extern.jaraco.functools import pass_none - - -class ScanningLoader(TestLoader): - def __init__(self): - TestLoader.__init__(self) - self._visited = set() - - def loadTestsFromModule(self, module, pattern=None): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. - """ - if module in self._visited: - return None - self._visited.add(module) - - tests = [] - tests.append(TestLoader.loadTestsFromModule(self, module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file != '__init__.py': - submodule = module.__name__ + '.' + file[:-3] - else: - if resource_exists(module.__name__, file + '/__init__.py'): - submodule = module.__name__ + '.' 
+ file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests) != 1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -# adapted from jaraco.classes.properties:NonDataProperty -class NonDataProperty: - def __init__(self, fget): - self.fget = fget - - def __get__(self, obj, objtype=None): - if obj is None: - return self - return self.fget(obj) - - -class test(Command): - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build (deprecated)" - - user_options = [ - ('test-module=', 'm', "Run 'test_suite' in specified module"), - ( - 'test-suite=', - 's', - "Run single test, case or suite (e.g. 'module.test_suite')", - ), - ('test-runner=', 'r', "Test runner to use"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - self.test_runner = None - - def finalize_options(self): - - if self.test_suite and self.test_module: - msg = "You may specify a module or a suite, but not both" - raise DistutilsOptionError(msg) - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module + ".test_suite" - - if self.test_loader is None: - self.test_loader = getattr(self.distribution, 'test_loader', None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - if self.test_runner is None: - self.test_runner = getattr(self.distribution, 'test_runner', None) - - @NonDataProperty - def test_args(self): - return list(self._test_args()) - - def _test_args(self): - if not self.test_suite and sys.version_info >= (2, 7): - yield 'discover' - if self.verbose: - yield '--verbose' - if self.test_suite: - yield self.test_suite - - def with_project_on_sys_path(self, func): - """ - Backward compatibility for project_on_sys_path context. - """ - with self.project_on_sys_path(): - func() - - @contextlib.contextmanager - def project_on_sys_path(self, include_dists=[]): - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - project_path = normalize_path(ei_cmd.egg_base) - sys.path.insert(0, project_path) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - with self.paths_on_pythonpath([project_path]): - yield - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - @staticmethod - @contextlib.contextmanager - def paths_on_pythonpath(paths): - """ - Add the indicated paths to the head of the PYTHONPATH environment - variable so that subprocesses will also see the packages at - these paths. - - Do this in a context that restores the value on exit. 
- """ - nothing = object() - orig_pythonpath = os.environ.get('PYTHONPATH', nothing) - current_pythonpath = os.environ.get('PYTHONPATH', '') - try: - prefix = os.pathsep.join(unique_everseen(paths)) - to_join = filter(None, [prefix, current_pythonpath]) - new_path = os.pathsep.join(to_join) - if new_path: - os.environ['PYTHONPATH'] = new_path - yield - finally: - if orig_pythonpath is nothing: - os.environ.pop('PYTHONPATH', None) - else: - os.environ['PYTHONPATH'] = orig_pythonpath - - @staticmethod - def install_dists(dist): - """ - Install the requirements indicated by self.distribution and - return an iterable of the dists that were built. - """ - ir_d = dist.fetch_build_eggs(dist.install_requires) - tr_d = dist.fetch_build_eggs(dist.tests_require or []) - er_d = dist.fetch_build_eggs( - v - for k, v in dist.extras_require.items() - if k.startswith(':') and evaluate_marker(k[1:]) - ) - return itertools.chain(ir_d, tr_d, er_d) - - def run(self): - self.announce( - "WARNING: Testing via this command is deprecated and will be " - "removed in a future version. Users looking for a generic test " - "entry point independent of test runner are encouraged to use " - "tox.", - log.WARN, - ) - - installed_dists = self.install_dists(self.distribution) - - cmd = ' '.join(self._argv) - if self.dry_run: - self.announce('skipping "%s" (dry run)' % cmd) - return - - self.announce('running "%s"' % cmd) - - paths = map(operator.attrgetter('location'), installed_dists) - with self.paths_on_pythonpath(paths): - with self.project_on_sys_path(): - self.run_tests() - - def run_tests(self): - test = unittest.main( - None, - None, - self._argv, - testLoader=self._resolve_as_ep(self.test_loader), - testRunner=self._resolve_as_ep(self.test_runner), - exit=False, - ) - if not test.result.wasSuccessful(): - msg = 'Test failed: %s' % test.result - self.announce(msg, log.ERROR) - raise DistutilsError(msg) - - @property - def _argv(self): - return ['unittest'] + self.test_args - - @staticmethod - @pass_none - def _resolve_as_ep(val): - """ - Load the indicated attribute value, called, as a as if it were - specified as an entry point. - """ - return metadata.EntryPoint(value=val, name=None, group=None).load()() diff --git a/venv/Lib/site-packages/setuptools/command/upload.py b/venv/Lib/site-packages/setuptools/command/upload.py deleted file mode 100644 index ec7f81e..0000000 --- a/venv/Lib/site-packages/setuptools/command/upload.py +++ /dev/null @@ -1,17 +0,0 @@ -from distutils import log -from distutils.command import upload as orig - -from setuptools.errors import RemovedCommandError - - -class upload(orig.upload): - """Formerly used to upload packages to PyPI.""" - - def run(self): - msg = ( - "The upload command has been removed, use twine to upload " - + "instead (https://pypi.org/p/twine)" - ) - - self.announce("ERROR: " + msg, log.ERROR) - raise RemovedCommandError(msg) diff --git a/venv/Lib/site-packages/setuptools/command/upload_docs.py b/venv/Lib/site-packages/setuptools/command/upload_docs.py deleted file mode 100644 index f429f56..0000000 --- a/venv/Lib/site-packages/setuptools/command/upload_docs.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -"""upload_docs - -Implements a Distutils 'upload_docs' subcommand (upload documentation to -sites other than PyPi such as devpi). 
-""" - -from base64 import standard_b64encode -from distutils import log -from distutils.errors import DistutilsOptionError -import os -import socket -import zipfile -import tempfile -import shutil -import itertools -import functools -import http.client -import urllib.parse - -from .._importlib import metadata - -from .upload import upload - - -def _encode(s): - return s.encode('utf-8', 'surrogateescape') - - -class upload_docs(upload): - # override the default repository as upload_docs isn't - # supported by Warehouse (and won't be). - DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' - - description = 'Upload documentation to sites other than PyPi such as devpi' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('upload-dir=', None, 'directory to upload'), - ] - boolean_options = upload.boolean_options - - def has_sphinx(self): - return bool( - self.upload_dir is None - and metadata.entry_points(group='distutils.commands', name='build_sphinx') - ) - - sub_commands = [('build_sphinx', has_sphinx)] - - def initialize_options(self): - upload.initialize_options(self) - self.upload_dir = None - self.target_dir = None - - def finalize_options(self): - upload.finalize_options(self) - if self.upload_dir is None: - if self.has_sphinx(): - build_sphinx = self.get_finalized_command('build_sphinx') - self.target_dir = dict(build_sphinx.builder_target_dirs)['html'] - else: - build = self.get_finalized_command('build') - self.target_dir = os.path.join(build.build_base, 'docs') - else: - self.ensure_dirname('upload_dir') - self.target_dir = self.upload_dir - if 'pypi.python.org' in self.repository: - log.warn("Upload_docs command is deprecated for PyPi. 
Use RTD instead.") - self.announce('Using upload directory %s' % self.target_dir) - - def create_zipfile(self, filename): - zip_file = zipfile.ZipFile(filename, "w") - try: - self.mkpath(self.target_dir) # just in case - for root, dirs, files in os.walk(self.target_dir): - if root == self.target_dir and not files: - tmpl = "no files found in upload directory '%s'" - raise DistutilsOptionError(tmpl % self.target_dir) - for name in files: - full = os.path.join(root, name) - relative = root[len(self.target_dir):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - finally: - zip_file.close() - - def run(self): - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - tmp_dir = tempfile.mkdtemp() - name = self.distribution.metadata.get_name() - zip_file = os.path.join(tmp_dir, "%s.zip" % name) - try: - self.create_zipfile(zip_file) - self.upload_file(zip_file) - finally: - shutil.rmtree(tmp_dir) - - @staticmethod - def _build_part(item, sep_boundary): - key, values = item - title = '\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(values, list): - values = [values] - for value in values: - if isinstance(value, tuple): - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = _encode(value) - yield sep_boundary - yield _encode(title) - yield b"\n\n" - yield value - if value and value[-1:] == b'\r': - yield b'\n' # write an extra newline (lurve Macs) - - @classmethod - def _build_multipart(cls, data): - """ - Build up the MIME payload for the POST data - """ - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\n--' + boundary.encode('ascii') - end_boundary = sep_boundary + b'--' - end_items = end_boundary, b"\n", - builder = functools.partial( - cls._build_part, - sep_boundary=sep_boundary, - ) - part_groups = map(builder, data.items()) - parts = itertools.chain.from_iterable(part_groups) - body_items = itertools.chain(parts, end_items) - content_type = 'multipart/form-data; boundary=%s' % boundary - return b''.join(body_items), content_type - - def upload_file(self, filename): - with open(filename, 'rb') as f: - content = f.read() - meta = self.distribution.metadata - data = { - ':action': 'doc_upload', - 'name': meta.get_name(), - 'content': (os.path.basename(filename), content), - } - # set up the authentication - credentials = _encode(self.username + ':' + self.password) - credentials = standard_b64encode(credentials).decode('ascii') - auth = "Basic " + credentials - - body, ct = self._build_multipart(data) - - msg = "Submitting documentation to %s" % (self.repository) - self.announce(msg, log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urllib.parse.urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - conn = http.client.HTTPConnection(netloc) - elif schema == 'https': - conn = http.client.HTTPSConnection(netloc) - else: - raise AssertionError("unsupported schema " + schema) - - data = '' - try: - conn.connect() - conn.putrequest("POST", url) - content_type = ct - conn.putheader('Content-type', content_type) - conn.putheader('Content-length', str(len(body))) - conn.putheader('Authorization', auth) - conn.endheaders() - conn.send(body) - except socket.error as e: - self.announce(str(e), log.ERROR) - return - - r = 
conn.getresponse() - if r.status == 200: - msg = 'Server response (%s): %s' % (r.status, r.reason) - self.announce(msg, log.INFO) - elif r.status == 301: - location = r.getheader('Location') - if location is None: - location = 'https://pythonhosted.org/%s/' % meta.get_name() - msg = 'Upload successful. Visit %s' % location - self.announce(msg, log.INFO) - else: - msg = 'Upload failed (%s): %s' % (r.status, r.reason) - self.announce(msg, log.ERROR) - if self.show_response: - print('-' * 75, r.read(), '-' * 75) diff --git a/venv/Lib/site-packages/setuptools/config.py b/venv/Lib/site-packages/setuptools/config.py deleted file mode 100644 index b4e968e..0000000 --- a/venv/Lib/site-packages/setuptools/config.py +++ /dev/null @@ -1,751 +0,0 @@ -import ast -import io -import os -import sys - -import warnings -import functools -import importlib -from collections import defaultdict -from functools import partial -from functools import wraps -from glob import iglob -import contextlib - -from distutils.errors import DistutilsOptionError, DistutilsFileError -from setuptools.extern.packaging.version import Version, InvalidVersion -from setuptools.extern.packaging.specifiers import SpecifierSet - - -class StaticModule: - """ - Attempt to load the module by the name - """ - - def __init__(self, name): - spec = importlib.util.find_spec(name) - with open(spec.origin) as strm: - src = strm.read() - module = ast.parse(src) - vars(self).update(locals()) - del self.self - - def __getattr__(self, attr): - try: - return next( - ast.literal_eval(statement.value) - for statement in self.module.body - if isinstance(statement, ast.Assign) - for target in statement.targets - if isinstance(target, ast.Name) and target.id == attr - ) - except Exception as e: - raise AttributeError( - "{self.name} has no attribute {attr}".format(**locals()) - ) from e - - -@contextlib.contextmanager -def patch_path(path): - """ - Add path to front of sys.path for the duration of the context. - """ - try: - sys.path.insert(0, path) - yield - finally: - sys.path.remove(path) - - -def read_configuration(filepath, find_others=False, ignore_option_errors=False): - """Read given configuration file and returns options from it as a dict. - - :param str|unicode filepath: Path to configuration file - to get options from. - - :param bool find_others: Whether to search for other configuration files - which could be on in various places. - - :param bool ignore_option_errors: Whether to silently ignore - options, values of which could not be resolved (e.g. due to exceptions - in directives such as file:, attr:, etc.). - If False exceptions are propagated as expected. - - :rtype: dict - """ - from setuptools.dist import Distribution, _Distribution - - filepath = os.path.abspath(filepath) - - if not os.path.isfile(filepath): - raise DistutilsFileError('Configuration file %s does not exist.' 
% filepath) - - current_directory = os.getcwd() - os.chdir(os.path.dirname(filepath)) - - try: - dist = Distribution() - - filenames = dist.find_config_files() if find_others else [] - if filepath not in filenames: - filenames.append(filepath) - - _Distribution.parse_config_files(dist, filenames=filenames) - - handlers = parse_configuration( - dist, dist.command_options, ignore_option_errors=ignore_option_errors - ) - - finally: - os.chdir(current_directory) - - return configuration_to_dict(handlers) - - -def _get_option(target_obj, key): - """ - Given a target object and option key, get that option from - the target object, either through a get_{key} method or - from an attribute directly. - """ - getter_name = 'get_{key}'.format(**locals()) - by_attribute = functools.partial(getattr, target_obj, key) - getter = getattr(target_obj, getter_name, by_attribute) - return getter() - - -def configuration_to_dict(handlers): - """Returns configuration data gathered by given handlers as a dict. - - :param list[ConfigHandler] handlers: Handlers list, - usually from parse_configuration() - - :rtype: dict - """ - config_dict = defaultdict(dict) - - for handler in handlers: - for option in handler.set_options: - value = _get_option(handler.target_obj, option) - config_dict[handler.section_prefix][option] = value - - return config_dict - - -def parse_configuration(distribution, command_options, ignore_option_errors=False): - """Performs additional parsing of configuration options - for a distribution. - - Returns a list of used option handlers. - - :param Distribution distribution: - :param dict command_options: - :param bool ignore_option_errors: Whether to silently ignore - options, values of which could not be resolved (e.g. due to exceptions - in directives such as file:, attr:, etc.). - If False exceptions are propagated as expected. - :rtype: list - """ - options = ConfigOptionsHandler(distribution, command_options, ignore_option_errors) - options.parse() - - meta = ConfigMetadataHandler( - distribution.metadata, - command_options, - ignore_option_errors, - distribution.package_dir, - ) - meta.parse() - - return meta, options - - -class ConfigHandler: - """Handles metadata supplied in configuration files.""" - - section_prefix = None - """Prefix for config sections handled by this handler. - Must be provided by class heirs. - - """ - - aliases = {} - """Options aliases. - For compatibility with various packages. E.g.: d2to1 and pbr. - Note: `-` in keys is replaced with `_` by config parser. - - """ - - def __init__(self, target_obj, options, ignore_option_errors=False): - sections = {} - - section_prefix = self.section_prefix - for section_name, section_options in options.items(): - if not section_name.startswith(section_prefix): - continue - - section_name = section_name.replace(section_prefix, '').strip('.') - sections[section_name] = section_options - - self.ignore_option_errors = ignore_option_errors - self.target_obj = target_obj - self.sections = sections - self.set_options = [] - - @property - def parsers(self): - """Metadata item name to parser function mapping.""" - raise NotImplementedError( - '%s must provide .parsers property' % self.__class__.__name__ - ) - - def __setitem__(self, option_name, value): - unknown = tuple() - target_obj = self.target_obj - - # Translate alias into real name. 
- option_name = self.aliases.get(option_name, option_name) - - current_value = getattr(target_obj, option_name, unknown) - - if current_value is unknown: - raise KeyError(option_name) - - if current_value: - # Already inhabited. Skipping. - return - - skip_option = False - parser = self.parsers.get(option_name) - if parser: - try: - value = parser(value) - - except Exception: - skip_option = True - if not self.ignore_option_errors: - raise - - if skip_option: - return - - setter = getattr(target_obj, 'set_%s' % option_name, None) - if setter is None: - setattr(target_obj, option_name, value) - else: - setter(value) - - self.set_options.append(option_name) - - @classmethod - def _parse_list(cls, value, separator=','): - """Represents value as a list. - - Value is split either by separator (defaults to comma) or by lines. - - :param value: - :param separator: List items separator character. - :rtype: list - """ - if isinstance(value, list): # _get_parser_compound case - return value - - if '\n' in value: - value = value.splitlines() - else: - value = value.split(separator) - - return [chunk.strip() for chunk in value if chunk.strip()] - - @classmethod - def _parse_list_glob(cls, value, separator=','): - """Equivalent to _parse_list() but expands any glob patterns using glob(). - - However, unlike with glob() calls, the results remain relative paths. - - :param value: - :param separator: List items separator character. - :rtype: list - """ - glob_characters = ('*', '?', '[', ']', '{', '}') - values = cls._parse_list(value, separator=separator) - expanded_values = [] - for value in values: - - # Has globby characters? - if any(char in value for char in glob_characters): - # then expand the glob pattern while keeping paths *relative*: - expanded_values.extend(sorted( - os.path.relpath(path, os.getcwd()) - for path in iglob(os.path.abspath(value)))) - - else: - # take the value as-is: - expanded_values.append(value) - - return expanded_values - - @classmethod - def _parse_dict(cls, value): - """Represents value as a dict. - - :param value: - :rtype: dict - """ - separator = '=' - result = {} - for line in cls._parse_list(value): - key, sep, val = line.partition(separator) - if sep != separator: - raise DistutilsOptionError( - 'Unable to parse option value to dict: %s' % value - ) - result[key.strip()] = val.strip() - - return result - - @classmethod - def _parse_bool(cls, value): - """Represents value as boolean. - - :param value: - :rtype: bool - """ - value = value.lower() - return value in ('1', 'true', 'yes') - - @classmethod - def _exclude_files_parser(cls, key): - """Returns a parser function to make sure field inputs - are not files. - - Parses a value after getting the key so error messages are - more informative. - - :param key: - :rtype: callable - """ - - def parser(value): - exclude_directive = 'file:' - if value.startswith(exclude_directive): - raise ValueError( - 'Only strings are accepted for the {0} field, ' - 'files are not accepted'.format(key) - ) - return value - - return parser - - @classmethod - def _parse_file(cls, value): - """Represents value as a string, allowing including text - from nearest files using `file:` directive. - - Directive is sandboxed and won't reach anything outside - directory with setup.py. 
- - Examples: - file: README.rst, CHANGELOG.md, src/file.txt - - :param str value: - :rtype: str - """ - include_directive = 'file:' - - if not isinstance(value, str): - return value - - if not value.startswith(include_directive): - return value - - spec = value[len(include_directive) :] - filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) - return '\n'.join( - cls._read_file(path) - for path in filepaths - if (cls._assert_local(path) or True) and os.path.isfile(path) - ) - - @staticmethod - def _assert_local(filepath): - if not filepath.startswith(os.getcwd()): - raise DistutilsOptionError('`file:` directive can not access %s' % filepath) - - @staticmethod - def _read_file(filepath): - with io.open(filepath, encoding='utf-8') as f: - return f.read() - - @classmethod - def _parse_attr(cls, value, package_dir=None): - """Represents value as a module attribute. - - Examples: - attr: package.attr - attr: package.module.attr - - :param str value: - :rtype: str - """ - attr_directive = 'attr:' - if not value.startswith(attr_directive): - return value - - attrs_path = value.replace(attr_directive, '').strip().split('.') - attr_name = attrs_path.pop() - - module_name = '.'.join(attrs_path) - module_name = module_name or '__init__' - - parent_path = os.getcwd() - if package_dir: - if attrs_path[0] in package_dir: - # A custom path was specified for the module we want to import - custom_path = package_dir[attrs_path[0]] - parts = custom_path.rsplit('/', 1) - if len(parts) > 1: - parent_path = os.path.join(os.getcwd(), parts[0]) - module_name = parts[1] - else: - module_name = custom_path - elif '' in package_dir: - # A custom parent directory was specified for all root modules - parent_path = os.path.join(os.getcwd(), package_dir['']) - - with patch_path(parent_path): - try: - # attempt to load value statically - return getattr(StaticModule(module_name), attr_name) - except Exception: - # fallback to simple import - module = importlib.import_module(module_name) - - return getattr(module, attr_name) - - @classmethod - def _get_parser_compound(cls, *parse_methods): - """Returns parser function to represents value as a list. - - Parses a value applying given methods one after another. - - :param parse_methods: - :rtype: callable - """ - - def parse(value): - parsed = value - - for method in parse_methods: - parsed = method(parsed) - - return parsed - - return parse - - @classmethod - def _parse_section_to_dict(cls, section_options, values_parser=None): - """Parses section options into a dictionary. - - Optionally applies a given parser to values. - - :param dict section_options: - :param callable values_parser: - :rtype: dict - """ - value = {} - values_parser = values_parser or (lambda val: val) - for key, (_, val) in section_options.items(): - value[key] = values_parser(val) - return value - - def parse_section(self, section_options): - """Parses configuration file section. - - :param dict section_options: - """ - for (name, (_, value)) in section_options.items(): - try: - self[name] = value - - except KeyError: - pass # Keep silent for a new option may appear anytime. - - def parse(self): - """Parses configuration file items from one - or more related sections. - - """ - for section_name, section_options in self.sections.items(): - - method_postfix = '' - if section_name: # [section.option] variant - method_postfix = '_%s' % section_name - - section_parser_method = getattr( - self, - # Dots in section names are translated into dunderscores. 
- ('parse_section%s' % method_postfix).replace('.', '__'), - None, - ) - - if section_parser_method is None: - raise DistutilsOptionError( - 'Unsupported distribution option section: [%s.%s]' - % (self.section_prefix, section_name) - ) - - section_parser_method(section_options) - - def _deprecated_config_handler(self, func, msg, warning_class): - """this function will wrap around parameters that are deprecated - - :param msg: deprecation message - :param warning_class: class of warning exception to be raised - :param func: function to be wrapped around - """ - - @wraps(func) - def config_handler(*args, **kwargs): - warnings.warn(msg, warning_class) - return func(*args, **kwargs) - - return config_handler - - -class ConfigMetadataHandler(ConfigHandler): - - section_prefix = 'metadata' - - aliases = { - 'home_page': 'url', - 'summary': 'description', - 'classifier': 'classifiers', - 'platform': 'platforms', - } - - strict_mode = False - """We need to keep it loose, to be partially compatible with - `pbr` and `d2to1` packages which also uses `metadata` section. - - """ - - def __init__( - self, target_obj, options, ignore_option_errors=False, package_dir=None - ): - super(ConfigMetadataHandler, self).__init__( - target_obj, options, ignore_option_errors - ) - self.package_dir = package_dir - - @property - def parsers(self): - """Metadata item name to parser function mapping.""" - parse_list = self._parse_list - parse_file = self._parse_file - parse_dict = self._parse_dict - exclude_files_parser = self._exclude_files_parser - - return { - 'platforms': parse_list, - 'keywords': parse_list, - 'provides': parse_list, - 'requires': self._deprecated_config_handler( - parse_list, - "The requires parameter is deprecated, please use " - "install_requires for runtime dependencies.", - DeprecationWarning, - ), - 'obsoletes': parse_list, - 'classifiers': self._get_parser_compound(parse_file, parse_list), - 'license': exclude_files_parser('license'), - 'license_file': self._deprecated_config_handler( - exclude_files_parser('license_file'), - "The license_file parameter is deprecated, " - "use license_files instead.", - DeprecationWarning, - ), - 'license_files': parse_list, - 'description': parse_file, - 'long_description': parse_file, - 'version': self._parse_version, - 'project_urls': parse_dict, - } - - def _parse_version(self, value): - """Parses `version` option value. 
- - :param value: - :rtype: str - - """ - version = self._parse_file(value) - - if version != value: - version = version.strip() - # Be strict about versions loaded from file because it's easy to - # accidentally include newlines and other unintended content - try: - Version(version) - except InvalidVersion: - tmpl = ( - 'Version loaded from {value} does not ' - 'comply with PEP 440: {version}' - ) - raise DistutilsOptionError(tmpl.format(**locals())) - - return version - - version = self._parse_attr(value, self.package_dir) - - if callable(version): - version = version() - - if not isinstance(version, str): - if hasattr(version, '__iter__'): - version = '.'.join(map(str, version)) - else: - version = '%s' % version - - return version - - -class ConfigOptionsHandler(ConfigHandler): - - section_prefix = 'options' - - @property - def parsers(self): - """Metadata item name to parser function mapping.""" - parse_list = self._parse_list - parse_list_semicolon = partial(self._parse_list, separator=';') - parse_bool = self._parse_bool - parse_dict = self._parse_dict - parse_cmdclass = self._parse_cmdclass - - return { - 'zip_safe': parse_bool, - 'include_package_data': parse_bool, - 'package_dir': parse_dict, - 'scripts': parse_list, - 'eager_resources': parse_list, - 'dependency_links': parse_list, - 'namespace_packages': parse_list, - 'install_requires': parse_list_semicolon, - 'setup_requires': parse_list_semicolon, - 'tests_require': parse_list_semicolon, - 'packages': self._parse_packages, - 'entry_points': self._parse_file, - 'py_modules': parse_list, - 'python_requires': SpecifierSet, - 'cmdclass': parse_cmdclass, - } - - def _parse_cmdclass(self, value): - def resolve_class(qualified_class_name): - idx = qualified_class_name.rfind('.') - class_name = qualified_class_name[idx + 1 :] - pkg_name = qualified_class_name[:idx] - - module = __import__(pkg_name) - - return getattr(module, class_name) - - return {k: resolve_class(v) for k, v in self._parse_dict(value).items()} - - def _parse_packages(self, value): - """Parses `packages` option value. - - :param value: - :rtype: list - """ - find_directives = ['find:', 'find_namespace:'] - trimmed_value = value.strip() - - if trimmed_value not in find_directives: - return self._parse_list(value) - - findns = trimmed_value == find_directives[1] - - # Read function arguments from a dedicated section. - find_kwargs = self.parse_section_packages__find( - self.sections.get('packages.find', {}) - ) - - if findns: - from setuptools import find_namespace_packages as find_packages - else: - from setuptools import find_packages - - return find_packages(**find_kwargs) - - def parse_section_packages__find(self, section_options): - """Parses `packages.find` configuration file section. - - To be used in conjunction with _parse_packages(). - - :param dict section_options: - """ - section_data = self._parse_section_to_dict(section_options, self._parse_list) - - valid_keys = ['where', 'include', 'exclude'] - - find_kwargs = dict( - [(k, v) for k, v in section_data.items() if k in valid_keys and v] - ) - - where = find_kwargs.get('where') - if where is not None: - find_kwargs['where'] = where[0] # cast list to single val - - return find_kwargs - - def parse_section_entry_points(self, section_options): - """Parses `entry_points` configuration file section. 
- - :param dict section_options: - """ - parsed = self._parse_section_to_dict(section_options, self._parse_list) - self['entry_points'] = parsed - - def _parse_package_data(self, section_options): - parsed = self._parse_section_to_dict(section_options, self._parse_list) - - root = parsed.get('*') - if root: - parsed[''] = root - del parsed['*'] - - return parsed - - def parse_section_package_data(self, section_options): - """Parses `package_data` configuration file section. - - :param dict section_options: - """ - self['package_data'] = self._parse_package_data(section_options) - - def parse_section_exclude_package_data(self, section_options): - """Parses `exclude_package_data` configuration file section. - - :param dict section_options: - """ - self['exclude_package_data'] = self._parse_package_data(section_options) - - def parse_section_extras_require(self, section_options): - """Parses `extras_require` configuration file section. - - :param dict section_options: - """ - parse_list = partial(self._parse_list, separator=';') - self['extras_require'] = self._parse_section_to_dict( - section_options, parse_list - ) - - def parse_section_data_files(self, section_options): - """Parses `data_files` configuration file section. - - :param dict section_options: - """ - parsed = self._parse_section_to_dict(section_options, self._parse_list_glob) - self['data_files'] = [(k, v) for k, v in parsed.items()] diff --git a/venv/Lib/site-packages/setuptools/dep_util.py b/venv/Lib/site-packages/setuptools/dep_util.py deleted file mode 100644 index 521eb71..0000000 --- a/venv/Lib/site-packages/setuptools/dep_util.py +++ /dev/null @@ -1,25 +0,0 @@ -from distutils.dep_util import newer_group - - -# yes, this is was almost entirely copy-pasted from -# 'newer_pairwise()', this is just another convenience -# function. -def newer_pairwise_group(sources_groups, targets): - """Walk both arguments in parallel, testing if each source group is newer - than its corresponding target. Returns a pair of lists (sources_groups, - targets) where sources is newer than target, according to the semantics - of 'newer_group()'. - """ - if len(sources_groups) != len(targets): - raise ValueError( - "'sources_group' and 'targets' must be the same length") - - # build a pair of lists (sources_groups, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources_groups)): - if newer_group(sources_groups[i], targets[i]): - n_sources.append(sources_groups[i]) - n_targets.append(targets[i]) - - return n_sources, n_targets diff --git a/venv/Lib/site-packages/setuptools/depends.py b/venv/Lib/site-packages/setuptools/depends.py deleted file mode 100644 index adffd12..0000000 --- a/venv/Lib/site-packages/setuptools/depends.py +++ /dev/null @@ -1,176 +0,0 @@ -import sys -import marshal -import contextlib -import dis - -from setuptools.extern.packaging import version - -from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE -from . 
import _imp - - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__( - self, name, requested_version, module, homepage='', - attribute=None, format=None): - - if format is None and requested_version is not None: - format = version.Version - - if format is not None: - requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % (self.name, self.requested_version) - return self.name - - def version_ok(self, version): - """Is 'version' sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version) != "unknown" and self.format(version) >= self.requested_version - - def get_version(self, paths=None, default="unknown"): - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. - """ - - if self.attribute is None: - try: - f, p, i = find_module(self.module, paths) - if f: - f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module, self.attribute, default, paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - def is_present(self, paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - def is_current(self, paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(str(version)) - - -def maybe_close(f): - @contextlib.contextmanager - def empty(): - yield - return - if not f: - return empty() - - return contextlib.closing(f) - - -def get_module_constant(module, symbol, default=-1, paths=None): - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix, mode, kind) = info = find_module(module, paths) - except ImportError: - # Module doesn't exist - return None - - with maybe_close(f): - if kind == PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind == PY_FROZEN: - code = _imp.get_frozen_object(module, paths) - elif kind == PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. :( - imported = _imp.get_module(module, paths, info) - return getattr(imported, symbol, None) - - return extract_constant(code, symbol, default) - - -def extract_constant(code, symbol, default=-1): - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. 
- - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. - """ - if symbol not in code.co_names: - # name's not there, can't possibly be an assignment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for byte_code in dis.Bytecode(code): - op = byte_code.opcode - arg = byte_code.arg - - if op == LOAD_CONST: - const = code.co_consts[arg] - elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL): - return const - else: - const = default - - -def _update_globals(): - """ - Patch the globals to remove the objects not available on some platforms. - - XXX it'd be better to test assertions about bytecode instead. - """ - - if not sys.platform.startswith('java') and sys.platform != 'cli': - return - incompatible = 'extract_constant', 'get_module_constant' - for name in incompatible: - del globals()[name] - __all__.remove(name) - - -_update_globals() diff --git a/venv/Lib/site-packages/setuptools/dist.py b/venv/Lib/site-packages/setuptools/dist.py deleted file mode 100644 index e825785..0000000 --- a/venv/Lib/site-packages/setuptools/dist.py +++ /dev/null @@ -1,1192 +0,0 @@ -# -*- coding: utf-8 -*- -__all__ = ['Distribution'] - -import io -import sys -import re -import os -import warnings -import numbers -import distutils.log -import distutils.core -import distutils.cmd -import distutils.dist -import distutils.command -from distutils.util import strtobool -from distutils.debug import DEBUG -from distutils.fancy_getopt import translate_longopt -from glob import iglob -import itertools -import textwrap -from typing import List, Optional, TYPE_CHECKING - -from collections import defaultdict -from email import message_from_file - -from distutils.errors import DistutilsOptionError, DistutilsSetupError -from distutils.util import rfc822_escape - -from setuptools.extern import packaging -from setuptools.extern import ordered_set -from setuptools.extern.more_itertools import unique_everseen, always_iterable - -from ._importlib import metadata - -from . import SetuptoolsDeprecationWarning - -import setuptools -import setuptools.command -from setuptools import windows_support -from setuptools.monkey import get_unpatched -from setuptools.config import parse_configuration -import pkg_resources -from setuptools.extern.packaging import version, requirements -from . import _reqs -from . 
import _entry_points - -if TYPE_CHECKING: - from email.message import Message - -__import__('setuptools.extern.packaging.specifiers') -__import__('setuptools.extern.packaging.version') - - -def _get_unpatched(cls): - warnings.warn("Do not call this function", DistDeprecationWarning) - return get_unpatched(cls) - - -def get_metadata_version(self): - mv = getattr(self, 'metadata_version', None) - if mv is None: - mv = version.Version('2.1') - self.metadata_version = mv - return mv - - -def rfc822_unescape(content: str) -> str: - """Reverse RFC-822 escaping by removing leading whitespaces from content.""" - lines = content.splitlines() - if len(lines) == 1: - return lines[0].lstrip() - return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:])))) - - -def _read_field_from_msg(msg: "Message", field: str) -> Optional[str]: - """Read Message header field.""" - value = msg[field] - if value == 'UNKNOWN': - return None - return value - - -def _read_field_unescaped_from_msg(msg: "Message", field: str) -> Optional[str]: - """Read Message header field and apply rfc822_unescape.""" - value = _read_field_from_msg(msg, field) - if value is None: - return value - return rfc822_unescape(value) - - -def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]: - """Read Message header field and return all results as list.""" - values = msg.get_all(field, None) - if values == []: - return None - return values - - -def _read_payload_from_msg(msg: "Message") -> Optional[str]: - value = msg.get_payload().strip() - if value == 'UNKNOWN': - return None - return value - - -def read_pkg_file(self, file): - """Reads the metadata values from a file object.""" - msg = message_from_file(file) - - self.metadata_version = version.Version(msg['metadata-version']) - self.name = _read_field_from_msg(msg, 'name') - self.version = _read_field_from_msg(msg, 'version') - self.description = _read_field_from_msg(msg, 'summary') - # we are filling author only. - self.author = _read_field_from_msg(msg, 'author') - self.maintainer = None - self.author_email = _read_field_from_msg(msg, 'author-email') - self.maintainer_email = None - self.url = _read_field_from_msg(msg, 'home-page') - self.download_url = _read_field_from_msg(msg, 'download-url') - self.license = _read_field_unescaped_from_msg(msg, 'license') - - self.long_description = _read_field_unescaped_from_msg(msg, 'description') - if ( - self.long_description is None and - self.metadata_version >= version.Version('2.1') - ): - self.long_description = _read_payload_from_msg(msg) - self.description = _read_field_from_msg(msg, 'summary') - - if 'keywords' in msg: - self.keywords = _read_field_from_msg(msg, 'keywords').split(',') - - self.platforms = _read_list_from_msg(msg, 'platform') - self.classifiers = _read_list_from_msg(msg, 'classifier') - - # PEP 314 - these fields only exist in 1.1 - if self.metadata_version == version.Version('1.1'): - self.requires = _read_list_from_msg(msg, 'requires') - self.provides = _read_list_from_msg(msg, 'provides') - self.obsoletes = _read_list_from_msg(msg, 'obsoletes') - else: - self.requires = None - self.provides = None - self.obsoletes = None - - self.license_files = _read_list_from_msg(msg, 'license-file') - - -def single_line(val): - """ - Quick and dirty validation for Summary pypa/setuptools#1390. - """ - if '\n' in val: - # TODO: Replace with `raise ValueError("newlines not allowed")` - # after reviewing #2893. 
- warnings.warn("newlines not allowed and will break in the future") - val = val.strip().split('\n')[0] - return val - - -# Based on Python 3.5 version -def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME - """Write the PKG-INFO format data to a file object.""" - version = self.get_metadata_version() - - def write_field(key, value): - file.write("%s: %s\n" % (key, value)) - - write_field('Metadata-Version', str(version)) - write_field('Name', self.get_name()) - write_field('Version', self.get_version()) - write_field('Summary', single_line(self.get_description())) - - optional_fields = ( - ('Home-page', 'url'), - ('Download-URL', 'download_url'), - ('Author', 'author'), - ('Author-email', 'author_email'), - ('Maintainer', 'maintainer'), - ('Maintainer-email', 'maintainer_email'), - ) - - for field, attr in optional_fields: - attr_val = getattr(self, attr, None) - if attr_val is not None: - write_field(field, attr_val) - - license = rfc822_escape(self.get_license()) - write_field('License', license) - for project_url in self.project_urls.items(): - write_field('Project-URL', '%s, %s' % project_url) - - keywords = ','.join(self.get_keywords()) - if keywords: - write_field('Keywords', keywords) - - for platform in self.get_platforms(): - write_field('Platform', platform) - - self._write_list(file, 'Classifier', self.get_classifiers()) - - # PEP 314 - self._write_list(file, 'Requires', self.get_requires()) - self._write_list(file, 'Provides', self.get_provides()) - self._write_list(file, 'Obsoletes', self.get_obsoletes()) - - # Setuptools specific for PEP 345 - if hasattr(self, 'python_requires'): - write_field('Requires-Python', self.python_requires) - - # PEP 566 - if self.long_description_content_type: - write_field('Description-Content-Type', self.long_description_content_type) - if self.provides_extras: - for extra in self.provides_extras: - write_field('Provides-Extra', extra) - - self._write_list(file, 'License-File', self.license_files or []) - - file.write("\n%s\n\n" % self.get_long_description()) - - -sequence = tuple, list - - -def check_importable(dist, attr, value): - try: - ep = metadata.EntryPoint(value=value, name=None, group=None) - assert not ep.extras - except (TypeError, ValueError, AttributeError, AssertionError) as e: - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" % (attr, value) - ) from e - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list""" - try: - # verify that value is a list or tuple to exclude unordered - # or single-use iterables - assert isinstance(value, (list, tuple)) - # verify that elements of value are strings - assert ''.join(value) != value - except (TypeError, ValueError, AttributeError, AssertionError) as e: - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr, value) - ) from e - - -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - ns_packages = value - assert_string_list(dist, attr, ns_packages) - for nsp in ns_packages: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " - + "namespace package %r" % nsp - ) - parent, sep, child = nsp.rpartition('.') - if parent and parent not in ns_packages: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", - nsp, - parent, - ) - - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is 
valid""" - try: - list(itertools.starmap(_check_extra, value.items())) - except (TypeError, ValueError, AttributeError) as e: - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." - ) from e - - -def _check_extra(extra, reqs): - name, sep, marker = extra.partition(':') - if marker and pkg_resources.invalid_marker(marker): - raise DistutilsSetupError("Invalid environment marker: " + marker) - list(_reqs.parse(reqs)) - - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - tmpl = "{attr!r} must be a boolean value (got {value!r})" - raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) - - -def invalid_unless_false(dist, attr, value): - if not value: - warnings.warn(f"{attr} is ignored.", DistDeprecationWarning) - return - raise DistutilsSetupError(f"{attr} is invalid.") - - -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(_reqs.parse(value)) - if isinstance(value, (dict, set)): - raise TypeError("Unordered types are not allowed") - except (TypeError, ValueError) as error: - tmpl = ( - "{attr!r} must be a string or list of strings " - "containing valid project/version requirement specifiers; {error}" - ) - raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error - - -def check_specifier(dist, attr, value): - """Verify that value is a valid version specifier""" - try: - packaging.specifiers.SpecifierSet(value) - except (packaging.specifiers.InvalidSpecifier, AttributeError) as error: - tmpl = ( - "{attr!r} must be a string " "containing valid version specifiers; {error}" - ) - raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error - - -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - _entry_points.load(value) - except Exception as e: - raise DistutilsSetupError(e) from e - - -def check_test_suite(dist, attr, value): - if not isinstance(value, str): - raise DistutilsSetupError("test_suite must be a string") - - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if not isinstance(value, dict): - raise DistutilsSetupError( - "{!r} must be a dictionary mapping package names to lists of " - "string wildcard patterns".format(attr) - ) - for k, v in value.items(): - if not isinstance(k, str): - raise DistutilsSetupError( - "keys of {!r} dict must be strings (got {!r})".format(attr, k) - ) - assert_string_list(dist, 'values of {!r} dict'.format(attr), v) - - -def check_packages(dist, attr, value): - for pkgname in value: - if not re.match(r'\w+(\.\w+)*', pkgname): - distutils.log.warn( - "WARNING: %r not a valid package name; please use only " - ".-separated package names in setup.py", - pkgname, - ) - - -_Distribution = get_unpatched(distutils.core.Distribution) - - -class Distribution(_Distribution): - """Distribution with support for tests and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. They will be installed - automatically when the package is installed. 
If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'test_suite' -- the name of a test suite to run for the 'test' command. - If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. - """ - - _DISTUTILS_UNSUPPORTED_METADATA = { - 'long_description_content_type': lambda: None, - 'project_urls': dict, - 'provides_extras': ordered_set.OrderedSet, - 'license_file': lambda: None, - 'license_files': lambda: None, - } - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. - # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__(self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - attrs = attrs or {} - self.dist_files = [] - # Filter-out setuptools' specific options. 
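For orientation while reading the class docstring above, here is a minimal, purely illustrative sketch of how the setuptools-specific keywords it describes ('install_requires', 'extras_require', 'package_data') are typically passed to setup(); the project name and requirements are hypothetical.

from setuptools import setup, find_packages

setup(
    name="example-project",                              # hypothetical project
    version="0.1.0",
    packages=find_packages(),
    install_requires=["requests>=2.0"],                   # plain runtime requirement
    extras_require={"reST": ["docutils>=0.3"]},           # optional extra and its deps
    package_data={"example_project": ["data/*.dat"]},     # per-package data globs
)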
- self.src_root = attrs.pop("src_root", None) - self.patch_missing_pkg_info(attrs) - self.dependency_links = attrs.pop('dependency_links', []) - self.setup_requires = attrs.pop('setup_requires', []) - for ep in metadata.entry_points(group='distutils.setup_keywords'): - vars(self).setdefault(ep.name, None) - _Distribution.__init__( - self, - { - k: v - for k, v in attrs.items() - if k not in self._DISTUTILS_UNSUPPORTED_METADATA - }, - ) - - self._set_metadata_defaults(attrs) - - self.metadata.version = self._normalize_version( - self._validate_version(self.metadata.version) - ) - self._finalize_requires() - - def _validate_metadata(self): - required = {"name"} - provided = { - key - for key in vars(self.metadata) - if getattr(self.metadata, key, None) is not None - } - missing = required - provided - - if missing: - msg = f"Required package metadata is missing: {missing}" - raise DistutilsSetupError(msg) - - def _set_metadata_defaults(self, attrs): - """ - Fill-in missing metadata fields not supported by distutils. - Some fields may have been set by other tools (e.g. pbr). - Those fields (vars(self.metadata)) take precedence to - supplied attrs. - """ - for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items(): - vars(self.metadata).setdefault(option, attrs.get(option, default())) - - @staticmethod - def _normalize_version(version): - if isinstance(version, setuptools.sic) or version is None: - return version - - normalized = str(packaging.version.Version(version)) - if version != normalized: - tmpl = "Normalizing '{version}' to '{normalized}'" - warnings.warn(tmpl.format(**locals())) - return normalized - return version - - @staticmethod - def _validate_version(version): - if isinstance(version, numbers.Number): - # Some people apparently take "version number" too literally :) - version = str(version) - - if version is not None: - try: - packaging.version.Version(version) - except (packaging.version.InvalidVersion, TypeError): - warnings.warn( - "The version specified (%r) is an invalid version, this " - "may not work as expected with newer versions of " - "setuptools, pip, and PyPI. Please see PEP 440 for more " - "details." % version - ) - return setuptools.sic(version) - return version - - def _finalize_requires(self): - """ - Set `metadata.python_requires` and fix environment markers - in `install_requires` and `extras_require`. - """ - if getattr(self, 'python_requires', None): - self.metadata.python_requires = self.python_requires - - if getattr(self, 'extras_require', None): - for extra in self.extras_require.keys(): - # Since this gets called multiple times at points where the - # keys have become 'converted' extras, ensure that we are only - # truly adding extras we haven't seen before here. - extra = extra.split(':')[0] - if extra: - self.metadata.provides_extras.add(extra) - - self._convert_extras_requirements() - self._move_install_requirements_markers() - - def _convert_extras_requirements(self): - """ - Convert requirements in `extras_require` of the form - `"extra": ["barbazquux; {marker}"]` to - `"extra:{marker}": ["barbazquux"]`. - """ - spec_ext_reqs = getattr(self, 'extras_require', None) or {} - self._tmp_extras_require = defaultdict(list) - for section, v in spec_ext_reqs.items(): - # Do not strip empty sections. 
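A small sketch of the rewrite `_convert_extras_requirements` describes: a requirement carrying an environment marker contributes a ':{marker}' suffix to its extras key. This uses the third-party 'packaging' library directly (the vendored code goes through _reqs.parse); the requirement string is made up.

from packaging.requirements import Requirement

req = Requirement('barbazquux; python_version < "3.8"')   # hypothetical requirement
suffix = ':' + str(req.marker) if req.marker else ''
print('extra' + suffix, '->', [req.name])
# extra:python_version < "3.8" -> ['barbazquux']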
- self._tmp_extras_require[section] - for r in _reqs.parse(v): - suffix = self._suffix_for(r) - self._tmp_extras_require[section + suffix].append(r) - - @staticmethod - def _suffix_for(req): - """ - For a requirement, return the 'extras_require' suffix for - that requirement. - """ - return ':' + str(req.marker) if req.marker else '' - - def _move_install_requirements_markers(self): - """ - Move requirements in `install_requires` that are using environment - markers `extras_require`. - """ - - # divide the install_requires into two sets, simple ones still - # handled by install_requires and more complex ones handled - # by extras_require. - - def is_simple_req(req): - return not req.marker - - spec_inst_reqs = getattr(self, 'install_requires', None) or () - inst_reqs = list(_reqs.parse(spec_inst_reqs)) - simple_reqs = filter(is_simple_req, inst_reqs) - complex_reqs = itertools.filterfalse(is_simple_req, inst_reqs) - self.install_requires = list(map(str, simple_reqs)) - - for r in complex_reqs: - self._tmp_extras_require[':' + str(r.marker)].append(r) - self.extras_require = dict( - (k, [str(r) for r in map(self._clean_req, v)]) - for k, v in self._tmp_extras_require.items() - ) - - def _clean_req(self, req): - """ - Given a Requirement, remove environment markers and return it. - """ - req.marker = None - return req - - def _finalize_license_files(self): - """Compute names of all license files which should be included.""" - license_files: Optional[List[str]] = self.metadata.license_files - patterns: List[str] = license_files if license_files else [] - - license_file: Optional[str] = self.metadata.license_file - if license_file and license_file not in patterns: - patterns.append(license_file) - - if license_files is None and license_file is None: - # Default patterns match the ones wheel uses - # See https://wheel.readthedocs.io/en/stable/user_guide.html - # -> 'Including license files in the generated wheel file' - patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*') - - self.metadata.license_files = list( - unique_everseen(self._expand_patterns(patterns)) - ) - - @staticmethod - def _expand_patterns(patterns): - """ - >>> list(Distribution._expand_patterns(['LICENSE'])) - ['LICENSE'] - >>> list(Distribution._expand_patterns(['setup.cfg', 'LIC*'])) - ['setup.cfg', 'LICENSE'] - """ - return ( - path - for pattern in patterns - for path in sorted(iglob(pattern)) - if not path.endswith('~') and os.path.isfile(path) - ) - - # FIXME: 'Distribution._parse_config_files' is too complex (14) - def _parse_config_files(self, filenames=None): # noqa: C901 - """ - Adapted from distutils.dist.Distribution.parse_config_files, - this method provides the same functionality in subtly-improved - ways. 
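One of those improvements, visible in the body that follows, is a case-preserving, UTF-8 read of the config files; a stdlib-only sketch of that core step (the setup.cfg path is assumed to exist):

from configparser import ConfigParser

parser = ConfigParser()
parser.optionxform = str                       # keep option names case-sensitive
parser.read('setup.cfg', encoding='utf-8')     # assumed to exist in the CWD
for section in parser.sections():
    print(section, dict(parser.items(section)))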
- """ - from configparser import ConfigParser - - # Ignore install directory options if we have a venv - ignore_options = ( - [] - if sys.prefix == sys.base_prefix - else [ - 'install-base', - 'install-platbase', - 'install-lib', - 'install-platlib', - 'install-purelib', - 'install-headers', - 'install-scripts', - 'install-data', - 'prefix', - 'exec-prefix', - 'home', - 'user', - 'root', - ] - ) - - ignore_options = frozenset(ignore_options) - - if filenames is None: - filenames = self.find_config_files() - - if DEBUG: - self.announce("Distribution.parse_config_files():") - - parser = ConfigParser() - parser.optionxform = str - for filename in filenames: - with io.open(filename, encoding='utf-8') as reader: - if DEBUG: - self.announce(" reading {filename}".format(**locals())) - parser.read_file(reader) - for section in parser.sections(): - options = parser.options(section) - opt_dict = self.get_option_dict(section) - - for opt in options: - if opt == '__name__' or opt in ignore_options: - continue - - val = parser.get(section, opt) - opt = self.warn_dash_deprecation(opt, section) - opt = self.make_option_lowercase(opt, section) - opt_dict[opt] = (filename, val) - - # Make the ConfigParser forget everything (so we retain - # the original filenames that options come from) - parser.__init__() - - if 'global' not in self.command_options: - return - - # If there was a "global" section in the config file, use it - # to set Distribution options. - - for (opt, (src, val)) in self.command_options['global'].items(): - alias = self.negative_opt.get(opt) - if alias: - val = not strtobool(val) - elif opt in ('verbose', 'dry_run'): # ugh! - val = strtobool(val) - - try: - setattr(self, alias or opt, val) - except ValueError as e: - raise DistutilsOptionError(e) from e - - def warn_dash_deprecation(self, opt, section): - if section in ( - 'options.extras_require', - 'options.data_files', - ): - return opt - - underscore_opt = opt.replace('-', '_') - commands = list(itertools.chain( - distutils.command.__all__, - self._setuptools_commands(), - )) - if ( - not section.startswith('options') - and section != 'metadata' - and section not in commands - ): - return underscore_opt - - if '-' in opt: - warnings.warn( - "Usage of dash-separated '%s' will not be supported in future " - "versions. Please use the underscore name '%s' instead" - % (opt, underscore_opt) - ) - return underscore_opt - - def _setuptools_commands(self): - try: - return metadata.distribution('setuptools').entry_points.names - except metadata.PackageNotFoundError: - # during bootstrapping, distribution doesn't exist - return [] - - def make_option_lowercase(self, opt, section): - if section != 'metadata' or opt.islower(): - return opt - - lowercase_opt = opt.lower() - warnings.warn( - "Usage of uppercase key '%s' in '%s' will be deprecated in future " - "versions. Please use lowercase '%s' instead" - % (opt, section, lowercase_opt) - ) - return lowercase_opt - - # FIXME: 'Distribution._set_command_options' is too complex (14) - def _set_command_options(self, command_obj, option_dict=None): # noqa: C901 - """ - Set the options for 'command_obj' from 'option_dict'. Basically - this means copying elements of a dictionary ('option_dict') to - attributes of an instance ('command'). - - 'command_obj' must be a Command instance. If 'option_dict' is not - supplied, uses the standard option dictionary for this command - (from 'self.command_options'). 
- - (Adopted from distutils.dist.Distribution._set_command_options) - """ - command_name = command_obj.get_command_name() - if option_dict is None: - option_dict = self.get_option_dict(command_name) - - if DEBUG: - self.announce(" setting options for '%s' command:" % command_name) - for (option, (source, value)) in option_dict.items(): - if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, source)) - try: - bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] - except AttributeError: - bool_opts = [] - try: - neg_opt = command_obj.negative_opt - except AttributeError: - neg_opt = {} - - try: - is_string = isinstance(value, str) - if option in neg_opt and is_string: - setattr(command_obj, neg_opt[option], not strtobool(value)) - elif option in bool_opts and is_string: - setattr(command_obj, option, strtobool(value)) - elif hasattr(command_obj, option): - setattr(command_obj, option, value) - else: - raise DistutilsOptionError( - "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option) - ) - except ValueError as e: - raise DistutilsOptionError(e) from e - - def parse_config_files(self, filenames=None, ignore_option_errors=False): - """Parses configuration files from various levels - and loads configuration. - - """ - self._parse_config_files(filenames=filenames) - - parse_configuration( - self, self.command_options, ignore_option_errors=ignore_option_errors - ) - self._finalize_requires() - self._finalize_license_files() - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - resolved_dists = pkg_resources.working_set.resolve( - _reqs.parse(requires), - installer=self.fetch_build_egg, - replace_conflicting=True, - ) - for dist in resolved_dists: - pkg_resources.working_set.add(dist, replace=True) - return resolved_dists - - def finalize_options(self): - """ - Allow plugins to apply arbitrary operations to the - distribution. Each hook may optionally define a 'order' - to influence the order of execution. Smaller numbers - go first and the default is 0. - """ - group = 'setuptools.finalize_distribution_options' - - def by_order(hook): - return getattr(hook, 'order', 0) - - defined = metadata.entry_points(group=group) - filtered = itertools.filterfalse(self._removed, defined) - loaded = map(lambda e: e.load(), filtered) - for ep in sorted(loaded, key=by_order): - ep(self) - - @staticmethod - def _removed(ep): - """ - When removing an entry point, if metadata is loaded - from an older version of Setuptools, that removed - entry point will attempt to be loaded and will fail. - See #2765 for more details. - """ - removed = { - # removed 2021-09-05 - '2to3_doctests', - } - return ep.name in removed - - def _finalize_setup_keywords(self): - for ep in metadata.entry_points(group='distutils.setup_keywords'): - value = getattr(self, ep.name, None) - if value is not None: - self._install_dependencies(ep) - ep.load()(self, ep.name, value) - - def _install_dependencies(self, ep): - """ - Given an entry point, ensure that any declared extras for - its distribution are installed. 
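The hook-ordering rule used by finalize_options further above is easy to see in isolation: hooks are sorted by an optional 'order' attribute that defaults to 0, smaller values first. The hook names below are made up.

def add_defaults(dist): ...                    # hypothetical hook, default order 0
def late_cleanup(dist): ...                    # hypothetical hook meant to run last
late_cleanup.order = 10

def by_order(hook):
    return getattr(hook, 'order', 0)

for hook in sorted([late_cleanup, add_defaults], key=by_order):
    print(hook.__name__)                       # add_defaults, then late_cleanup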
- """ - reqs = { - req - for req in map(requirements.Requirement, always_iterable(ep.dist.requires)) - for extra in ep.extras - if extra in req.extras - } - missing = itertools.filterfalse(self._is_installed, reqs) - for req in missing: - # fetch_build_egg expects pkg_resources.Requirement - self.fetch_build_egg(pkg_resources.Requirement(str(req))) - - def _is_installed(self, req): - try: - dist = metadata.distribution(req.name) - except metadata.PackageNotFoundError: - return False - found_ver = packaging.version.Version(dist.version()) - return found_ver in req.specifier - - def get_egg_cache_dir(self): - egg_cache_dir = os.path.join(os.curdir, '.eggs') - if not os.path.exists(egg_cache_dir): - os.mkdir(egg_cache_dir) - windows_support.hide_file(egg_cache_dir) - readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') - with open(readme_txt_filename, 'w') as f: - f.write( - 'This directory contains eggs that were downloaded ' - 'by setuptools to build, test, and run plug-ins.\n\n' - ) - f.write( - 'This directory caches those eggs to prevent ' - 'repeated downloads.\n\n' - ) - f.write('However, it is safe to delete this directory.\n\n') - - return egg_cache_dir - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - from setuptools.installer import fetch_build_egg - - return fetch_build_egg(self, req) - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - eps = metadata.entry_points(group='distutils.commands', name=command) - for ep in eps: - self._install_dependencies(ep) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in metadata.entry_points(group='distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load() - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - def get_command_list(self): - for ep in metadata.entry_points(group='distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load() - self.cmdclass[ep.name] = cmdclass - return _Distribution.get_command_list(self) - - def include(self, **attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.include(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k, v in attrs.items(): - include = getattr(self, '_include_' + k, None) - if include: - include(v) - else: - self._include_misc(k, v) - - def exclude_package(self, package): - """Remove packages, modules, and extensions in named package""" - - pfx = package + '.' 
- if self.packages: - self.packages = [ - p for p in self.packages if p != package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules if p != package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p - for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) - ] - - def has_contents_for(self, package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package + '.' - - for p in self.iter_distribution_names(): - if p == package or p.startswith(pfx): - return True - - def _exclude_misc(self, name, value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value, sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self, name) - except AttributeError as e: - raise DistutilsSetupError("%s: No such distribution setting" % name) from e - if old is not None and not isinstance(old, sequence): - raise DistutilsSetupError( - name + ": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self, name, [item for item in old if item not in value]) - - def _include_misc(self, name, value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value, sequence): - raise DistutilsSetupError("%s: setting must be a list (%r)" % (name, value)) - try: - old = getattr(self, name) - except AttributeError as e: - raise DistutilsSetupError("%s: No such distribution setting" % name) from e - if old is None: - setattr(self, name, value) - elif not isinstance(old, sequence): - raise DistutilsSetupError( - name + ": this setting cannot be changed via include/exclude" - ) - else: - new = [item for item in value if item not in old] - setattr(self, name, old + new) - - def exclude(self, **attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k, v in attrs.items(): - exclude = getattr(self, '_exclude_' + k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k, v) - - def _exclude_packages(self, packages): - if not isinstance(packages, sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - list(map(self.exclude_package, packages)) - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src, alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! 
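A self-contained sketch of the shlex-based alias expansion performed just below; the command line and the [aliases] entry are hypothetical.

import shlex

args = ['release', '--quiet']                  # hypothetical argv tail
alias = 'sdist bdist_wheel'                    # hypothetical [aliases] value
args[:1] = shlex.split(alias, comments=True)
print(args)                                    # ['sdist', 'bdist_wheel', '--quiet']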
- import shlex - - args[:1] = shlex.split(alias, True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class, 'command_consumes_arguments', None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. - """ - - d = {} - - for cmd, opts in self.command_options.items(): - - for opt, (src, val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_', '-') - - if val == 0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj, 'negative_opt', {})) - for neg, pos in neg_opt.items(): - if pos == opt: - opt = neg - val = None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val == 1: - val = None - - d.setdefault(cmd, {})[opt] = val - - return d - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext, tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - import sys - - if self.help_commands: - return _Distribution.handle_display_options(self, option_order) - - # Stdout may be StringIO (e.g. in tests) - if not isinstance(sys.stdout, io.TextIOWrapper): - return _Distribution.handle_display_options(self, option_order) - - # Don't wrap stdout if utf-8 is already the encoding. Provides - # workaround for #334. - if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): - return _Distribution.handle_display_options(self, option_order) - - # Print metadata in UTF-8 no matter the platform - encoding = sys.stdout.encoding - errors = sys.stdout.errors - newline = sys.platform != 'win32' and '\n' or None - line_buffering = sys.stdout.line_buffering - - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), 'utf-8', errors, newline, line_buffering - ) - try: - return _Distribution.handle_display_options(self, option_order) - finally: - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), encoding, errors, newline, line_buffering - ) - - -class DistDeprecationWarning(SetuptoolsDeprecationWarning): - """Class for warning about deprecations in dist in - setuptools. Not ignored by default, unlike DeprecationWarning.""" diff --git a/venv/Lib/site-packages/setuptools/errors.py b/venv/Lib/site-packages/setuptools/errors.py deleted file mode 100644 index f4d35a6..0000000 --- a/venv/Lib/site-packages/setuptools/errors.py +++ /dev/null @@ -1,40 +0,0 @@ -"""setuptools.errors - -Provides exceptions used by setuptools modules. 
-""" - -from distutils import errors as _distutils_errors -from distutils.errors import DistutilsError - - -class RemovedCommandError(DistutilsError, RuntimeError): - """Error used for commands that have been removed in setuptools. - - Since ``setuptools`` is built on ``distutils``, simply removing a command - from ``setuptools`` will make the behavior fall back to ``distutils``; this - error is raised if a command exists in ``distutils`` but has been actively - removed in ``setuptools``. - """ - - -# Re-export errors from distutils to facilitate the migration to PEP632 - -ByteCompileError = _distutils_errors.DistutilsByteCompileError -CCompilerError = _distutils_errors.CCompilerError -ClassError = _distutils_errors.DistutilsClassError -CompileError = _distutils_errors.CompileError -ExecError = _distutils_errors.DistutilsExecError -FileError = _distutils_errors.DistutilsFileError -InternalError = _distutils_errors.DistutilsInternalError -LibError = _distutils_errors.LibError -LinkError = _distutils_errors.LinkError -ModuleError = _distutils_errors.DistutilsModuleError -OptionError = _distutils_errors.DistutilsOptionError -PlatformError = _distutils_errors.DistutilsPlatformError -PreprocessError = _distutils_errors.PreprocessError -SetupError = _distutils_errors.DistutilsSetupError -TemplateError = _distutils_errors.DistutilsTemplateError -UnknownFileError = _distutils_errors.UnknownFileError - -# The root error class in the hierarchy -BaseError = _distutils_errors.DistutilsError diff --git a/venv/Lib/site-packages/setuptools/extension.py b/venv/Lib/site-packages/setuptools/extension.py deleted file mode 100644 index f696c9c..0000000 --- a/venv/Lib/site-packages/setuptools/extension.py +++ /dev/null @@ -1,55 +0,0 @@ -import re -import functools -import distutils.core -import distutils.errors -import distutils.extension - -from .monkey import get_unpatched - - -def _have_cython(): - """ - Return True if Cython can be imported. - """ - cython_impl = 'Cython.Distutils.build_ext' - try: - # from (cython_impl) import build_ext - __import__(cython_impl, fromlist=['build_ext']).build_ext - return True - except Exception: - pass - return False - - -# for compatibility -have_pyrex = _have_cython - -_Extension = get_unpatched(distutils.core.Extension) - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - def __init__(self, name, sources, *args, **kw): - # The *args is needed for compatibility as calls may use positional - # arguments. py_limited_api may be set only via keyword. - self.py_limited_api = kw.pop("py_limited_api", False) - super().__init__(name, sources, *args, **kw) - - def _convert_pyx_sources_to_lang(self): - """ - Replace sources with .pyx extensions to sources with the target - language extension. This mechanism allows language authors to supply - pre-converted sources but to prefer the .pyx sources. 
- """ - if _have_cython(): - # the build has Cython, so allow it to compile the .pyx files - return - lang = self.language or '' - target_ext = '.cpp' if lang.lower() == 'c++' else '.c' - sub = functools.partial(re.sub, '.pyx$', target_ext) - self.sources = list(map(sub, self.sources)) - - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" diff --git a/venv/Lib/site-packages/setuptools/extern/__init__.py b/venv/Lib/site-packages/setuptools/extern/__init__.py deleted file mode 100644 index 98235a4..0000000 --- a/venv/Lib/site-packages/setuptools/extern/__init__.py +++ /dev/null @@ -1,76 +0,0 @@ -import importlib.util -import sys - - -class VendorImporter: - """ - A PEP 302 meta path importer for finding optionally-vendored - or otherwise naturally-installed packages from root_name. - """ - - def __init__(self, root_name, vendored_names=(), vendor_pkg=None): - self.root_name = root_name - self.vendored_names = set(vendored_names) - self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') - - @property - def search_path(self): - """ - Search first the vendor package then as a natural package. - """ - yield self.vendor_pkg + '.' - yield '' - - def _module_matches_namespace(self, fullname): - """Figure out if the target module is vendored.""" - root, base, target = fullname.partition(self.root_name + '.') - return not root and any(map(target.startswith, self.vendored_names)) - - def load_module(self, fullname): - """ - Iterate over the search path to locate and load fullname. - """ - root, base, target = fullname.partition(self.root_name + '.') - for prefix in self.search_path: - try: - extant = prefix + target - __import__(extant) - mod = sys.modules[extant] - sys.modules[fullname] = mod - return mod - except ImportError: - pass - else: - raise ImportError( - "The '{target}' package is required; " - "normally this is bundled with this package so if you get " - "this warning, consult the packager of your " - "distribution.".format(**locals()) - ) - - def create_module(self, spec): - return self.load_module(spec.name) - - def exec_module(self, module): - pass - - def find_spec(self, fullname, path=None, target=None): - """Return a module spec for vendored names.""" - return ( - importlib.util.spec_from_loader(fullname, self) - if self._module_matches_namespace(fullname) else None - ) - - def install(self): - """ - Install this importer into sys.meta_path if not already present. - """ - if self not in sys.meta_path: - sys.meta_path.append(self) - - -names = ( - 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'importlib_metadata', - 'zipp', 'importlib_resources', 'jaraco', 'typing_extensions', -) -VendorImporter(__name__, names, 'setuptools._vendor').install() diff --git a/venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-39.pyc b/venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 474cbcd..0000000 Binary files a/venv/Lib/site-packages/setuptools/extern/__pycache__/__init__.cpython-39.pyc and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/glob.py b/venv/Lib/site-packages/setuptools/glob.py deleted file mode 100644 index 87062b8..0000000 --- a/venv/Lib/site-packages/setuptools/glob.py +++ /dev/null @@ -1,167 +0,0 @@ -""" -Filename globbing utility. Mostly a copy of `glob` from Python 3.5. - -Changes include: - * `yield from` and PEP3102 `*` removed. - * Hidden files are not ignored. 
-""" - -import os -import re -import fnmatch - -__all__ = ["glob", "iglob", "escape"] - - -def glob(pathname, recursive=False): - """Return a list of paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - return list(iglob(pathname, recursive=recursive)) - - -def iglob(pathname, recursive=False): - """Return an iterator which yields the paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - it = _iglob(pathname, recursive) - if recursive and _isrecursive(pathname): - s = next(it) # skip empty string - assert not s - return it - - -def _iglob(pathname, recursive): - dirname, basename = os.path.split(pathname) - glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1 - - if not has_magic(pathname): - if basename: - if os.path.lexists(pathname): - yield pathname - else: - # Patterns ending with a slash should match only directories - if os.path.isdir(dirname): - yield pathname - return - - if not dirname: - yield from glob_in_dir(dirname, basename) - return - # `os.path.split()` returns the argument itself as a dirname if it is a - # drive or UNC path. Prevent an infinite recursion if a drive or UNC path - # contains magic characters (i.e. r'\\?\C:'). - if dirname != pathname and has_magic(dirname): - dirs = _iglob(dirname, recursive) - else: - dirs = [dirname] - if not has_magic(basename): - glob_in_dir = glob0 - for dirname in dirs: - for name in glob_in_dir(dirname, basename): - yield os.path.join(dirname, name) - - -# These 2 helper functions non-recursively glob inside a literal directory. -# They return a list of basenames. `glob1` accepts a pattern while `glob0` -# takes a literal basename (so it only has to check for its existence). - - -def glob1(dirname, pattern): - if not dirname: - if isinstance(pattern, bytes): - dirname = os.curdir.encode('ASCII') - else: - dirname = os.curdir - try: - names = os.listdir(dirname) - except OSError: - return [] - return fnmatch.filter(names, pattern) - - -def glob0(dirname, basename): - if not basename: - # `os.path.split()` returns an empty basename for paths ending with a - # directory separator. 'q*x/' should match only directories. - if os.path.isdir(dirname): - return [basename] - else: - if os.path.lexists(os.path.join(dirname, basename)): - return [basename] - return [] - - -# This helper function recursively yields relative pathnames inside a literal -# directory. - - -def glob2(dirname, pattern): - assert _isrecursive(pattern) - yield pattern[:0] - for x in _rlistdir(dirname): - yield x - - -# Recursively yields relative pathnames inside a literal directory. 
-def _rlistdir(dirname): - if not dirname: - if isinstance(dirname, bytes): - dirname = os.curdir.encode('ASCII') - else: - dirname = os.curdir - try: - names = os.listdir(dirname) - except os.error: - return - for x in names: - yield x - path = os.path.join(dirname, x) if dirname else x - for y in _rlistdir(path): - yield os.path.join(x, y) - - -magic_check = re.compile('([*?[])') -magic_check_bytes = re.compile(b'([*?[])') - - -def has_magic(s): - if isinstance(s, bytes): - match = magic_check_bytes.search(s) - else: - match = magic_check.search(s) - return match is not None - - -def _isrecursive(pattern): - if isinstance(pattern, bytes): - return pattern == b'**' - else: - return pattern == '**' - - -def escape(pathname): - """Escape all special characters. - """ - # Escaping is done by wrapping any of "*?[" between square brackets. - # Metacharacters do not work in the drive part and shouldn't be escaped. - drive, pathname = os.path.splitdrive(pathname) - if isinstance(pathname, bytes): - pathname = magic_check_bytes.sub(br'[\1]', pathname) - else: - pathname = magic_check.sub(r'[\1]', pathname) - return drive + pathname diff --git a/venv/Lib/site-packages/setuptools/gui-32.exe b/venv/Lib/site-packages/setuptools/gui-32.exe deleted file mode 100644 index f8d3509..0000000 Binary files a/venv/Lib/site-packages/setuptools/gui-32.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/gui-64.exe b/venv/Lib/site-packages/setuptools/gui-64.exe deleted file mode 100644 index 330c51a..0000000 Binary files a/venv/Lib/site-packages/setuptools/gui-64.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/gui-arm64.exe b/venv/Lib/site-packages/setuptools/gui-arm64.exe deleted file mode 100644 index 5730f11..0000000 Binary files a/venv/Lib/site-packages/setuptools/gui-arm64.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/gui.exe b/venv/Lib/site-packages/setuptools/gui.exe deleted file mode 100644 index f8d3509..0000000 Binary files a/venv/Lib/site-packages/setuptools/gui.exe and /dev/null differ diff --git a/venv/Lib/site-packages/setuptools/installer.py b/venv/Lib/site-packages/setuptools/installer.py deleted file mode 100644 index b7096df..0000000 --- a/venv/Lib/site-packages/setuptools/installer.py +++ /dev/null @@ -1,104 +0,0 @@ -import glob -import os -import subprocess -import sys -import tempfile -import warnings -from distutils import log -from distutils.errors import DistutilsError - -import pkg_resources -from setuptools.wheel import Wheel -from ._deprecation_warning import SetuptoolsDeprecationWarning - - -def _fixup_find_links(find_links): - """Ensure find-links option end-up being a list of strings.""" - if isinstance(find_links, str): - return find_links.split() - assert isinstance(find_links, (tuple, list)) - return find_links - - -def fetch_build_egg(dist, req): # noqa: C901 # is too complex (16) # FIXME - """Fetch an egg needed for building. - - Use pip/wheel to fetch/build a wheel.""" - warnings.warn( - "setuptools.installer is deprecated. Requirements should " - "be satisfied by a PEP 517 installer.", - SetuptoolsDeprecationWarning, - ) - # Warn if wheel is not available - try: - pkg_resources.get_distribution('wheel') - except pkg_resources.DistributionNotFound: - dist.announce('WARNING: The wheel package is not available.', log.WARN) - # Ignore environment markers; if supplied, it is required. 
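The marker-stripping step referenced in the comment above (strip_marker is defined at the end of this file) amounts to the following, shown here with the third-party 'packaging' library and a made-up requirement:

from packaging.requirements import Requirement

req = Requirement('babel; extra == "i18n"')    # hypothetical requirement
req.marker = None                              # drop the marker before handing to pip
print(str(req))                                # babel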
- req = strip_marker(req) - # Take easy_install options into account, but do not override relevant - # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll - # take precedence. - opts = dist.get_option_dict('easy_install') - if 'allow_hosts' in opts: - raise DistutilsError('the `allow-hosts` option is not supported ' - 'when using pip to install requirements.') - quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ - if 'PIP_INDEX_URL' in os.environ: - index_url = None - elif 'index_url' in opts: - index_url = opts['index_url'][1] - else: - index_url = None - find_links = ( - _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts - else [] - ) - if dist.dependency_links: - find_links.extend(dist.dependency_links) - eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) - environment = pkg_resources.Environment() - for egg_dist in pkg_resources.find_distributions(eggs_dir): - if egg_dist in req and environment.can_add(egg_dist): - return egg_dist - with tempfile.TemporaryDirectory() as tmpdir: - cmd = [ - sys.executable, '-m', 'pip', - '--disable-pip-version-check', - 'wheel', '--no-deps', - '-w', tmpdir, - ] - if quiet: - cmd.append('--quiet') - if index_url is not None: - cmd.extend(('--index-url', index_url)) - for link in find_links or []: - cmd.extend(('--find-links', link)) - # If requirement is a PEP 508 direct URL, directly pass - # the URL to pip, as `req @ url` does not work on the - # command line. - cmd.append(req.url or str(req)) - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError as e: - raise DistutilsError(str(e)) from e - wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0]) - dist_location = os.path.join(eggs_dir, wheel.egg_name()) - wheel.install_as_egg(dist_location) - dist_metadata = pkg_resources.PathMetadata( - dist_location, os.path.join(dist_location, 'EGG-INFO')) - dist = pkg_resources.Distribution.from_filename( - dist_location, metadata=dist_metadata) - return dist - - -def strip_marker(req): - """ - Return a new requirement without the environment marker to avoid - calling pip with something like `babel; extra == "i18n"`, which - would always be ignored. - """ - # create a copy to avoid mutating the input - req = pkg_resources.Requirement.parse(str(req)) - req.marker = None - return req diff --git a/venv/Lib/site-packages/setuptools/launch.py b/venv/Lib/site-packages/setuptools/launch.py deleted file mode 100644 index 0208fdf..0000000 --- a/venv/Lib/site-packages/setuptools/launch.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -Launch the Python script on the command line after -setuptools is bootstrapped via import. -""" - -# Note that setuptools gets imported implicitly by the -# invocation of this script using python -m setuptools.launch - -import tokenize -import sys - - -def run(): - """ - Run the script in sys.argv[1] as if it had - been invoked naturally. 
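run() (whose body follows) reads the target script with tokenize.open, compiles it, and executes it as if it were __main__; a minimal stand-alone sketch with a hypothetical script path:

import tokenize

script_name = 'some_script.py'                 # hypothetical path
with tokenize.open(script_name) as fid:        # honours PEP 263 encoding cookies
    source = fid.read()
code = compile(source, script_name, 'exec')
exec(code, {'__file__': script_name, '__name__': '__main__', '__doc__': None})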
- """ - __builtins__ - script_name = sys.argv[1] - namespace = dict( - __file__=script_name, - __name__='__main__', - __doc__=None, - ) - sys.argv[:] = sys.argv[1:] - - open_ = getattr(tokenize, 'open', open) - with open_(script_name) as fid: - script = fid.read() - norm_script = script.replace('\\r\\n', '\\n') - code = compile(norm_script, script_name, 'exec') - exec(code, namespace) - - -if __name__ == '__main__': - run() diff --git a/venv/Lib/site-packages/setuptools/logging.py b/venv/Lib/site-packages/setuptools/logging.py deleted file mode 100644 index 15b5761..0000000 --- a/venv/Lib/site-packages/setuptools/logging.py +++ /dev/null @@ -1,36 +0,0 @@ -import sys -import logging -import distutils.log -from . import monkey - - -def _not_warning(record): - return record.levelno < logging.WARNING - - -def configure(): - """ - Configure logging to emit warning and above to stderr - and everything else to stdout. This behavior is provided - for compatibilty with distutils.log but may change in - the future. - """ - err_handler = logging.StreamHandler() - err_handler.setLevel(logging.WARNING) - out_handler = logging.StreamHandler(sys.stdout) - out_handler.addFilter(_not_warning) - handlers = err_handler, out_handler - logging.basicConfig( - format="{message}", style='{', handlers=handlers, level=logging.DEBUG) - monkey.patch_func(set_threshold, distutils.log, 'set_threshold') - - # For some reason `distutils.log` module is getting cached in `distutils.dist` - # and then loaded again when patched, - # implying: id(distutils.log) != id(distutils.dist.log). - # Make sure the same module object is used everywhere: - distutils.dist.log = distutils.log - - -def set_threshold(level): - logging.root.setLevel(level*10) - return set_threshold.unpatched(level) diff --git a/venv/Lib/site-packages/setuptools/monkey.py b/venv/Lib/site-packages/setuptools/monkey.py deleted file mode 100644 index fb36dc1..0000000 --- a/venv/Lib/site-packages/setuptools/monkey.py +++ /dev/null @@ -1,177 +0,0 @@ -""" -Monkey patching of distutils. -""" - -import sys -import distutils.filelist -import platform -import types -import functools -from importlib import import_module -import inspect - -import setuptools - -__all__ = [] -""" -Everything is private. Contact the project team -if you think you need this functionality. -""" - - -def _get_mro(cls): - """ - Returns the bases classes for cls sorted by the MRO. - - Works around an issue on Jython where inspect.getmro will not return all - base classes if multiple classes share the same name. Instead, this - function will return a tuple containing the class itself, and the contents - of cls.__bases__. See https://github.com/pypa/setuptools/issues/1024. - """ - if platform.python_implementation() == "Jython": - return (cls,) + cls.__bases__ - return inspect.getmro(cls) - - -def get_unpatched(item): - lookup = ( - get_unpatched_class if isinstance(item, type) else - get_unpatched_function if isinstance(item, types.FunctionType) else - lambda item: None - ) - return lookup(item) - - -def get_unpatched_class(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. 
- """ - external_bases = ( - cls - for cls in _get_mro(cls) - if not cls.__module__.startswith('setuptools') - ) - base = next(external_bases) - if not base.__module__.startswith('distutils'): - msg = "distutils has already been patched by %r" % cls - raise AssertionError(msg) - return base - - -def patch_all(): - # we can't patch distutils.cmd, alas - distutils.core.Command = setuptools.Command - - has_issue_12885 = sys.version_info <= (3, 5, 3) - - if has_issue_12885: - # fix findall bug in distutils (http://bugs.python.org/issue12885) - distutils.filelist.findall = setuptools.findall - - needs_warehouse = ( - sys.version_info < (2, 7, 13) - or - (3, 4) < sys.version_info < (3, 4, 6) - or - (3, 5) < sys.version_info <= (3, 5, 3) - ) - - if needs_warehouse: - warehouse = 'https://upload.pypi.org/legacy/' - distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse - - _patch_distribution_metadata() - - # Install Distribution throughout the distutils - for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = setuptools.dist.Distribution - - # Install the patched Extension - distutils.core.Extension = setuptools.extension.Extension - distutils.extension.Extension = setuptools.extension.Extension - if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = ( - setuptools.extension.Extension - ) - - patch_for_msvc_specialized_compiler() - - -def _patch_distribution_metadata(): - """Patch write_pkg_file and read_pkg_file for higher metadata standards""" - for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'): - new_val = getattr(setuptools.dist, attr) - setattr(distutils.dist.DistributionMetadata, attr, new_val) - - -def patch_func(replacement, target_mod, func_name): - """ - Patch func_name in target_mod with replacement - - Important - original must be resolved by name to avoid - patching an already patched function. - """ - original = getattr(target_mod, func_name) - - # set the 'unpatched' attribute on the replacement to - # point to the original. - vars(replacement).setdefault('unpatched', original) - - # replace the function in the original module - setattr(target_mod, func_name, replacement) - - -def get_unpatched_function(candidate): - return getattr(candidate, 'unpatched') - - -def patch_for_msvc_specialized_compiler(): - """ - Patch functions in distutils to use standalone Microsoft Visual C++ - compilers. - """ - # import late to avoid circular imports on Python < 3.5 - msvc = import_module('setuptools.msvc') - - if platform.system() != 'Windows': - # Compilers only available on Microsoft Windows - return - - def patch_params(mod_name, func_name): - """ - Prepare the parameters for patch_func to patch indicated function. 
- """ - repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_' - repl_name = repl_prefix + func_name.lstrip('_') - repl = getattr(msvc, repl_name) - mod = import_module(mod_name) - if not hasattr(mod, func_name): - raise ImportError(func_name) - return repl, mod, func_name - - # Python 2.7 to 3.4 - msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler') - - # Python 3.5+ - msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') - - try: - # Patch distutils.msvc9compiler - patch_func(*msvc9('find_vcvarsall')) - patch_func(*msvc9('query_vcvarsall')) - except ImportError: - pass - - try: - # Patch distutils._msvccompiler._get_vc_env - patch_func(*msvc14('_get_vc_env')) - except ImportError: - pass - - try: - # Patch distutils._msvccompiler.gen_lib_options for Numpy - patch_func(*msvc14('gen_lib_options')) - except ImportError: - pass diff --git a/venv/Lib/site-packages/setuptools/msvc.py b/venv/Lib/site-packages/setuptools/msvc.py deleted file mode 100644 index 281ea1c..0000000 --- a/venv/Lib/site-packages/setuptools/msvc.py +++ /dev/null @@ -1,1805 +0,0 @@ -""" -Improved support for Microsoft Visual C++ compilers. - -Known supported compilers: --------------------------- -Microsoft Visual C++ 9.0: - Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) - Microsoft Windows SDK 6.1 (x86, x64, ia64) - Microsoft Windows SDK 7.0 (x86, x64, ia64) - -Microsoft Visual C++ 10.0: - Microsoft Windows SDK 7.1 (x86, x64, ia64) - -Microsoft Visual C++ 14.X: - Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) - Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) - Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64) - -This may also support compilers shipped with compatible Visual Studio versions. -""" - -import json -from io import open -from os import listdir, pathsep -from os.path import join, isfile, isdir, dirname -import sys -import contextlib -import platform -import itertools -import subprocess -import distutils.errors -from setuptools.extern.packaging.version import LegacyVersion -from setuptools.extern.more_itertools import unique_everseen - -from .monkey import get_unpatched - -if platform.system() == 'Windows': - import winreg - from os import environ -else: - # Mock winreg and environ so the module can be imported on this platform. - - class winreg: - HKEY_USERS = None - HKEY_CURRENT_USER = None - HKEY_LOCAL_MACHINE = None - HKEY_CLASSES_ROOT = None - - environ = dict() - -_msvc9_suppress_errors = ( - # msvc9compiler isn't available on some platforms - ImportError, - - # msvc9compiler raises DistutilsPlatformError in some - # environments. See #1118. - distutils.errors.DistutilsPlatformError, -) - -try: - from distutils.msvc9compiler import Reg -except _msvc9_suppress_errors: - pass - - -def msvc9_find_vcvarsall(version): - """ - Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone - compiler build for Python - (VCForPython / Microsoft Visual C++ Compiler for Python 2.7). - - Fall back to original behavior when the standalone compiler is not - available. - - Redirect the path of "vcvarsall.bat". - - Parameters - ---------- - version: float - Required Microsoft Visual C++ version. 
- - Return - ------ - str - vcvarsall.bat path - """ - vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' - key = vc_base % ('', version) - try: - # Per-user installs register the compiler path here - productdir = Reg.get_value(key, "installdir") - except KeyError: - try: - # All-user installs on a 64-bit system register here - key = vc_base % ('Wow6432Node\\', version) - productdir = Reg.get_value(key, "installdir") - except KeyError: - productdir = None - - if productdir: - vcvarsall = join(productdir, "vcvarsall.bat") - if isfile(vcvarsall): - return vcvarsall - - return get_unpatched(msvc9_find_vcvarsall)(version) - - -def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): - """ - Patched "distutils.msvc9compiler.query_vcvarsall" for support extra - Microsoft Visual C++ 9.0 and 10.0 compilers. - - Set environment without use of "vcvarsall.bat". - - Parameters - ---------- - ver: float - Required Microsoft Visual C++ version. - arch: str - Target architecture. - - Return - ------ - dict - environment - """ - # Try to get environment from vcvarsall.bat (Classical way) - try: - orig = get_unpatched(msvc9_query_vcvarsall) - return orig(ver, arch, *args, **kwargs) - except distutils.errors.DistutilsPlatformError: - # Pass error if Vcvarsall.bat is missing - pass - except ValueError: - # Pass error if environment not set after executing vcvarsall.bat - pass - - # If error, try to set environment directly - try: - return EnvironmentInfo(arch, ver).return_env() - except distutils.errors.DistutilsPlatformError as exc: - _augment_exception(exc, ver, arch) - raise - - -def _msvc14_find_vc2015(): - """Python 3.8 "distutils/_msvccompiler.py" backport""" - try: - key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, - r"Software\Microsoft\VisualStudio\SxS\VC7", - 0, - winreg.KEY_READ | winreg.KEY_WOW64_32KEY - ) - except OSError: - return None, None - - best_version = 0 - best_dir = None - with key: - for i in itertools.count(): - try: - v, vc_dir, vt = winreg.EnumValue(key, i) - except OSError: - break - if v and vt == winreg.REG_SZ and isdir(vc_dir): - try: - version = int(float(v)) - except (ValueError, TypeError): - continue - if version >= 14 and version > best_version: - best_version, best_dir = version, vc_dir - return best_version, best_dir - - -def _msvc14_find_vc2017(): - """Python 3.8 "distutils/_msvccompiler.py" backport - - Returns "15, path" based on the result of invoking vswhere.exe - If no install is found, returns "None, None" - - The version is returned to avoid unnecessarily changing the function - result. It may be ignored when the path is not None. - - If vswhere.exe is not available, by definition, VS 2017 is not - installed. 
- """ - root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles") - if not root: - return None, None - - try: - path = subprocess.check_output([ - join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requiresAny", - "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-requires", "Microsoft.VisualStudio.Workload.WDExpress", - "-property", "installationPath", - "-products", "*", - ]).decode(encoding="mbcs", errors="strict").strip() - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - return None, None - - path = join(path, "VC", "Auxiliary", "Build") - if isdir(path): - return 15, path - - return None, None - - -PLAT_SPEC_TO_RUNTIME = { - 'x86': 'x86', - 'x86_amd64': 'x64', - 'x86_arm': 'arm', - 'x86_arm64': 'arm64' -} - - -def _msvc14_find_vcvarsall(plat_spec): - """Python 3.8 "distutils/_msvccompiler.py" backport""" - _, best_dir = _msvc14_find_vc2017() - vcruntime = None - - if plat_spec in PLAT_SPEC_TO_RUNTIME: - vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec] - else: - vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86' - - if best_dir: - vcredist = join(best_dir, "..", "..", "redist", "MSVC", "**", - vcruntime_plat, "Microsoft.VC14*.CRT", - "vcruntime140.dll") - try: - import glob - vcruntime = glob.glob(vcredist, recursive=True)[-1] - except (ImportError, OSError, LookupError): - vcruntime = None - - if not best_dir: - best_version, best_dir = _msvc14_find_vc2015() - if best_version: - vcruntime = join(best_dir, 'redist', vcruntime_plat, - "Microsoft.VC140.CRT", "vcruntime140.dll") - - if not best_dir: - return None, None - - vcvarsall = join(best_dir, "vcvarsall.bat") - if not isfile(vcvarsall): - return None, None - - if not vcruntime or not isfile(vcruntime): - vcruntime = None - - return vcvarsall, vcruntime - - -def _msvc14_get_vc_env(plat_spec): - """Python 3.8 "distutils/_msvccompiler.py" backport""" - if "DISTUTILS_USE_SDK" in environ: - return { - key.lower(): value - for key, value in environ.items() - } - - vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec) - if not vcvarsall: - raise distutils.errors.DistutilsPlatformError( - "Unable to find vcvarsall.bat" - ) - - try: - out = subprocess.check_output( - 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), - stderr=subprocess.STDOUT, - ).decode('utf-16le', errors='replace') - except subprocess.CalledProcessError as exc: - raise distutils.errors.DistutilsPlatformError( - "Error executing {}".format(exc.cmd) - ) from exc - - env = { - key.lower(): value - for key, _, value in - (line.partition('=') for line in out.splitlines()) - if key and value - } - - if vcruntime: - env['py_vcruntime_redist'] = vcruntime - return env - - -def msvc14_get_vc_env(plat_spec): - """ - Patched "distutils._msvccompiler._get_vc_env" for support extra - Microsoft Visual C++ 14.X compilers. - - Set environment without use of "vcvarsall.bat". - - Parameters - ---------- - plat_spec: str - Target architecture. 
- - Return - ------ - dict - environment - """ - - # Always use backport from CPython 3.8 - try: - return _msvc14_get_vc_env(plat_spec) - except distutils.errors.DistutilsPlatformError as exc: - _augment_exception(exc, 14.0) - raise - - -def msvc14_gen_lib_options(*args, **kwargs): - """ - Patched "distutils._msvccompiler.gen_lib_options" for fix - compatibility between "numpy.distutils" and "distutils._msvccompiler" - (for Numpy < 1.11.2) - """ - if "numpy.distutils" in sys.modules: - import numpy as np - if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): - return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) - return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) - - -def _augment_exception(exc, version, arch=''): - """ - Add details to the exception message to help guide the user - as to what action will resolve it. - """ - # Error if MSVC++ directory not found or environment not set - message = exc.args[0] - - if "vcvarsall" in message.lower() or "visual c" in message.lower(): - # Special error message if MSVC++ not installed - tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.' - message = tmpl.format(**locals()) - msdownload = 'www.microsoft.com/download/details.aspx?id=%d' - if version == 9.0: - if arch.lower().find('ia64') > -1: - # For VC++ 9.0, if IA64 support is needed, redirect user - # to Windows SDK 7.0. - # Note: No download link available from Microsoft. - message += ' Get it with "Microsoft Windows SDK 7.0"' - else: - # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : - # This redirection link is maintained by Microsoft. - # Contact vspython@microsoft.com if it needs updating. - message += ' Get it from http://aka.ms/vcpython27' - elif version == 10.0: - # For VC++ 10.0 Redirect user to Windows SDK 7.1 - message += ' Get it with "Microsoft Windows SDK 7.1": ' - message += msdownload % 8279 - elif version >= 14.0: - # For VC++ 14.X Redirect user to latest Visual C++ Build Tools - message += (' Get it with "Microsoft C++ Build Tools": ' - r'https://visualstudio.microsoft.com' - r'/visual-cpp-build-tools/') - - exc.args = (message, ) - - -class PlatformInfo: - """ - Current and Target Architectures information. - - Parameters - ---------- - arch: str - Target architecture. - """ - current_cpu = environ.get('processor_architecture', '').lower() - - def __init__(self, arch): - self.arch = arch.lower().replace('x64', 'amd64') - - @property - def target_cpu(self): - """ - Return Target CPU architecture. - - Return - ------ - str - Target CPU - """ - return self.arch[self.arch.find('_') + 1:] - - def target_is_x86(self): - """ - Return True if target CPU is x86 32 bits.. - - Return - ------ - bool - CPU is x86 32 bits - """ - return self.target_cpu == 'x86' - - def current_is_x86(self): - """ - Return True if current CPU is x86 32 bits.. - - Return - ------ - bool - CPU is x86 32 bits - """ - return self.current_cpu == 'x86' - - def current_dir(self, hidex86=False, x64=False): - """ - Current platform specific subfolder. - - Parameters - ---------- - hidex86: bool - return '' and not '\x86' if architecture is x86. - x64: bool - return '\x64' and not '\amd64' if architecture is amd64. - - Return - ------ - str - subfolder: '\target', or '' (see hidex86 parameter) - """ - return ( - '' if (self.current_cpu == 'x86' and hidex86) else - r'\x64' if (self.current_cpu == 'amd64' and x64) else - r'\%s' % self.current_cpu - ) - - def target_dir(self, hidex86=False, x64=False): - r""" - Target platform specific subfolder. 
- - Parameters - ---------- - hidex86: bool - return '' and not '\x86' if architecture is x86. - x64: bool - return '\x64' and not '\amd64' if architecture is amd64. - - Return - ------ - str - subfolder: '\current', or '' (see hidex86 parameter) - """ - return ( - '' if (self.target_cpu == 'x86' and hidex86) else - r'\x64' if (self.target_cpu == 'amd64' and x64) else - r'\%s' % self.target_cpu - ) - - def cross_dir(self, forcex86=False): - r""" - Cross platform specific subfolder. - - Parameters - ---------- - forcex86: bool - Use 'x86' as current architecture even if current architecture is - not x86. - - Return - ------ - str - subfolder: '' if target architecture is current architecture, - '\current_target' if not. - """ - current = 'x86' if forcex86 else self.current_cpu - return ( - '' if self.target_cpu == current else - self.target_dir().replace('\\', '\\%s_' % current) - ) - - -class RegistryInfo: - """ - Microsoft Visual Studio related registry information. - - Parameters - ---------- - platform_info: PlatformInfo - "PlatformInfo" instance. - """ - HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) - - def __init__(self, platform_info): - self.pi = platform_info - - @property - def visualstudio(self): - """ - Microsoft Visual Studio root registry key. - - Return - ------ - str - Registry key - """ - return 'VisualStudio' - - @property - def sxs(self): - """ - Microsoft Visual Studio SxS registry key. - - Return - ------ - str - Registry key - """ - return join(self.visualstudio, 'SxS') - - @property - def vc(self): - """ - Microsoft Visual C++ VC7 registry key. - - Return - ------ - str - Registry key - """ - return join(self.sxs, 'VC7') - - @property - def vs(self): - """ - Microsoft Visual Studio VS7 registry key. - - Return - ------ - str - Registry key - """ - return join(self.sxs, 'VS7') - - @property - def vc_for_python(self): - """ - Microsoft Visual C++ for Python registry key. - - Return - ------ - str - Registry key - """ - return r'DevDiv\VCForPython' - - @property - def microsoft_sdk(self): - """ - Microsoft SDK registry key. - - Return - ------ - str - Registry key - """ - return 'Microsoft SDKs' - - @property - def windows_sdk(self): - """ - Microsoft Windows/Platform SDK registry key. - - Return - ------ - str - Registry key - """ - return join(self.microsoft_sdk, 'Windows') - - @property - def netfx_sdk(self): - """ - Microsoft .NET Framework SDK registry key. - - Return - ------ - str - Registry key - """ - return join(self.microsoft_sdk, 'NETFXSDK') - - @property - def windows_kits_roots(self): - """ - Microsoft Windows Kits Roots registry key. - - Return - ------ - str - Registry key - """ - return r'Windows Kits\Installed Roots' - - def microsoft(self, key, x86=False): - """ - Return key in Microsoft software registry. - - Parameters - ---------- - key: str - Registry key path where look. - x86: str - Force x86 software registry. - - Return - ------ - str - Registry key - """ - node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' - return join('Software', node64, 'Microsoft', key) - - def lookup(self, key, name): - """ - Look for values in registry in Microsoft software registry. - - Parameters - ---------- - key: str - Registry key path where look. - name: str - Value name to find. 
- - Return - ------ - str - value - """ - key_read = winreg.KEY_READ - openkey = winreg.OpenKey - closekey = winreg.CloseKey - ms = self.microsoft - for hkey in self.HKEYS: - bkey = None - try: - bkey = openkey(hkey, ms(key), 0, key_read) - except (OSError, IOError): - if not self.pi.current_is_x86(): - try: - bkey = openkey(hkey, ms(key, True), 0, key_read) - except (OSError, IOError): - continue - else: - continue - try: - return winreg.QueryValueEx(bkey, name)[0] - except (OSError, IOError): - pass - finally: - if bkey: - closekey(bkey) - - -class SystemInfo: - """ - Microsoft Windows and Visual Studio related system information. - - Parameters - ---------- - registry_info: RegistryInfo - "RegistryInfo" instance. - vc_ver: float - Required Microsoft Visual C++ version. - """ - - # Variables and properties in this class use originals CamelCase variables - # names from Microsoft source files for more easy comparison. - WinDir = environ.get('WinDir', '') - ProgramFiles = environ.get('ProgramFiles', '') - ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles) - - def __init__(self, registry_info, vc_ver=None): - self.ri = registry_info - self.pi = self.ri.pi - - self.known_vs_paths = self.find_programdata_vs_vers() - - # Except for VS15+, VC version is aligned with VS version - self.vs_ver = self.vc_ver = ( - vc_ver or self._find_latest_available_vs_ver()) - - def _find_latest_available_vs_ver(self): - """ - Find the latest VC version - - Return - ------ - float - version - """ - reg_vc_vers = self.find_reg_vs_vers() - - if not (reg_vc_vers or self.known_vs_paths): - raise distutils.errors.DistutilsPlatformError( - 'No Microsoft Visual C++ version found') - - vc_vers = set(reg_vc_vers) - vc_vers.update(self.known_vs_paths) - return sorted(vc_vers)[-1] - - def find_reg_vs_vers(self): - """ - Find Microsoft Visual Studio versions available in registry. - - Return - ------ - list of float - Versions - """ - ms = self.ri.microsoft - vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) - vs_vers = [] - for hkey, key in itertools.product(self.ri.HKEYS, vckeys): - try: - bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) - except (OSError, IOError): - continue - with bkey: - subkeys, values, _ = winreg.QueryInfoKey(bkey) - for i in range(values): - with contextlib.suppress(ValueError): - ver = float(winreg.EnumValue(bkey, i)[0]) - if ver not in vs_vers: - vs_vers.append(ver) - for i in range(subkeys): - with contextlib.suppress(ValueError): - ver = float(winreg.EnumKey(bkey, i)) - if ver not in vs_vers: - vs_vers.append(ver) - return sorted(vs_vers) - - def find_programdata_vs_vers(self): - r""" - Find Visual studio 2017+ versions from information in - "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances". - - Return - ------ - dict - float version as key, path as value. 
- """ - vs_versions = {} - instances_dir = \ - r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances' - - try: - hashed_names = listdir(instances_dir) - - except (OSError, IOError): - # Directory not exists with all Visual Studio versions - return vs_versions - - for name in hashed_names: - try: - # Get VS installation path from "state.json" file - state_path = join(instances_dir, name, 'state.json') - with open(state_path, 'rt', encoding='utf-8') as state_file: - state = json.load(state_file) - vs_path = state['installationPath'] - - # Raises OSError if this VS installation does not contain VC - listdir(join(vs_path, r'VC\Tools\MSVC')) - - # Store version and path - vs_versions[self._as_float_version( - state['installationVersion'])] = vs_path - - except (OSError, IOError, KeyError): - # Skip if "state.json" file is missing or bad format - continue - - return vs_versions - - @staticmethod - def _as_float_version(version): - """ - Return a string version as a simplified float version (major.minor) - - Parameters - ---------- - version: str - Version. - - Return - ------ - float - version - """ - return float('.'.join(version.split('.')[:2])) - - @property - def VSInstallDir(self): - """ - Microsoft Visual Studio directory. - - Return - ------ - str - path - """ - # Default path - default = join(self.ProgramFilesx86, - 'Microsoft Visual Studio %0.1f' % self.vs_ver) - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default - - @property - def VCInstallDir(self): - """ - Microsoft Visual C++ directory. - - Return - ------ - str - path - """ - path = self._guess_vc() or self._guess_vc_legacy() - - if not isdir(path): - msg = 'Microsoft Visual C++ directory not found' - raise distutils.errors.DistutilsPlatformError(msg) - - return path - - def _guess_vc(self): - """ - Locate Visual C++ for VS2017+. - - Return - ------ - str - path - """ - if self.vs_ver <= 14.0: - return '' - - try: - # First search in known VS paths - vs_dir = self.known_vs_paths[self.vs_ver] - except KeyError: - # Else, search with path from registry - vs_dir = self.VSInstallDir - - guess_vc = join(vs_dir, r'VC\Tools\MSVC') - - # Subdir with VC exact version as name - try: - # Update the VC version with real one instead of VS version - vc_ver = listdir(guess_vc)[-1] - self.vc_ver = self._as_float_version(vc_ver) - return join(guess_vc, vc_ver) - except (OSError, IOError, IndexError): - return '' - - def _guess_vc_legacy(self): - """ - Locate Visual C++ for versions prior to 2017. - - Return - ------ - str - path - """ - default = join(self.ProgramFilesx86, - r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver) - - # Try to get "VC++ for Python" path from registry as default path - reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver) - python_vc = self.ri.lookup(reg_path, 'installdir') - default_vc = join(python_vc, 'VC') if python_vc else default - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc - - @property - def WindowsSdkVersion(self): - """ - Microsoft Windows SDK versions for specified MSVC++ version. 
- - Return - ------ - tuple of str - versions - """ - if self.vs_ver <= 9.0: - return '7.0', '6.1', '6.0a' - elif self.vs_ver == 10.0: - return '7.1', '7.0a' - elif self.vs_ver == 11.0: - return '8.0', '8.0a' - elif self.vs_ver == 12.0: - return '8.1', '8.1a' - elif self.vs_ver >= 14.0: - return '10.0', '8.1' - - @property - def WindowsSdkLastVersion(self): - """ - Microsoft Windows SDK last version. - - Return - ------ - str - version - """ - return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib')) - - @property # noqa: C901 - def WindowsSdkDir(self): # noqa: C901 # is too complex (12) # FIXME - """ - Microsoft Windows SDK directory. - - Return - ------ - str - path - """ - sdkdir = '' - for ver in self.WindowsSdkVersion: - # Try to get it from registry - loc = join(self.ri.windows_sdk, 'v%s' % ver) - sdkdir = self.ri.lookup(loc, 'installationfolder') - if sdkdir: - break - if not sdkdir or not isdir(sdkdir): - # Try to get "VC++ for Python" version from registry - path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) - install_base = self.ri.lookup(path, 'installdir') - if install_base: - sdkdir = join(install_base, 'WinSDK') - if not sdkdir or not isdir(sdkdir): - # If fail, use default new path - for ver in self.WindowsSdkVersion: - intver = ver[:ver.rfind('.')] - path = r'Microsoft SDKs\Windows Kits\%s' % intver - d = join(self.ProgramFiles, path) - if isdir(d): - sdkdir = d - if not sdkdir or not isdir(sdkdir): - # If fail, use default old path - for ver in self.WindowsSdkVersion: - path = r'Microsoft SDKs\Windows\v%s' % ver - d = join(self.ProgramFiles, path) - if isdir(d): - sdkdir = d - if not sdkdir: - # If fail, use Platform SDK - sdkdir = join(self.VCInstallDir, 'PlatformSDK') - return sdkdir - - @property - def WindowsSDKExecutablePath(self): - """ - Microsoft Windows SDK executable directory. - - Return - ------ - str - path - """ - # Find WinSDK NetFx Tools registry dir name - if self.vs_ver <= 11.0: - netfxver = 35 - arch = '' - else: - netfxver = 40 - hidex86 = True if self.vs_ver <= 12.0 else False - arch = self.pi.current_dir(x64=True, hidex86=hidex86) - fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) - - # list all possibles registry paths - regpaths = [] - if self.vs_ver >= 14.0: - for ver in self.NetFxSdkVersion: - regpaths += [join(self.ri.netfx_sdk, ver, fx)] - - for ver in self.WindowsSdkVersion: - regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)] - - # Return installation folder from the more recent path - for path in regpaths: - execpath = self.ri.lookup(path, 'installationfolder') - if execpath: - return execpath - - @property - def FSharpInstallDir(self): - """ - Microsoft Visual F# directory. - - Return - ------ - str - path - """ - path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver) - return self.ri.lookup(path, 'productdir') or '' - - @property - def UniversalCRTSdkDir(self): - """ - Microsoft Universal CRT SDK directory. - - Return - ------ - str - path - """ - # Set Kit Roots versions for specified MSVC++ version - vers = ('10', '81') if self.vs_ver >= 14.0 else () - - # Find path of the more recent Kit - for ver in vers: - sdkdir = self.ri.lookup(self.ri.windows_kits_roots, - 'kitsroot%s' % ver) - if sdkdir: - return sdkdir or '' - - @property - def UniversalCRTSdkLastVersion(self): - """ - Microsoft Universal C Runtime SDK last version. 
- - Return - ------ - str - version - """ - return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib')) - - @property - def NetFxSdkVersion(self): - """ - Microsoft .NET Framework SDK versions. - - Return - ------ - tuple of str - versions - """ - # Set FxSdk versions for specified VS version - return (('4.7.2', '4.7.1', '4.7', - '4.6.2', '4.6.1', '4.6', - '4.5.2', '4.5.1', '4.5') - if self.vs_ver >= 14.0 else ()) - - @property - def NetFxSdkDir(self): - """ - Microsoft .NET Framework SDK directory. - - Return - ------ - str - path - """ - sdkdir = '' - for ver in self.NetFxSdkVersion: - loc = join(self.ri.netfx_sdk, ver) - sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') - if sdkdir: - break - return sdkdir - - @property - def FrameworkDir32(self): - """ - Microsoft .NET Framework 32bit directory. - - Return - ------ - str - path - """ - # Default path - guess_fw = join(self.WinDir, r'Microsoft.NET\Framework') - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw - - @property - def FrameworkDir64(self): - """ - Microsoft .NET Framework 64bit directory. - - Return - ------ - str - path - """ - # Default path - guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64') - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw - - @property - def FrameworkVersion32(self): - """ - Microsoft .NET Framework 32bit versions. - - Return - ------ - tuple of str - versions - """ - return self._find_dot_net_versions(32) - - @property - def FrameworkVersion64(self): - """ - Microsoft .NET Framework 64bit versions. - - Return - ------ - tuple of str - versions - """ - return self._find_dot_net_versions(64) - - def _find_dot_net_versions(self, bits): - """ - Find Microsoft .NET Framework versions. - - Parameters - ---------- - bits: int - Platform number of bits: 32 or 64. - - Return - ------ - tuple of str - versions - """ - # Find actual .NET version in registry - reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) - dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) - ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' - - # Set .NET versions for specified MSVC++ version - if self.vs_ver >= 12.0: - return ver, 'v4.0' - elif self.vs_ver >= 10.0: - return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5' - elif self.vs_ver == 9.0: - return 'v3.5', 'v2.0.50727' - elif self.vs_ver == 8.0: - return 'v3.0', 'v2.0.50727' - - @staticmethod - def _use_last_dir_name(path, prefix=''): - """ - Return name of the last dir in path or '' if no dir found. - - Parameters - ---------- - path: str - Use dirs in this path - prefix: str - Use only dirs starting by this prefix - - Return - ------ - str - name - """ - matching_dirs = ( - dir_name - for dir_name in reversed(listdir(path)) - if isdir(join(path, dir_name)) and - dir_name.startswith(prefix) - ) - return next(matching_dirs, None) or '' - - -class EnvironmentInfo: - """ - Return environment variables for specified Microsoft Visual C++ version - and platform : Lib, Include, Path and libpath. - - This function is compatible with Microsoft Visual C++ 9.0 to 14.X. - - Script created by analysing Microsoft environment configuration files like - "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... - - Parameters - ---------- - arch: str - Target architecture. - vc_ver: float - Required Microsoft Visual C++ version. If not set, autodetect the last - version. 
- vc_min_ver: float - Minimum Microsoft Visual C++ version. - """ - - # Variables and properties in this class use originals CamelCase variables - # names from Microsoft source files for more easy comparison. - - def __init__(self, arch, vc_ver=None, vc_min_ver=0): - self.pi = PlatformInfo(arch) - self.ri = RegistryInfo(self.pi) - self.si = SystemInfo(self.ri, vc_ver) - - if self.vc_ver < vc_min_ver: - err = 'No suitable Microsoft Visual C++ version found' - raise distutils.errors.DistutilsPlatformError(err) - - @property - def vs_ver(self): - """ - Microsoft Visual Studio. - - Return - ------ - float - version - """ - return self.si.vs_ver - - @property - def vc_ver(self): - """ - Microsoft Visual C++ version. - - Return - ------ - float - version - """ - return self.si.vc_ver - - @property - def VSTools(self): - """ - Microsoft Visual Studio Tools. - - Return - ------ - list of str - paths - """ - paths = [r'Common7\IDE', r'Common7\Tools'] - - if self.vs_ver >= 14.0: - arch_subdir = self.pi.current_dir(hidex86=True, x64=True) - paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] - paths += [r'Team Tools\Performance Tools'] - paths += [r'Team Tools\Performance Tools%s' % arch_subdir] - - return [join(self.si.VSInstallDir, path) for path in paths] - - @property - def VCIncludes(self): - """ - Microsoft Visual C++ & Microsoft Foundation Class Includes. - - Return - ------ - list of str - paths - """ - return [join(self.si.VCInstallDir, 'Include'), - join(self.si.VCInstallDir, r'ATLMFC\Include')] - - @property - def VCLibraries(self): - """ - Microsoft Visual C++ & Microsoft Foundation Class Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver >= 15.0: - arch_subdir = self.pi.target_dir(x64=True) - else: - arch_subdir = self.pi.target_dir(hidex86=True) - paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] - - if self.vs_ver >= 14.0: - paths += [r'Lib\store%s' % arch_subdir] - - return [join(self.si.VCInstallDir, path) for path in paths] - - @property - def VCStoreRefs(self): - """ - Microsoft Visual C++ store references Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0: - return [] - return [join(self.si.VCInstallDir, r'Lib\store\references')] - - @property - def VCTools(self): - """ - Microsoft Visual C++ Tools. - - Return - ------ - list of str - paths - """ - si = self.si - tools = [join(si.VCInstallDir, 'VCPackages')] - - forcex86 = True if self.vs_ver <= 10.0 else False - arch_subdir = self.pi.cross_dir(forcex86) - if arch_subdir: - tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)] - - if self.vs_ver == 14.0: - path = 'Bin%s' % self.pi.current_dir(hidex86=True) - tools += [join(si.VCInstallDir, path)] - - elif self.vs_ver >= 15.0: - host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else - r'bin\HostX64%s') - tools += [join( - si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] - - if self.pi.current_cpu != self.pi.target_cpu: - tools += [join( - si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))] - - else: - tools += [join(si.VCInstallDir, 'Bin')] - - return tools - - @property - def OSLibraries(self): - """ - Microsoft Windows SDK Libraries. 
- - Return - ------ - list of str - paths - """ - if self.vs_ver <= 10.0: - arch_subdir = self.pi.target_dir(hidex86=True, x64=True) - return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] - - else: - arch_subdir = self.pi.target_dir(x64=True) - lib = join(self.si.WindowsSdkDir, 'lib') - libver = self._sdk_subdir - return [join(lib, '%sum%s' % (libver, arch_subdir))] - - @property - def OSIncludes(self): - """ - Microsoft Windows SDK Include. - - Return - ------ - list of str - paths - """ - include = join(self.si.WindowsSdkDir, 'include') - - if self.vs_ver <= 10.0: - return [include, join(include, 'gl')] - - else: - if self.vs_ver >= 14.0: - sdkver = self._sdk_subdir - else: - sdkver = '' - return [join(include, '%sshared' % sdkver), - join(include, '%sum' % sdkver), - join(include, '%swinrt' % sdkver)] - - @property - def OSLibpath(self): - """ - Microsoft Windows SDK Libraries Paths. - - Return - ------ - list of str - paths - """ - ref = join(self.si.WindowsSdkDir, 'References') - libpath = [] - - if self.vs_ver <= 9.0: - libpath += self.OSLibraries - - if self.vs_ver >= 11.0: - libpath += [join(ref, r'CommonConfiguration\Neutral')] - - if self.vs_ver >= 14.0: - libpath += [ - ref, - join(self.si.WindowsSdkDir, 'UnionMetadata'), - join( - ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'), - join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'), - join( - ref, 'Windows.Networking.Connectivity.WwanContract', - '1.0.0.0'), - join( - self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs', - '%0.1f' % self.vs_ver, 'References', 'CommonConfiguration', - 'neutral'), - ] - return libpath - - @property - def SdkTools(self): - """ - Microsoft Windows SDK Tools. - - Return - ------ - list of str - paths - """ - return list(self._sdk_tools()) - - def _sdk_tools(self): - """ - Microsoft Windows SDK Tools paths generator. - - Return - ------ - generator of str - paths - """ - if self.vs_ver < 15.0: - bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86' - yield join(self.si.WindowsSdkDir, bin_dir) - - if not self.pi.current_is_x86(): - arch_subdir = self.pi.current_dir(x64=True) - path = 'Bin%s' % arch_subdir - yield join(self.si.WindowsSdkDir, path) - - if self.vs_ver in (10.0, 11.0): - if self.pi.target_is_x86(): - arch_subdir = '' - else: - arch_subdir = self.pi.current_dir(hidex86=True, x64=True) - path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir - yield join(self.si.WindowsSdkDir, path) - - elif self.vs_ver >= 15.0: - path = join(self.si.WindowsSdkDir, 'Bin') - arch_subdir = self.pi.current_dir(x64=True) - sdkver = self.si.WindowsSdkLastVersion - yield join(path, '%s%s' % (sdkver, arch_subdir)) - - if self.si.WindowsSDKExecutablePath: - yield self.si.WindowsSDKExecutablePath - - @property - def _sdk_subdir(self): - """ - Microsoft Windows SDK version subdir. - - Return - ------ - str - subdir - """ - ucrtver = self.si.WindowsSdkLastVersion - return ('%s\\' % ucrtver) if ucrtver else '' - - @property - def SdkSetup(self): - """ - Microsoft Windows SDK Setup. - - Return - ------ - list of str - paths - """ - if self.vs_ver > 9.0: - return [] - - return [join(self.si.WindowsSdkDir, 'Setup')] - - @property - def FxTools(self): - """ - Microsoft .NET Framework Tools. 
- - Return - ------ - list of str - paths - """ - pi = self.pi - si = self.si - - if self.vs_ver <= 10.0: - include32 = True - include64 = not pi.target_is_x86() and not pi.current_is_x86() - else: - include32 = pi.target_is_x86() or pi.current_is_x86() - include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' - - tools = [] - if include32: - tools += [join(si.FrameworkDir32, ver) - for ver in si.FrameworkVersion32] - if include64: - tools += [join(si.FrameworkDir64, ver) - for ver in si.FrameworkVersion64] - return tools - - @property - def NetFxSDKLibraries(self): - """ - Microsoft .Net Framework SDK Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: - return [] - - arch_subdir = self.pi.target_dir(x64=True) - return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] - - @property - def NetFxSDKIncludes(self): - """ - Microsoft .Net Framework SDK Includes. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: - return [] - - return [join(self.si.NetFxSdkDir, r'include\um')] - - @property - def VsTDb(self): - """ - Microsoft Visual Studio Team System Database. - - Return - ------ - list of str - paths - """ - return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')] - - @property - def MSBuild(self): - """ - Microsoft Build Engine. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 12.0: - return [] - elif self.vs_ver < 15.0: - base_path = self.si.ProgramFilesx86 - arch_subdir = self.pi.current_dir(hidex86=True) - else: - base_path = self.si.VSInstallDir - arch_subdir = '' - - path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir) - build = [join(base_path, path)] - - if self.vs_ver >= 15.0: - # Add Roslyn C# & Visual Basic Compiler - build += [join(base_path, path, 'Roslyn')] - - return build - - @property - def HTMLHelpWorkshop(self): - """ - Microsoft HTML Help Workshop. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 11.0: - return [] - - return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')] - - @property - def UCRTLibraries(self): - """ - Microsoft Universal C Runtime SDK Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0: - return [] - - arch_subdir = self.pi.target_dir(x64=True) - lib = join(self.si.UniversalCRTSdkDir, 'lib') - ucrtver = self._ucrt_subdir - return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] - - @property - def UCRTIncludes(self): - """ - Microsoft Universal C Runtime SDK Include. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0: - return [] - - include = join(self.si.UniversalCRTSdkDir, 'include') - return [join(include, '%sucrt' % self._ucrt_subdir)] - - @property - def _ucrt_subdir(self): - """ - Microsoft Universal C Runtime SDK version subdir. - - Return - ------ - str - subdir - """ - ucrtver = self.si.UniversalCRTSdkLastVersion - return ('%s\\' % ucrtver) if ucrtver else '' - - @property - def FSharp(self): - """ - Microsoft Visual F#. - - Return - ------ - list of str - paths - """ - if 11.0 > self.vs_ver > 12.0: - return [] - - return [self.si.FSharpInstallDir] - - @property - def VCRuntimeRedist(self): - """ - Microsoft Visual C++ runtime redistributable dll. 
- - Return - ------ - str - path - """ - vcruntime = 'vcruntime%d0.dll' % self.vc_ver - arch_subdir = self.pi.target_dir(x64=True).strip('\\') - - # Installation prefixes candidates - prefixes = [] - tools_path = self.si.VCInstallDir - redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist')) - if isdir(redist_path): - # Redist version may not be exactly the same as tools - redist_path = join(redist_path, listdir(redist_path)[-1]) - prefixes += [redist_path, join(redist_path, 'onecore')] - - prefixes += [join(tools_path, 'redist')] # VS14 legacy path - - # CRT directory - crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10), - # Sometime store in directory with VS version instead of VC - 'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10)) - - # vcruntime path - for prefix, crt_dir in itertools.product(prefixes, crt_dirs): - path = join(prefix, arch_subdir, crt_dir, vcruntime) - if isfile(path): - return path - - def return_env(self, exists=True): - """ - Return environment dict. - - Parameters - ---------- - exists: bool - It True, only return existing paths. - - Return - ------ - dict - environment - """ - env = dict( - include=self._build_paths('include', - [self.VCIncludes, - self.OSIncludes, - self.UCRTIncludes, - self.NetFxSDKIncludes], - exists), - lib=self._build_paths('lib', - [self.VCLibraries, - self.OSLibraries, - self.FxTools, - self.UCRTLibraries, - self.NetFxSDKLibraries], - exists), - libpath=self._build_paths('libpath', - [self.VCLibraries, - self.FxTools, - self.VCStoreRefs, - self.OSLibpath], - exists), - path=self._build_paths('path', - [self.VCTools, - self.VSTools, - self.VsTDb, - self.SdkTools, - self.SdkSetup, - self.FxTools, - self.MSBuild, - self.HTMLHelpWorkshop, - self.FSharp], - exists), - ) - if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist): - env['py_vcruntime_redist'] = self.VCRuntimeRedist - return env - - def _build_paths(self, name, spec_path_lists, exists): - """ - Given an environment variable name and specified paths, - return a pathsep-separated string of paths containing - unique, extant, directories from those paths and from - the environment variable. Raise an error if no paths - are resolved. - - Parameters - ---------- - name: str - Environment variable name - spec_path_lists: list of str - Paths - exists: bool - It True, only return existing paths. 
- - Return - ------ - str - Pathsep-separated paths - """ - # flatten spec_path_lists - spec_paths = itertools.chain.from_iterable(spec_path_lists) - env_paths = environ.get(name, '').split(pathsep) - paths = itertools.chain(spec_paths, env_paths) - extant_paths = list(filter(isdir, paths)) if exists else paths - if not extant_paths: - msg = "%s environment variable is empty" % name.upper() - raise distutils.errors.DistutilsPlatformError(msg) - unique_paths = unique_everseen(extant_paths) - return pathsep.join(unique_paths) diff --git a/venv/Lib/site-packages/setuptools/namespaces.py b/venv/Lib/site-packages/setuptools/namespaces.py deleted file mode 100644 index 44939e1..0000000 --- a/venv/Lib/site-packages/setuptools/namespaces.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -from distutils import log -import itertools - - -flatten = itertools.chain.from_iterable - - -class Installer: - - nspkg_ext = '-nspkg.pth' - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: - return - filename, ext = os.path.splitext(self._get_target()) - filename += self.nspkg_ext - self.outputs.append(filename) - log.info("Installing %s", filename) - lines = map(self._gen_nspkg_line, nsp) - - if self.dry_run: - # always generate the lines, even in dry run - list(lines) - return - - with open(filename, 'wt') as f: - f.writelines(lines) - - def uninstall_namespaces(self): - filename, ext = os.path.splitext(self._get_target()) - filename += self.nspkg_ext - if not os.path.exists(filename): - return - log.info("Removing %s", filename) - os.remove(filename) - - def _get_target(self): - return self.target - - _nspkg_tmpl = ( - "import sys, types, os", - "has_mfs = sys.version_info > (3, 5)", - "p = os.path.join(%(root)s, *%(pth)r)", - "importlib = has_mfs and __import__('importlib.util')", - "has_mfs and __import__('importlib.machinery')", - ( - "m = has_mfs and " - "sys.modules.setdefault(%(pkg)r, " - "importlib.util.module_from_spec(" - "importlib.machinery.PathFinder.find_spec(%(pkg)r, " - "[os.path.dirname(p)])))" - ), - ( - "m = m or " - "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))" - ), - "mp = (m or []) and m.__dict__.setdefault('__path__',[])", - "(p not in mp) and mp.append(p)", - ) - "lines for the namespace installer" - - _nspkg_tmpl_multi = ( - 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', - ) - "additional line(s) when a parent package is indicated" - - def _get_root(self): - return "sys._getframe(1).f_locals['sitedir']" - - def _gen_nspkg_line(self, pkg): - pth = tuple(pkg.split('.')) - root = self._get_root() - tmpl_lines = self._nspkg_tmpl - parent, sep, child = pkg.rpartition('.') - if parent: - tmpl_lines += self._nspkg_tmpl_multi - return ';'.join(tmpl_lines) % locals() + '\n' - - def _get_all_ns_packages(self): - """Return sorted list of all package namespaces""" - pkgs = self.distribution.namespace_packages or [] - return sorted(flatten(map(self._pkg_names, pkgs))) - - @staticmethod - def _pkg_names(pkg): - """ - Given a namespace package, yield the components of that - package. 
-
-        >>> names = Installer._pkg_names('a.b.c')
-        >>> set(names) == set(['a', 'a.b', 'a.b.c'])
-        True
-        """
-        parts = pkg.split('.')
-        while parts:
-            yield '.'.join(parts)
-            parts.pop()
-
-
-class DevelopInstaller(Installer):
-    def _get_root(self):
-        return repr(str(self.egg_path))
-
-    def _get_target(self):
-        return self.egg_link
diff --git a/venv/Lib/site-packages/setuptools/package_index.py b/venv/Lib/site-packages/setuptools/package_index.py
deleted file mode 100644
index 051e523..0000000
--- a/venv/Lib/site-packages/setuptools/package_index.py
+++ /dev/null
@@ -1,1127 +0,0 @@
-"""PyPI and direct package downloading"""
-import sys
-import os
-import re
-import io
-import shutil
-import socket
-import base64
-import hashlib
-import itertools
-import warnings
-import configparser
-import html
-import http.client
-import urllib.parse
-import urllib.request
-import urllib.error
-from functools import wraps
-
-import setuptools
-from pkg_resources import (
-    CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
-    Environment, find_distributions, safe_name, safe_version,
-    to_filename, Requirement, DEVELOP_DIST, EGG_DIST, parse_version,
-)
-from distutils import log
-from distutils.errors import DistutilsError
-from fnmatch import translate
-from setuptools.wheel import Wheel
-from setuptools.extern.more_itertools import unique_everseen
-
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
-HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
-PYPI_MD5 = re.compile(
-    r'<a href="([^"#]+)#md5=([0-9a-f]{32})">([^<]+)</a>\n\s+\(md5\)'
-)
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
-    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
-    'interpret_distro_name',
-]
-
-_SOCKET_TIMEOUT = 15
-
-_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
-user_agent = _tmpl.format(
-    py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
-
-
-def parse_requirement_arg(spec):
-    try:
-        return Requirement.parse(spec)
-    except ValueError as e:
-        raise DistutilsError(
-            "Not a URL, existing file, or requirement spec: %r" % (spec,)
-        ) from e
-
-
-def parse_bdist_wininst(name):
-    """Return (base,pyversion) or (None,None) for possible .exe name"""
-
-    lower = name.lower()
-    base, py_ver, plat = None, None, None
-
-    if lower.endswith('.exe'):
-        if lower.endswith('.win32.exe'):
-            base = name[:-10]
-            plat = 'win32'
-        elif lower.startswith('.win32-py', -16):
-            py_ver = name[-7:-4]
-            base = name[:-16]
-            plat = 'win32'
-        elif lower.endswith('.win-amd64.exe'):
-            base = name[:-14]
-            plat = 'win-amd64'
-        elif lower.startswith('.win-amd64-py', -20):
-            py_ver = name[-7:-4]
-            base = name[:-20]
-            plat = 'win-amd64'
-    return base, py_ver, plat
-
-
-def egg_info_for_url(url):
-    parts = urllib.parse.urlparse(url)
-    scheme, server, path, parameters, query, fragment = parts
-    base = urllib.parse.unquote(path.split('/')[-1])
-    if server == 'sourceforge.net' and base == 'download':  # XXX Yuck
-        base = urllib.parse.unquote(path.split('/')[-2])
-    if '#' in base:
-        base, fragment = base.split('#', 1)
-    return base, fragment
-
-
-def distros_for_url(url, metadata=None):
-    """Yield egg or source distribution objects that might be found at a URL"""
-    base, fragment = egg_info_for_url(url)
-    for dist in distros_for_location(url, base, metadata):
-        yield dist
-    if fragment:
-        match = EGG_FRAGMENT.match(fragment)
-        if match:
-            for dist in interpret_distro_name(
-                url, match.group(1), metadata, precedence=CHECKOUT_DIST
-            ):
-                yield dist
- - -def distros_for_location(location, basename, metadata=None): - """Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg') and '-' in basename: - # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - if basename.endswith('.whl') and '-' in basename: - wheel = Wheel(basename) - if not wheel.is_compatible(): - return [] - return [Distribution( - location=location, - project_name=wheel.project_name, - version=wheel.version, - # Increase priority over eggs. - precedence=EGG_DIST + 1, - )] - if basename.endswith('.exe'): - win_base, py_ver, platform = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, platform - ) - # Try source distro extensions (.zip, .tgz, etc.) - # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name( - location, basename, metadata, py_version=None, precedence=SOURCE_DIST, - platform=None -): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interpretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). - - parts = basename.split('-') - if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]): - # it is a bdist_dumb, not an sdist -- bail out - return - - for p in range(1, len(parts) + 1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence=precedence, - platform=platform - ) - - -def unique_values(func): - """ - Wrap a function returning an iterable such that the resulting iterable - only ever yields unique items. 
-    """
-
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return unique_everseen(func(*args, **kwargs))
-
-    return wrapper
-
-
-REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
-# this line is here to fix emacs' cruddy broken syntax highlighting
-
-
-@unique_values
-def find_external_links(url, page):
-    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
-
-    for match in REL.finditer(page):
-        tag, rel = match.groups()
-        rels = set(map(str.strip, rel.lower().split(',')))
-        if 'homepage' in rels or 'download' in rels:
-            for match in HREF.finditer(tag):
-                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-
-    for tag in ("<th>Home Page", "<th>Download URL"):
-        pos = page.find(tag)
-        if pos != -1:
-            match = HREF.search(page, pos)
-            if match:
-                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-
-
-class ContentChecker:
-    """
-    A null content checker that defines the interface for checking content
-    """
-
-    def feed(self, block):
-        """
-        Feed a block of data to the hash.
-        """
-        return
-
-    def is_valid(self):
-        """
-        Check the hash. Return False if validation fails.
-        """
-        return True
-
-    def report(self, reporter, template):
-        """
-        Call reporter with information about the checker (hash name)
-        substituted into the template.
-        """
-        return
-
-
-class HashChecker(ContentChecker):
-    pattern = re.compile(
-        r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
-        r'(?P<expected>[a-f0-9]+)'
-    )
-
-    def __init__(self, hash_name, expected):
-        self.hash_name = hash_name
-        self.hash = hashlib.new(hash_name)
-        self.expected = expected
-
-    @classmethod
-    def from_url(cls, url):
-        "Construct a (possibly null) ContentChecker from a URL"
-        fragment = urllib.parse.urlparse(url)[-1]
-        if not fragment:
-            return ContentChecker()
-        match = cls.pattern.search(fragment)
-        if not match:
-            return ContentChecker()
-        return cls(**match.groupdict())
-
-    def feed(self, block):
-        self.hash.update(block)
-
-    def is_valid(self):
-        return self.hash.hexdigest() == self.expected
-
-    def report(self, reporter, template):
-        msg = template % self.hash_name
-        return reporter(msg)
-
-
-class PackageIndex(Environment):
-    """A distribution index that scans web pages for download URLs"""
-
-    def __init__(
-            self, index_url="https://pypi.org/simple/", hosts=('*',),
-            ca_bundle=None, verify_ssl=True, *args, **kw
-    ):
-        super().__init__(*args, **kw)
-        self.index_url = index_url + "/" [:not index_url.endswith('/')]
-        self.scanned_urls = {}
-        self.fetched_urls = {}
-        self.package_pages = {}
-        self.allows = re.compile('|'.join(map(translate, hosts))).match
-        self.to_scan = []
-        self.opener = urllib.request.urlopen
-
-    def add(self, dist):
-        # ignore invalid versions
-        try:
-            parse_version(dist.version)
-        except Exception:
-            return
-        return super().add(dist)
-
-    # FIXME: 'PackageIndex.process_url' is too complex (14)
-    def process_url(self, url, retrieve=False):  # noqa: C901
-        """Evaluate a URL as a possible download, and maybe retrieve it"""
-        if url in self.scanned_urls and not retrieve:
-            return
-        self.scanned_urls[url] = True
-        if not URL_SCHEME(url):
-            self.process_filename(url)
-            return
-        else:
-            dists = list(distros_for_url(url))
-            if dists:
-                if not self.url_ok(url):
-                    return
-                self.debug("Found link: %s", url)
-
-        if dists or not retrieve or url in self.fetched_urls:
-            list(map(self.add, dists))
-            return  # don't need the actual page
-
-        if not self.url_ok(url):
-            self.fetched_urls[url] = True
-            return
-
-        self.info("Reading %s", url)
-        self.fetched_urls[url] = True  # prevent multiple fetch
attempts - tmpl = "Download error on %s: %%s -- Some packages may not be found!" - f = self.open_url(url, tmpl % url) - if f is None: - return - if isinstance(f, urllib.error.HTTPError) and f.code == 401: - self.info("Authentication error: %s" % f.msg) - self.fetched_urls[f.url] = True - if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - if not isinstance(page, str): - # In Python 3 and got bytes but want str. - if isinstance(f, urllib.error.HTTPError): - # Errors have no charset, assume latin1: - charset = 'latin-1' - else: - charset = f.headers.get_param('charset') or 'latin-1' - page = page.decode(charset, "ignore") - f.close() - for match in HREF.finditer(page): - link = urllib.parse.urljoin(base, htmldecode(match.group(1))) - self.process_url(link) - if url.startswith(self.index_url) and getattr(f, 'code', None) != 404: - page = self.process_index(url, page) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", fn) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path, item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - list(map(self.add, dists)) - - def url_ok(self, url, fatal=False): - s = URL_SCHEME(url) - is_file = s and s.group(1).lower() == 'file' - if is_file or self.allows(urllib.parse.urlparse(url)[1]): - return True - msg = ( - "\nNote: Bypassing %s (disallowed host; see " - "http://bit.ly/2hrImnY for details).\n") - if fatal: - raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - def scan_egg_links(self, search_path): - dirs = filter(os.path.isdir, search_path) - egg_links = ( - (path, entry) - for path in dirs - for entry in os.listdir(path) - if entry.endswith('.egg-link') - ) - list(itertools.starmap(self.scan_egg_link, egg_links)) - - def scan_egg_link(self, path, entry): - with open(os.path.join(path, entry)) as raw_lines: - # filter non-empty lines - lines = list(filter(None, map(str.strip, raw_lines))) - - if len(lines) != 2: - # format is not recognized; punt - return - - egg_path, setup_path = lines - - for dist in find_distributions(os.path.join(path, egg_path)): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) - - def _scan(self, link): - # Process a URL to see if it's for a package page - NO_MATCH_SENTINEL = None, None - if not link.startswith(self.index_url): - return NO_MATCH_SENTINEL - - parts = list(map( - urllib.parse.unquote, link[len(self.index_url):].split('/') - )) - if len(parts) != 2 or '#' in parts[1]: - return NO_MATCH_SENTINEL - - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(), {})[link] = True - return to_filename(pkg), to_filename(ver) - - def process_index(self, url, page): - """Process the contents of a PyPI page""" - - # process an index page into the package-page index - for match in HREF.finditer(page): - try: - self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) - except ValueError: - pass - - pkg, ver = self._scan(url) # ensure this page is in the page index - if not pkg: - return "" # no sense double-scanning non-package pages - - # process individual package page - for new_url in find_external_links(url, page): - # Process 
the found URL
-            base, frag = egg_info_for_url(new_url)
-            if base.endswith('.py') and not frag:
-                if ver:
-                    new_url += '#egg=%s-%s' % (pkg, ver)
-                else:
-                    self.need_version_info(url)
-            self.scan_url(new_url)
-
-        return PYPI_MD5.sub(
-            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
-        )
-
-    def need_version_info(self, url):
-        self.scan_all(
-            "Page at %s links to .py file(s) without version info; an index "
-            "scan is required.", url
-        )
-
-    def scan_all(self, msg=None, *args):
-        if self.index_url not in self.fetched_urls:
-            if msg:
-                self.warn(msg, *args)
-            self.info(
-                "Scanning index of all packages (this may take a while)"
-            )
-        self.scan_url(self.index_url)
-
-    def find_packages(self, requirement):
-        self.scan_url(self.index_url + requirement.unsafe_name + '/')
-
-        if not self.package_pages.get(requirement.key):
-            # Fall back to safe version of the name
-            self.scan_url(self.index_url + requirement.project_name + '/')
-
-        if not self.package_pages.get(requirement.key):
-            # We couldn't find the target package, so search the index page too
-            self.not_found_in_index(requirement)
-
-        for url in list(self.package_pages.get(requirement.key, ())):
-            # scan each page that might be related to the desired package
-            self.scan_url(url)
-
-    def obtain(self, requirement, installer=None):
-        self.prescan()
-        self.find_packages(requirement)
-        for dist in self[requirement.key]:
-            if dist in requirement:
-                return dist
-            self.debug("%s does not match %s", requirement, dist)
-        return super(PackageIndex, self).obtain(requirement, installer)
-
-    def check_hash(self, checker, filename, tfp):
-        """
-        checker is a ContentChecker
-        """
-        checker.report(
-            self.debug,
-            "Validating %%s checksum for %s" % filename)
-        if not checker.is_valid():
-            tfp.close()
-            os.unlink(filename)
-            raise DistutilsError(
-                "%s validation failed for %s; "
-                "possible download problem?"
-                % (checker.hash.name, os.path.basename(filename))
-            )
-
-    def add_find_links(self, urls):
-        """Add `urls` to the list that will be prescanned for searches"""
-        for url in urls:
-            if (
-                self.to_scan is None  # if we have already "gone online"
-                or not URL_SCHEME(url)  # or it's a local file/directory
-                or url.startswith('file:')
-                or list(distros_for_url(url))  # or a direct package link
-            ):
-                # then go ahead and process it now
-                self.scan_url(url)
-            else:
-                # otherwise, defer retrieval till later
-                self.to_scan.append(url)
-
-    def prescan(self):
-        """Scan urls scheduled for prescanning (e.g. --find-links)"""
-        if self.to_scan:
-            list(map(self.scan_url, self.to_scan))
-        self.to_scan = None  # from now on, go ahead and process immediately
-
-    def not_found_in_index(self, requirement):
-        if self[requirement.key]:  # we've seen at least one distro
-            meth, msg = self.info, "Couldn't retrieve index page for %r"
-        else:  # no distros seen for this name, might be misspelled
-            meth, msg = (
-                self.warn,
-                "Couldn't find index page for %r (maybe misspelled?)")
-        meth(msg, requirement.unsafe_name)
-        self.scan_all()
-
-    def download(self, spec, tmpdir):
-        """Locate and/or download `spec` to `tmpdir`, returning a local path
-
-        `spec` may be a ``Requirement`` object, or a string containing a URL,
-        an existing local filename, or a project/version requirement spec
-        (i.e. the string form of a ``Requirement`` object). If it is the URL
-        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
-        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
-        automatically created alongside the downloaded file.
- - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. - """ - if not isinstance(spec, Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found, fragment, tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - spec = parse_requirement_arg(spec) - return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) - - def fetch_distribution( # noqa: C901 # is too complex (14) # FIXME - self, requirement, tmpdir, force_scan=False, source=False, - develop_ok=False, local_index=None): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. - """ - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - dist = None - - def find(req, env=None): - if env is None: - env = self - # Find a matching distribution; may be called more than once - - for dist in env[req.key]: - - if dist.precedence == DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn( - "Skipping development or system egg: %s", dist, - ) - skipped[dist] = 1 - continue - - test = ( - dist in req - and (dist.precedence <= SOURCE_DIST or not source) - ) - if test: - loc = self.download(dist.location, tmpdir) - dist.download_location = loc - if os.path.exists(dist.download_location): - return dist - - if force_scan: - self.prescan() - self.find_packages(requirement) - dist = find(requirement) - - if not dist and local_index is not None: - dist = find(requirement, local_index) - - if dist is None: - if self.to_scan is not None: - self.prescan() - dist = find(requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(requirement) - - if dist is None: - self.warn( - "No local packages or working download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - else: - self.info("Best match: %s", dist) - return dist.clone(location=dist.download_location) - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. 
For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. - """ - dist = self.fetch_distribution(requirement, tmpdir, force_scan, source) - if dist is not None: - return dist.location - return None - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment) - dists = match and [ - d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists) == 1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. - if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename = dst - - with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment, dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." - ) - - dl_blocksize = 8192 - - def _download_to(self, url, filename): - self.info("Downloading %s", url) - # Download the file - fp = None - try: - checker = HashChecker.from_url(url) - fp = self.open_url(url) - if isinstance(fp, urllib.error.HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code, fp.msg) - ) - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - # Some servers return multiple Content-Length headers :( - sizes = headers.get_all('Content-Length') - size = max(map(int, sizes)) - self.reporthook(url, filename, blocknum, bs, size) - with open(filename, 'wb') as tfp: - while True: - block = fp.read(bs) - if block: - checker.feed(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - self.check_hash(checker, filename, tfp) - return headers - finally: - if fp: - fp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - # FIXME: - def open_url(self, url, warning=None): # noqa: C901 # is too complex (12) - if url.startswith('file:'): - return local_open(url) - try: - return open_with_auth(url, self.opener) - except (ValueError, http.client.InvalidURL) as v: - msg = ' '.join([str(arg) for arg in v.args]) - if warning: - self.warn(warning, msg) - else: - raise DistutilsError('%s %s' % (url, msg)) from v - except urllib.error.HTTPError as v: - return v - except urllib.error.URLError as v: - if warning: - self.warn(warning, v.reason) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v.reason)) from v - except http.client.BadStatusLine as v: - if warning: - self.warn(warning, v.line) - else: - raise DistutilsError( - '%s returned a bad status line. 
The server might be ' - 'down, %s' % - (url, v.line) - ) from v - except (http.client.HTTPException, socket.error) as v: - if warning: - self.warn(warning, v) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v)) from v - - def _download_url(self, scheme, url, tmpdir): - # Determine download filename - # - name, fragment = egg_info_for_url(url) - if name: - while '..' in name: - name = name.replace('..', '.').replace('\\', '_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir, name) - - # Download the file - # - if scheme == 'svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - elif scheme == 'git' or scheme.startswith('git+'): - return self._download_git(url, filename) - elif scheme.startswith('hg+'): - return self._download_hg(url, filename) - elif scheme == 'file': - return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) - else: - self.url_ok(url, True) # raises error if not allowed - return self._attempt_download(url, filename) - - def scan_url(self, url): - self.process_url(url, True) - - def _attempt_download(self, url, filename): - headers = self._download_to(url, filename) - if 'html' in headers.get('content-type', '').lower(): - return self._download_html(url, headers, filename) - else: - return filename - - def _download_html(self, url, headers, filename): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'([^- ]+ - )?Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at " + url) - - def _download_svn(self, url, filename): - warnings.warn("SVN download support is deprecated", UserWarning) - url = url.split('#', 1)[0] # remove any fragment for svn's sake - creds = '' - if url.lower().startswith('svn:') and '@' in url: - scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) - if not netloc and path.startswith('//') and '/' in path[2:]: - netloc, path = path[2:].split('/', 1) - auth, host = _splituser(netloc) - if auth: - if ':' in auth: - user, pw = auth.split(':', 1) - creds = " --username=%s --password=%s" % (user, pw) - else: - creds = " --username=" + auth - netloc = host - parts = scheme, netloc, url, p, q, f - url = urllib.parse.urlunparse(parts) - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout%s -q %s %s" % (creds, url, filename)) - return filename - - @staticmethod - def _vcs_split_rev_from_url(url, pop_prefix=False): - scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) - - scheme = scheme.split('+', 1)[-1] - - # Some fragment identification fails - path = path.split('#', 1)[0] - - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - - # Also, discard fragment - url = urllib.parse.urlunsplit((scheme, netloc, path, query, '')) - - return url, rev - - def _download_git(self, url, filename): - filename = filename.split('#', 1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing git clone from %s to %s", url, filename) - os.system("git clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Checking out %s", rev) - os.system("git -C %s checkout --quiet %s" % ( - filename, - rev, - 
)) - - return filename - - def _download_hg(self, url, filename): - filename = filename.split('#', 1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing hg clone from %s to %s", url, filename) - os.system("hg clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Updating to %s", rev) - os.system("hg --cwd %s up -C -r %s -q" % ( - filename, - rev, - )) - - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def warn(self, msg, *args): - log.warn(msg, *args) - - -# This pattern matches a character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). -entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub - - -def decode_entity(match): - what = match.group(0) - return html.unescape(what) - - -def htmldecode(text): - """ - Decode HTML entities in the given text. - - >>> htmldecode( - ... 'https://../package_name-0.1.2.tar.gz' - ... '?tokena=A&tokenb=B">package_name-0.1.2.tar.gz') - 'https://../package_name-0.1.2.tar.gz?tokena=A&tokenb=B">package_name-0.1.2.tar.gz' - """ - return entity_sub(decode_entity, text) - - -def socket_timeout(timeout=15): - def _socket_timeout(func): - def _socket_timeout(*args, **kwargs): - old_timeout = socket.getdefaulttimeout() - socket.setdefaulttimeout(timeout) - try: - return func(*args, **kwargs) - finally: - socket.setdefaulttimeout(old_timeout) - - return _socket_timeout - - return _socket_timeout - - -def _encode_auth(auth): - """ - Encode auth from a URL suitable for an HTTP header. - >>> str(_encode_auth('username%3Apassword')) - 'dXNlcm5hbWU6cGFzc3dvcmQ=' - - Long auth strings should not cause a newline to be inserted. - >>> long_auth = 'username:' + 'password'*10 - >>> chr(10) in str(_encode_auth(long_auth)) - False - """ - auth_s = urllib.parse.unquote(auth) - # convert to bytes - auth_bytes = auth_s.encode() - encoded_bytes = base64.b64encode(auth_bytes) - # convert back to a string - encoded = encoded_bytes.decode() - # strip the trailing carriage return - return encoded.replace('\n', '') - - -class Credential: - """ - A username/password pair. Use like a namedtuple. - """ - - def __init__(self, username, password): - self.username = username - self.password = password - - def __iter__(self): - yield self.username - yield self.password - - def __str__(self): - return '%(username)s:%(password)s' % vars(self) - - -class PyPIConfig(configparser.RawConfigParser): - def __init__(self): - """ - Load from ~/.pypirc - """ - defaults = dict.fromkeys(['username', 'password', 'repository'], '') - super().__init__(defaults) - - rc = os.path.join(os.path.expanduser('~'), '.pypirc') - if os.path.exists(rc): - self.read(rc) - - @property - def creds_by_repository(self): - sections_with_repositories = [ - section for section in self.sections() - if self.get(section, 'repository').strip() - ] - - return dict(map(self._get_repo_cred, sections_with_repositories)) - - def _get_repo_cred(self, section): - repo = self.get(section, 'repository').strip() - return repo, Credential( - self.get(section, 'username').strip(), - self.get(section, 'password').strip(), - ) - - def find_credential(self, url): - """ - If the URL indicated appears to be a repository defined in this - config, return the credential for that repository. 
- """ - for repository, cred in self.creds_by_repository.items(): - if url.startswith(repository): - return cred - - -def open_with_auth(url, opener=urllib.request.urlopen): - """Open a urllib2 request, handling HTTP authentication""" - - parsed = urllib.parse.urlparse(url) - scheme, netloc, path, params, query, frag = parsed - - # Double scheme does not raise on macOS as revealed by a - # failing test. We would expect "nonnumeric port". Refs #20. - if netloc.endswith(':'): - raise http.client.InvalidURL("nonnumeric port: ''") - - if scheme in ('http', 'https'): - auth, address = _splituser(netloc) - else: - auth = None - - if not auth: - cred = PyPIConfig().find_credential(url) - if cred: - auth = str(cred) - info = cred.username, url - log.info('Authenticating as %s for %s (from .pypirc)', *info) - - if auth: - auth = "Basic " + _encode_auth(auth) - parts = scheme, address, path, params, query, frag - new_url = urllib.parse.urlunparse(parts) - request = urllib.request.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib.request.Request(url) - - request.add_header('User-Agent', user_agent) - fp = opener(request) - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) - if s2 == scheme and h2 == address: - parts = s2, netloc, path2, param2, query2, frag2 - fp.url = urllib.parse.urlunparse(parts) - - return fp - - -# copy of urllib.parse._splituser from Python 3.8 -def _splituser(host): - """splituser('user[:passwd]@host[:port]') - --> 'user[:passwd]', 'host[:port]'.""" - user, delim, host = host.rpartition('@') - return (user if delim else None), host - - -# adding a timeout to avoid freezing package_index -open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) - - -def fix_sf_url(url): - return url # backward compatibility - - -def local_open(url): - """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urllib.parse.urlparse(url) - filename = urllib.request.url2pathname(path) - if os.path.isfile(filename): - return urllib.request.urlopen(url) - elif path.endswith('/') and os.path.isdir(filename): - files = [] - for f in os.listdir(filename): - filepath = os.path.join(filename, f) - if f == 'index.html': - with open(filepath, 'r') as fp: - body = fp.read() - break - elif os.path.isdir(filepath): - f += '/' - files.append('<a href="{name}">{name}</a>'.format(name=f)) - else: - tmpl = ( - "<html><head><title>{url}" - "{files}") - body = tmpl.format(url=url, files='\n'.join(files)) - status, message = 200, "OK" - else: - status, message, body = 404, "Path not found", "Not found" - - headers = {'content-type': 'text/html'} - body_stream = io.StringIO(body) - return urllib.error.HTTPError(url, status, message, headers, body_stream) diff --git a/venv/Lib/site-packages/setuptools/py34compat.py b/venv/Lib/site-packages/setuptools/py34compat.py deleted file mode 100644 index 3ad9172..0000000 --- a/venv/Lib/site-packages/setuptools/py34compat.py +++ /dev/null @@ -1,13 +0,0 @@ -import importlib - -try: - import importlib.util -except ImportError: - pass - - -try: - module_from_spec = importlib.util.module_from_spec -except AttributeError: - def module_from_spec(spec): - return spec.loader.load_module(spec.name) diff --git a/venv/Lib/site-packages/setuptools/sandbox.py b/venv/Lib/site-packages/setuptools/sandbox.py deleted file mode 100644 index 034fc80..0000000 --- 
a/venv/Lib/site-packages/setuptools/sandbox.py +++ /dev/null @@ -1,530 +0,0 @@ -import os -import sys -import tempfile -import operator -import functools -import itertools -import re -import contextlib -import pickle -import textwrap -import builtins - -import pkg_resources -from distutils.errors import DistutilsError -from pkg_resources import working_set - -if sys.platform.startswith('java'): - import org.python.modules.posix.PosixModule as _os -else: - _os = sys.modules[os.name] -try: - _file = file -except NameError: - _file = None -_open = open - - -__all__ = [ - "AbstractSandbox", - "DirectorySandbox", - "SandboxViolation", - "run_setup", -] - - -def _execfile(filename, globals, locals=None): - """ - Python 3 implementation of execfile. - """ - mode = 'rb' - with open(filename, mode) as stream: - script = stream.read() - if locals is None: - locals = globals - code = compile(script, filename, 'exec') - exec(code, globals, locals) - - -@contextlib.contextmanager -def save_argv(repl=None): - saved = sys.argv[:] - if repl is not None: - sys.argv[:] = repl - try: - yield saved - finally: - sys.argv[:] = saved - - -@contextlib.contextmanager -def save_path(): - saved = sys.path[:] - try: - yield saved - finally: - sys.path[:] = saved - - -@contextlib.contextmanager -def override_temp(replacement): - """ - Monkey-patch tempfile.tempdir with replacement, ensuring it exists - """ - os.makedirs(replacement, exist_ok=True) - - saved = tempfile.tempdir - - tempfile.tempdir = replacement - - try: - yield - finally: - tempfile.tempdir = saved - - -@contextlib.contextmanager -def pushd(target): - saved = os.getcwd() - os.chdir(target) - try: - yield saved - finally: - os.chdir(saved) - - -class UnpickleableException(Exception): - """ - An exception representing another Exception that could not be pickled. - """ - - @staticmethod - def dump(type, exc): - """ - Always return a dumped (pickled) type and exc. If exc can't be pickled, - wrap it in UnpickleableException first. - """ - try: - return pickle.dumps(type), pickle.dumps(exc) - except Exception: - # get UnpickleableException inside the sandbox - from setuptools.sandbox import UnpickleableException as cls - - return cls.dump(cls, cls(repr(exc))) - - -class ExceptionSaver: - """ - A Context Manager that will save an exception, serialized, and restore it - later. - """ - - def __enter__(self): - return self - - def __exit__(self, type, exc, tb): - if not exc: - return - - # dump the exception - self._saved = UnpickleableException.dump(type, exc) - self._tb = tb - - # suppress the exception - return True - - def resume(self): - "restore and re-raise any exception" - - if '_saved' not in vars(self): - return - - type, exc = map(pickle.loads, self._saved) - raise exc.with_traceback(self._tb) - - -@contextlib.contextmanager -def save_modules(): - """ - Context in which imported modules are saved. - - Translates exceptions internal to the context into the equivalent exception - outside the context. - """ - saved = sys.modules.copy() - with ExceptionSaver() as saved_exc: - yield saved - - sys.modules.update(saved) - # remove any modules imported since - del_modules = ( - mod_name - for mod_name in sys.modules - if mod_name not in saved - # exclude any encodings modules. 
See #285 - and not mod_name.startswith('encodings.') - ) - _clear_modules(del_modules) - - saved_exc.resume() - - -def _clear_modules(module_names): - for mod_name in list(module_names): - del sys.modules[mod_name] - - -@contextlib.contextmanager -def save_pkg_resources_state(): - saved = pkg_resources.__getstate__() - try: - yield saved - finally: - pkg_resources.__setstate__(saved) - - -@contextlib.contextmanager -def setup_context(setup_dir): - temp_dir = os.path.join(setup_dir, 'temp') - with save_pkg_resources_state(): - with save_modules(): - with save_path(): - hide_setuptools() - with save_argv(): - with override_temp(temp_dir): - with pushd(setup_dir): - # ensure setuptools commands are available - __import__('setuptools') - yield - - -_MODULES_TO_HIDE = { - 'setuptools', - 'distutils', - 'pkg_resources', - 'Cython', - '_distutils_hack', -} - - -def _needs_hiding(mod_name): - """ - >>> _needs_hiding('setuptools') - True - >>> _needs_hiding('pkg_resources') - True - >>> _needs_hiding('setuptools_plugin') - False - >>> _needs_hiding('setuptools.__init__') - True - >>> _needs_hiding('distutils') - True - >>> _needs_hiding('os') - False - >>> _needs_hiding('Cython') - True - """ - base_module = mod_name.split('.', 1)[0] - return base_module in _MODULES_TO_HIDE - - -def hide_setuptools(): - """ - Remove references to setuptools' modules from sys.modules to allow the - invocation to import the most appropriate setuptools. This technique is - necessary to avoid issues such as #315 where setuptools upgrading itself - would fail to find a function declared in the metadata. - """ - _distutils_hack = sys.modules.get('_distutils_hack', None) - if _distutils_hack is not None: - _distutils_hack.remove_shim() - - modules = filter(_needs_hiding, sys.modules) - _clear_modules(modules) - - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - with setup_context(setup_dir): - try: - sys.argv[:] = [setup_script] + list(args) - sys.path.insert(0, setup_dir) - # reset to include setup dir, w/clean callback list - working_set.__init__() - working_set.callbacks.append(lambda dist: dist.activate()) - - with DirectorySandbox(setup_dir): - ns = dict(__file__=setup_script, __name__='__main__') - _execfile(setup_script, ns) - except SystemExit as v: - if v.args and v.args[0]: - raise - # Normal exit, just return - - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name - for name in dir(_os) - if not name.startswith('_') and hasattr(self, name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source, name)) - - def __enter__(self): - self._copy(self) - if _file: - builtins.file = self._file - builtins.open = self._open - self._active = True - - def __exit__(self, exc_type, exc_value, traceback): - self._active = False - if _file: - builtins.file = _file - builtins.open = _open - self._copy(_os) - - def run(self, func): - """Run 'func' under os sandboxing""" - with self: - return func() - - def _mk_dual_path_wrapper(name): - original = getattr(_os, name) - - def wrap(self, src, dst, *args, **kw): - if self._active: - src, dst = self._remap_pair(name, src, dst, *args, **kw) - return original(src, dst, *args, **kw) - - return wrap - - for name in ["rename", "link", "symlink"]: - if hasattr(_os, name): - locals()[name] = _mk_dual_path_wrapper(name) - - 
def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os, name) - - def wrap(self, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return original(path, *args, **kw) - - return wrap - - if _file: - _file = _mk_single_path_wrapper('file', _file) - _open = _mk_single_path_wrapper('open', _open) - for name in [ - "stat", - "listdir", - "chdir", - "open", - "chmod", - "chown", - "mkdir", - "remove", - "unlink", - "rmdir", - "utime", - "lchown", - "chroot", - "lstat", - "startfile", - "mkfifo", - "mknod", - "pathconf", - "access", - ]: - if hasattr(_os, name): - locals()[name] = _mk_single_path_wrapper(name) - - def _mk_single_with_return(name): - original = getattr(_os, name) - - def wrap(self, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return self._remap_output(name, original(path, *args, **kw)) - return original(path, *args, **kw) - - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os, name): - locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os, name) - - def wrap(self, *args, **kw): - retval = original(*args, **kw) - if self._active: - return self._remap_output(name, retval) - return retval - - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os, name): - locals()[name] = _mk_query(name) - - def _validate_path(self, path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self, operation, path, *args, **kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self, operation, path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation + '-from', src, *args, **kw), - self._remap_input(operation + '-to', dst, *args, **kw), - ) - - -if hasattr(os, 'devnull'): - _EXCEPTIONS = [os.devnull] -else: - _EXCEPTIONS = [] - - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys( - [ - "open", - "chmod", - "chown", - "mkdir", - "remove", - "unlink", - "rmdir", - "utime", - "lchown", - "chroot", - "mkfifo", - "mknod", - "tempnam", - ] - ) - - _exception_patterns = [] - "exempt writing to paths that match the pattern" - - def __init__(self, sandbox, exceptions=_EXCEPTIONS): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox, '') - self._exceptions = [ - os.path.normcase(os.path.realpath(path)) for path in exceptions - ] - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - from setuptools.sandbox import SandboxViolation - - raise SandboxViolation(operation, args, kw) - - if _file: - - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path, mode, *args, **kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path, mode, *args, **kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self, path): - active = self._active - try: - self._active = False - realpath = 
os.path.normcase(os.path.realpath(path)) - return ( - self._exempted(realpath) - or realpath == self._sandbox - or realpath.startswith(self._prefix) - ) - finally: - self._active = active - - def _exempted(self, filepath): - start_matches = ( - filepath.startswith(exception) for exception in self._exceptions - ) - pattern_matches = ( - re.match(pattern, filepath) for pattern in self._exception_patterns - ) - candidates = itertools.chain(start_matches, pattern_matches) - return any(candidates) - - def _remap_input(self, operation, path, *args, **kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src, dst) - - def open(self, file, flags, mode=0o777, *args, **kw): - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file, flags, mode, *args, **kw) - - -WRITE_FLAGS = functools.reduce( - operator.or_, - [ - getattr(_os, a, 0) - for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split() - ], -) - - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - tmpl = textwrap.dedent( - """ - SandboxViolation: {cmd}{args!r} {kwargs} - - The package setup script has attempted to modify files on your system - that are not within the EasyInstall build area, and has been aborted. - - This package cannot be safely installed by EasyInstall, and may not - support alternate installation locations even if you run its setup - script by hand. Please inform the package's author and the EasyInstall - maintainers to find out if a fix or workaround is available. 
- """ - ).lstrip() - - def __str__(self): - cmd, args, kwargs = self.args - return self.tmpl.format(**locals()) diff --git a/venv/Lib/site-packages/setuptools/script (dev).tmpl b/venv/Lib/site-packages/setuptools/script (dev).tmpl deleted file mode 100644 index 39a24b0..0000000 --- a/venv/Lib/site-packages/setuptools/script (dev).tmpl +++ /dev/null @@ -1,6 +0,0 @@ -# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r -__requires__ = %(spec)r -__import__('pkg_resources').require(%(spec)r) -__file__ = %(dev_path)r -with open(__file__) as f: - exec(compile(f.read(), __file__, 'exec')) diff --git a/venv/Lib/site-packages/setuptools/script.tmpl b/venv/Lib/site-packages/setuptools/script.tmpl deleted file mode 100644 index ff5efbc..0000000 --- a/venv/Lib/site-packages/setuptools/script.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r -__requires__ = %(spec)r -__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/venv/Lib/site-packages/setuptools/unicode_utils.py b/venv/Lib/site-packages/setuptools/unicode_utils.py deleted file mode 100644 index e84e65e..0000000 --- a/venv/Lib/site-packages/setuptools/unicode_utils.py +++ /dev/null @@ -1,42 +0,0 @@ -import unicodedata -import sys - - -# HFS Plus uses decomposed UTF-8 -def decompose(path): - if isinstance(path, str): - return unicodedata.normalize('NFD', path) - try: - path = path.decode('utf-8') - path = unicodedata.normalize('NFD', path) - path = path.encode('utf-8') - except UnicodeError: - pass # Not UTF-8 - return path - - -def filesys_decode(path): - """ - Ensure that the given path is decoded, - NONE when no expected encoding works - """ - - if isinstance(path, str): - return path - - fs_enc = sys.getfilesystemencoding() or 'utf-8' - candidates = fs_enc, 'utf-8' - - for enc in candidates: - try: - return path.decode(enc) - except UnicodeDecodeError: - continue - - -def try_encode(string, enc): - "turn unicode encoding into a functional routine" - try: - return string.encode(enc) - except UnicodeEncodeError: - return None diff --git a/venv/Lib/site-packages/setuptools/version.py b/venv/Lib/site-packages/setuptools/version.py deleted file mode 100644 index 95e1869..0000000 --- a/venv/Lib/site-packages/setuptools/version.py +++ /dev/null @@ -1,6 +0,0 @@ -import pkg_resources - -try: - __version__ = pkg_resources.get_distribution('setuptools').version -except Exception: - __version__ = 'unknown' diff --git a/venv/Lib/site-packages/setuptools/wheel.py b/venv/Lib/site-packages/setuptools/wheel.py deleted file mode 100644 index 9819e8b..0000000 --- a/venv/Lib/site-packages/setuptools/wheel.py +++ /dev/null @@ -1,213 +0,0 @@ -"""Wheels support.""" - -from distutils.util import get_platform -from distutils import log -import email -import itertools -import os -import posixpath -import re -import zipfile - -import pkg_resources -import setuptools -from pkg_resources import parse_version -from setuptools.extern.packaging.tags import sys_tags -from setuptools.extern.packaging.utils import canonicalize_name -from setuptools.command.egg_info import write_requirements - - -WHEEL_NAME = re.compile( - r"""^(?P.+?)-(?P\d.*?) - ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?) 
- )\.whl$""", - re.VERBOSE).match - -NAMESPACE_PACKAGE_INIT = \ - "__import__('pkg_resources').declare_namespace(__name__)\n" - - -def unpack(src_dir, dst_dir): - '''Move everything under `src_dir` to `dst_dir`, and delete the former.''' - for dirpath, dirnames, filenames in os.walk(src_dir): - subdir = os.path.relpath(dirpath, src_dir) - for f in filenames: - src = os.path.join(dirpath, f) - dst = os.path.join(dst_dir, subdir, f) - os.renames(src, dst) - for n, d in reversed(list(enumerate(dirnames))): - src = os.path.join(dirpath, d) - dst = os.path.join(dst_dir, subdir, d) - if not os.path.exists(dst): - # Directory does not exist in destination, - # rename it and prune it from os.walk list. - os.renames(src, dst) - del dirnames[n] - # Cleanup. - for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True): - assert not filenames - os.rmdir(dirpath) - - -class Wheel: - - def __init__(self, filename): - match = WHEEL_NAME(os.path.basename(filename)) - if match is None: - raise ValueError('invalid wheel name: %r' % filename) - self.filename = filename - for k, v in match.groupdict().items(): - setattr(self, k, v) - - def tags(self): - '''List tags (py_version, abi, platform) supported by this wheel.''' - return itertools.product( - self.py_version.split('.'), - self.abi.split('.'), - self.platform.split('.'), - ) - - def is_compatible(self): - '''Is the wheel is compatible with the current platform?''' - supported_tags = set( - (t.interpreter, t.abi, t.platform) for t in sys_tags()) - return next((True for t in self.tags() if t in supported_tags), False) - - def egg_name(self): - return pkg_resources.Distribution( - project_name=self.project_name, version=self.version, - platform=(None if self.platform == 'any' else get_platform()), - ).egg_name() + '.egg' - - def get_dist_info(self, zf): - # find the correct name of the .dist-info dir in the wheel file - for member in zf.namelist(): - dirname = posixpath.dirname(member) - if (dirname.endswith('.dist-info') and - canonicalize_name(dirname).startswith( - canonicalize_name(self.project_name))): - return dirname - raise ValueError("unsupported wheel format. .dist-info not found") - - def install_as_egg(self, destination_eggdir): - '''Install wheel as an egg directory.''' - with zipfile.ZipFile(self.filename) as zf: - self._install_as_egg(destination_eggdir, zf) - - def _install_as_egg(self, destination_eggdir, zf): - dist_basename = '%s-%s' % (self.project_name, self.version) - dist_info = self.get_dist_info(zf) - dist_data = '%s.data' % dist_basename - egg_info = os.path.join(destination_eggdir, 'EGG-INFO') - - self._convert_metadata(zf, destination_eggdir, dist_info, egg_info) - self._move_data_entries(destination_eggdir, dist_data) - self._fix_namespace_packages(egg_info, destination_eggdir) - - @staticmethod - def _convert_metadata(zf, destination_eggdir, dist_info, egg_info): - def get_metadata(name): - with zf.open(posixpath.join(dist_info, name)) as fp: - value = fp.read().decode('utf-8') - return email.parser.Parser().parsestr(value) - - wheel_metadata = get_metadata('WHEEL') - # Check wheel format version is supported. - wheel_version = parse_version(wheel_metadata.get('Wheel-Version')) - wheel_v1 = ( - parse_version('1.0') <= wheel_version < parse_version('2.0dev0') - ) - if not wheel_v1: - raise ValueError( - 'unsupported wheel format version: %s' % wheel_version) - # Extract to target directory. - os.mkdir(destination_eggdir) - zf.extractall(destination_eggdir) - # Convert metadata. 
- dist_info = os.path.join(destination_eggdir, dist_info) - dist = pkg_resources.Distribution.from_location( - destination_eggdir, dist_info, - metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info), - ) - - # Note: Evaluate and strip markers now, - # as it's difficult to convert back from the syntax: - # foobar; "linux" in sys_platform and extra == 'test' - def raw_req(req): - req.marker = None - return str(req) - install_requires = list(map(raw_req, dist.requires())) - extras_require = { - extra: [ - req - for req in map(raw_req, dist.requires((extra,))) - if req not in install_requires - ] - for extra in dist.extras - } - os.rename(dist_info, egg_info) - os.rename( - os.path.join(egg_info, 'METADATA'), - os.path.join(egg_info, 'PKG-INFO'), - ) - setup_dist = setuptools.Distribution( - attrs=dict( - install_requires=install_requires, - extras_require=extras_require, - ), - ) - # Temporarily disable info traces. - log_threshold = log._global_log.threshold - log.set_threshold(log.WARN) - try: - write_requirements( - setup_dist.get_command_obj('egg_info'), - None, - os.path.join(egg_info, 'requires.txt'), - ) - finally: - log.set_threshold(log_threshold) - - @staticmethod - def _move_data_entries(destination_eggdir, dist_data): - """Move data entries to their correct location.""" - dist_data = os.path.join(destination_eggdir, dist_data) - dist_data_scripts = os.path.join(dist_data, 'scripts') - if os.path.exists(dist_data_scripts): - egg_info_scripts = os.path.join( - destination_eggdir, 'EGG-INFO', 'scripts') - os.mkdir(egg_info_scripts) - for entry in os.listdir(dist_data_scripts): - # Remove bytecode, as it's not properly handled - # during easy_install scripts install phase. - if entry.endswith('.pyc'): - os.unlink(os.path.join(dist_data_scripts, entry)) - else: - os.rename( - os.path.join(dist_data_scripts, entry), - os.path.join(egg_info_scripts, entry), - ) - os.rmdir(dist_data_scripts) - for subdir in filter(os.path.exists, ( - os.path.join(dist_data, d) - for d in ('data', 'headers', 'purelib', 'platlib') - )): - unpack(subdir, destination_eggdir) - if os.path.exists(dist_data): - os.rmdir(dist_data) - - @staticmethod - def _fix_namespace_packages(egg_info, destination_eggdir): - namespace_packages = os.path.join( - egg_info, 'namespace_packages.txt') - if os.path.exists(namespace_packages): - with open(namespace_packages) as fp: - namespace_packages = fp.read().split() - for mod in namespace_packages: - mod_dir = os.path.join(destination_eggdir, *mod.split('.')) - mod_init = os.path.join(mod_dir, '__init__.py') - if not os.path.exists(mod_dir): - os.mkdir(mod_dir) - if not os.path.exists(mod_init): - with open(mod_init, 'w') as fp: - fp.write(NAMESPACE_PACKAGE_INIT) diff --git a/venv/Lib/site-packages/setuptools/windows_support.py b/venv/Lib/site-packages/setuptools/windows_support.py deleted file mode 100644 index cb977cf..0000000 --- a/venv/Lib/site-packages/setuptools/windows_support.py +++ /dev/null @@ -1,29 +0,0 @@ -import platform -import ctypes - - -def windows_only(func): - if platform.system() != 'Windows': - return lambda *args, **kwargs: None - return func - - -@windows_only -def hide_file(path): - """ - Set the hidden attribute on a file or directory. - - From http://stackoverflow.com/questions/19622133/ - - `path` must be text. 
- """ - __import__('ctypes.wintypes') - SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW - SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD - SetFileAttributes.restype = ctypes.wintypes.BOOL - - FILE_ATTRIBUTE_HIDDEN = 0x02 - - ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN) - if not ret: - raise ctypes.WinError() diff --git a/venv/Scripts/Activate.ps1 b/venv/Scripts/Activate.ps1 deleted file mode 100644 index 64c5aa3..0000000 --- a/venv/Scripts/Activate.ps1 +++ /dev/null @@ -1,398 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. - -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. - -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. 
- -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. - -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. - if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. 
-if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. -if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. -$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" - -# SIG # Begin signature block -# MIIcvwYJKoZIhvcNAQcCoIIcsDCCHKwCAQExDzANBglghkgBZQMEAgEFADB5Bgor -# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG -# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCAwnDYwEHaCQq0n -# 8NAvsN7H7BO7/48rXCNwrg891FS5vaCCC38wggUwMIIEGKADAgECAhAECRgbX9W7 -# ZnVTQ7VvlVAIMA0GCSqGSIb3DQEBCwUAMGUxCzAJBgNVBAYTAlVTMRUwEwYDVQQK -# EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xJDAiBgNV -# BAMTG0RpZ2lDZXJ0IEFzc3VyZWQgSUQgUm9vdCBDQTAeFw0xMzEwMjIxMjAwMDBa -# Fw0yODEwMjIxMjAwMDBaMHIxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2Vy -# dCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xMTAvBgNVBAMTKERpZ2lD -# ZXJ0IFNIQTIgQXNzdXJlZCBJRCBDb2RlIFNpZ25pbmcgQ0EwggEiMA0GCSqGSIb3 -# DQEBAQUAA4IBDwAwggEKAoIBAQD407Mcfw4Rr2d3B9MLMUkZz9D7RZmxOttE9X/l -# qJ3bMtdx6nadBS63j/qSQ8Cl+YnUNxnXtqrwnIal2CWsDnkoOn7p0WfTxvspJ8fT -# eyOU5JEjlpB3gvmhhCNmElQzUHSxKCa7JGnCwlLyFGeKiUXULaGj6YgsIJWuHEqH -# CN8M9eJNYBi+qsSyrnAxZjNxPqxwoqvOf+l8y5Kh5TsxHM/q8grkV7tKtel05iv+ -# 
bMt+dDk2DZDv5LVOpKnqagqrhPOsZ061xPeM0SAlI+sIZD5SlsHyDxL0xY4PwaLo -# LFH3c7y9hbFig3NBggfkOItqcyDQD2RzPJ6fpjOp/RnfJZPRAgMBAAGjggHNMIIB -# yTASBgNVHRMBAf8ECDAGAQH/AgEAMA4GA1UdDwEB/wQEAwIBhjATBgNVHSUEDDAK -# BggrBgEFBQcDAzB5BggrBgEFBQcBAQRtMGswJAYIKwYBBQUHMAGGGGh0dHA6Ly9v -# Y3NwLmRpZ2ljZXJ0LmNvbTBDBggrBgEFBQcwAoY3aHR0cDovL2NhY2VydHMuZGln -# aWNlcnQuY29tL0RpZ2lDZXJ0QXNzdXJlZElEUm9vdENBLmNydDCBgQYDVR0fBHow -# eDA6oDigNoY0aHR0cDovL2NybDQuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0QXNzdXJl -# ZElEUm9vdENBLmNybDA6oDigNoY0aHR0cDovL2NybDMuZGlnaWNlcnQuY29tL0Rp -# Z2lDZXJ0QXNzdXJlZElEUm9vdENBLmNybDBPBgNVHSAESDBGMDgGCmCGSAGG/WwA -# AgQwKjAoBggrBgEFBQcCARYcaHR0cHM6Ly93d3cuZGlnaWNlcnQuY29tL0NQUzAK -# BghghkgBhv1sAzAdBgNVHQ4EFgQUWsS5eyoKo6XqcQPAYPkt9mV1DlgwHwYDVR0j -# BBgwFoAUReuir/SSy4IxLVGLp6chnfNtyA8wDQYJKoZIhvcNAQELBQADggEBAD7s -# DVoks/Mi0RXILHwlKXaoHV0cLToaxO8wYdd+C2D9wz0PxK+L/e8q3yBVN7Dh9tGS -# dQ9RtG6ljlriXiSBThCk7j9xjmMOE0ut119EefM2FAaK95xGTlz/kLEbBw6RFfu6 -# r7VRwo0kriTGxycqoSkoGjpxKAI8LpGjwCUR4pwUR6F6aGivm6dcIFzZcbEMj7uo -# +MUSaJ/PQMtARKUT8OZkDCUIQjKyNookAv4vcn4c10lFluhZHen6dGRrsutmQ9qz -# sIzV6Q3d9gEgzpkxYz0IGhizgZtPxpMQBvwHgfqL2vmCSfdibqFT+hKUGIUukpHq -# aGxEMrJmoecYpJpkUe8wggZHMIIFL6ADAgECAhADPtXtoGXRuMkd/PkqbJvYMA0G -# CSqGSIb3DQEBCwUAMHIxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJ -# bmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xMTAvBgNVBAMTKERpZ2lDZXJ0 -# IFNIQTIgQXNzdXJlZCBJRCBDb2RlIFNpZ25pbmcgQ0EwHhcNMTgxMjE4MDAwMDAw -# WhcNMjExMjIyMTIwMDAwWjCBgzELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDU5ldyBI -# YW1wc2hpcmUxEjAQBgNVBAcTCVdvbGZlYm9ybzEjMCEGA1UEChMaUHl0aG9uIFNv -# ZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMTGlB5dGhvbiBTb2Z0d2FyZSBGb3Vu -# ZGF0aW9uMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAqr2kS7J1uW7o -# JRxlsdrETAjKarfoH5TI8PWST6Yb2xPooP7vHT4iaVXyL5Lze1f53Jw67Sp+u524 -# fJXf30qHViEWxumy2RWG0nciU2d+mMqzjlaAWSZNF0u4RcvyDJokEV0RUOqI5CG5 -# zPI3W9uQ6LiUk3HCYW6kpH177A5T3pw/Po8O8KErJGn1anaqtIICq99ySxrMad/2 -# hPMBRf6Ndah7f7HPn1gkSSTAoejyuqF5h+B0qI4+JK5+VLvz659VTbAWJsYakkxZ -# xVWYpFv4KeQSSwoo0DzMvmERsTzNvVBMWhu9OriJNg+QfFmf96zVTu93cZ+r7xMp -# bXyfIOGKhHMaRuZ8ihuWIx3gI9WHDFX6fBKR8+HlhdkaiBEWIsXRoy+EQUyK7zUs -# +FqOo2sRYttbs8MTF9YDKFZwyPjn9Wn+gLGd5NUEVyNvD9QVGBEtN7vx87bduJUB -# 8F4DylEsMtZTfjw/au6AmOnmneK5UcqSJuwRyZaGNk7y3qj06utx+HTTqHgi975U -# pxfyrwAqkovoZEWBVSpvku8PVhkBXcLmNe6MEHlFiaMoiADAeKmX5RFRkN+VrmYG -# Tg4zajxfdHeIY8TvLf48tTfmnQJd98geJQv/01NUy/FxuwqAuTkaez5Nl1LxP0Cp -# THhghzO4FRD4itT2wqTh4jpojw9QZnsCAwEAAaOCAcUwggHBMB8GA1UdIwQYMBaA -# FFrEuXsqCqOl6nEDwGD5LfZldQ5YMB0GA1UdDgQWBBT8Kr9+1L6s84KcpM97IgE7 -# uI8H8jAOBgNVHQ8BAf8EBAMCB4AwEwYDVR0lBAwwCgYIKwYBBQUHAwMwdwYDVR0f -# BHAwbjA1oDOgMYYvaHR0cDovL2NybDMuZGlnaWNlcnQuY29tL3NoYTItYXNzdXJl -# ZC1jcy1nMS5jcmwwNaAzoDGGL2h0dHA6Ly9jcmw0LmRpZ2ljZXJ0LmNvbS9zaGEy -# LWFzc3VyZWQtY3MtZzEuY3JsMEwGA1UdIARFMEMwNwYJYIZIAYb9bAMBMCowKAYI -# KwYBBQUHAgEWHGh0dHBzOi8vd3d3LmRpZ2ljZXJ0LmNvbS9DUFMwCAYGZ4EMAQQB -# MIGEBggrBgEFBQcBAQR4MHYwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2lj -# ZXJ0LmNvbTBOBggrBgEFBQcwAoZCaHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29t -# L0RpZ2lDZXJ0U0hBMkFzc3VyZWRJRENvZGVTaWduaW5nQ0EuY3J0MAwGA1UdEwEB -# /wQCMAAwDQYJKoZIhvcNAQELBQADggEBAEt1oS21X0axiafPjyY+vlYqjWKuUu/Y -# FuYWIEq6iRRaFabNDhj9RBFQF/aJiE5msrQEOfAD6/6gVSH91lZWBqg6NEeG9T9S -# XbiAPvJ9CEWFsdkXUrjbWhvCnuZ7kqUuU5BAumI1QRbpYgZL3UA+iZXkmjbGh1ln -# 8rUhWIxbBYL4Sg2nqpB44p7CUFYkPj/MbwU2gvBV2pXjj5WaskoZtsACMv5g42BN -# oVLoRAi+ev6s07POt+JtHRIm87lTyuc8wh0swTPUwksKbLU1Zdj9CpqtzXnuVE0w -# 50exJvRSK3Vt4g+0vigpI3qPmDdpkf9+4Mvy0XMNcqrthw20R+PkIlMxghCWMIIQ -# kgIBATCBhjByMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkw -# 
FwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMTEwLwYDVQQDEyhEaWdpQ2VydCBTSEEy -# IEFzc3VyZWQgSUQgQ29kZSBTaWduaW5nIENBAhADPtXtoGXRuMkd/PkqbJvYMA0G -# CWCGSAFlAwQCAQUAoIGYMBkGCSqGSIb3DQEJAzEMBgorBgEEAYI3AgEEMBwGCisG -# AQQBgjcCAQsxDjAMBgorBgEEAYI3AgEVMCwGCisGAQQBgjcCAQwxHjAcoBqAGABQ -# AHkAdABoAG8AbgAgADMALgA5AC4ANDAvBgkqhkiG9w0BCQQxIgQgBrni4mcRv7sM -# JHsxpROjRopOz2wuQVrJnn+lD7X7y+gwDQYJKoZIhvcNAQEBBQAEggIAgnraC5Ax -# LdvDJz/AUld/6WGZ21jxAG4ijZvDnAS7Hopm0vclO2+7jtddNTP0w1tbebW2o987 -# AjD16hqG+D96N/sB3vfZ86fVjARf3XuyCWBYuIkLnjir+MfaXNU1n+kJuT7DNpo6 -# H+BIUM8PYqLGo4SwHXC2H2d+VfMLNyZ+91LmqT9qAAC6aT+VuTvlC+BUF/J4N81f -# 3TCa0F7C9KT1cdAmKtt6EMIdAYqWp8r1merIFjD/olBTq9nLcyjTqE9lCb4Nf6J9 -# jyM8/FA8hD41nHZTCKRSPCFKNZRqVYOaiWBHxQxPtYKuLJzMgxK0QHQhjWNpXTLs -# C1G1hQxX0MOWzLmcgtvxh5AhlQS+oHUs4/ebzmaovVzjbQRPqZHLDzYOQeG+79JM -# qi5gQt4L7TksfvmQ/dI4nJtzVDYAjN1v9rJY1snSqBlnSWgOyyZJX7aYBgVM3uJV -# u6j5tKXnPW7/u6USlVjtD4yKxKKoctomYiSIjjJA7DVL9CoCSF2ZyqxtuXDR8VD7 -# fb8gS2XklEJ3wi8MbUg9LJtI5Q3e/Qursr9RpEL5uTjhW9xTV+ubc6SWMWMEj3RT -# +7PUi23Vdh2917qGR+jyrUap+GMCXrUyUsLkMR5UkiltErrubmRnSPTbkFJTfEcf -# aniVMNn3x63CGwXdmSgVJleq1n28KcM/A02hgg1FMIINQQYKKwYBBAGCNwMDATGC -# DTEwgg0tBgkqhkiG9w0BBwKggg0eMIINGgIBAzEPMA0GCWCGSAFlAwQCAQUAMHgG -# CyqGSIb3DQEJEAEEoGkEZzBlAgEBBglghkgBhv1sBwEwMTANBglghkgBZQMEAgEF -# AAQgnJMkz3GdNmSHANJI2WUD6lOcmRKl+QqVqKICyZcEo3wCEQCoQmR+Bv/7IKIC -# +H/HED8fGA8yMDIxMDQwNjE0MDc1MFqgggo3MIIE/jCCA+agAwIBAgIQDUJK4L46 -# iP9gQCHOFADw3TANBgkqhkiG9w0BAQsFADByMQswCQYDVQQGEwJVUzEVMBMGA1UE -# ChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMTEwLwYD -# VQQDEyhEaWdpQ2VydCBTSEEyIEFzc3VyZWQgSUQgVGltZXN0YW1waW5nIENBMB4X -# DTIxMDEwMTAwMDAwMFoXDTMxMDEwNjAwMDAwMFowSDELMAkGA1UEBhMCVVMxFzAV -# BgNVBAoTDkRpZ2lDZXJ0LCBJbmMuMSAwHgYDVQQDExdEaWdpQ2VydCBUaW1lc3Rh -# bXAgMjAyMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLmYYRnxYr1 -# DQikRcpja1HXOhFCvQp1dU2UtAxQtSYQ/h3Ib5FrDJbnGlxI70Tlv5thzRWRYlq4 -# /2cLnGP9NmqB+in43Stwhd4CGPN4bbx9+cdtCT2+anaH6Yq9+IRdHnbJ5MZ2djpT -# 0dHTWjaPxqPhLxs6t2HWc+xObTOKfF1FLUuxUOZBOjdWhtyTI433UCXoZObd048v -# V7WHIOsOjizVI9r0TXhG4wODMSlKXAwxikqMiMX3MFr5FK8VX2xDSQn9JiNT9o1j -# 6BqrW7EdMMKbaYK02/xWVLwfoYervnpbCiAvSwnJlaeNsvrWY4tOpXIc7p96AXP4 -# Gdb+DUmEvQECAwEAAaOCAbgwggG0MA4GA1UdDwEB/wQEAwIHgDAMBgNVHRMBAf8E -# AjAAMBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMIMEEGA1UdIAQ6MDgwNgYJYIZIAYb9 -# bAcBMCkwJwYIKwYBBQUHAgEWG2h0dHA6Ly93d3cuZGlnaWNlcnQuY29tL0NQUzAf -# BgNVHSMEGDAWgBT0tuEgHf4prtLkYaWyoiWyyBc1bjAdBgNVHQ4EFgQUNkSGjqS6 -# sGa+vCgtHUQ23eNqerwwcQYDVR0fBGowaDAyoDCgLoYsaHR0cDovL2NybDMuZGln -# aWNlcnQuY29tL3NoYTItYXNzdXJlZC10cy5jcmwwMqAwoC6GLGh0dHA6Ly9jcmw0 -# LmRpZ2ljZXJ0LmNvbS9zaGEyLWFzc3VyZWQtdHMuY3JsMIGFBggrBgEFBQcBAQR5 -# MHcwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBPBggrBgEF -# BQcwAoZDaHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0U0hBMkFz -# c3VyZWRJRFRpbWVzdGFtcGluZ0NBLmNydDANBgkqhkiG9w0BAQsFAAOCAQEASBzc -# temaI7znGucgDo5nRv1CclF0CiNHo6uS0iXEcFm+FKDlJ4GlTRQVGQd58NEEw4bZ -# O73+RAJmTe1ppA/2uHDPYuj1UUp4eTZ6J7fz51Kfk6ftQ55757TdQSKJ+4eiRgNO -# /PT+t2R3Y18jUmmDgvoaU+2QzI2hF3MN9PNlOXBL85zWenvaDLw9MtAby/Vh/HUI -# AHa8gQ74wOFcz8QRcucbZEnYIpp1FUL1LTI4gdr0YKK6tFL7XOBhJCVPst/JKahz -# Q1HavWPWH1ub9y4bTxMd90oNcX6Xt/Q/hOvB46NJofrOp79Wz7pZdmGJX36ntI5n -# ePk2mOHLKNpbh6aKLzCCBTEwggQZoAMCAQICEAqhJdbWMht+QeQF2jaXwhUwDQYJ -# KoZIhvcNAQELBQAwZTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IElu -# YzEZMBcGA1UECxMQd3d3LmRpZ2ljZXJ0LmNvbTEkMCIGA1UEAxMbRGlnaUNlcnQg -# QXNzdXJlZCBJRCBSb290IENBMB4XDTE2MDEwNzEyMDAwMFoXDTMxMDEwNzEyMDAw -# MFowcjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UE -# 
CxMQd3d3LmRpZ2ljZXJ0LmNvbTExMC8GA1UEAxMoRGlnaUNlcnQgU0hBMiBBc3N1 -# cmVkIElEIFRpbWVzdGFtcGluZyBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -# AQoCggEBAL3QMu5LzY9/3am6gpnFOVQoV7YjSsQOB0UzURB90Pl9TWh+57ag9I2z -# iOSXv2MhkJi/E7xX08PhfgjWahQAOPcuHjvuzKb2Mln+X2U/4Jvr40ZHBhpVfgsn -# fsCi9aDg3iI/Dv9+lfvzo7oiPhisEeTwmQNtO4V8CdPuXciaC1TjqAlxa+DPIhAP -# dc9xck4Krd9AOly3UeGheRTGTSQjMF287DxgaqwvB8z98OpH2YhQXv1mblZhJymJ -# hFHmgudGUP2UKiyn5HU+upgPhH+fMRTWrdXyZMt7HgXQhBlyF/EXBu89zdZN7wZC -# /aJTKk+FHcQdPK/P2qwQ9d2srOlW/5MCAwEAAaOCAc4wggHKMB0GA1UdDgQWBBT0 -# tuEgHf4prtLkYaWyoiWyyBc1bjAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYunpyGd -# 823IDzASBgNVHRMBAf8ECDAGAQH/AgEAMA4GA1UdDwEB/wQEAwIBhjATBgNVHSUE -# DDAKBggrBgEFBQcDCDB5BggrBgEFBQcBAQRtMGswJAYIKwYBBQUHMAGGGGh0dHA6 -# Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBDBggrBgEFBQcwAoY3aHR0cDovL2NhY2VydHMu -# ZGlnaWNlcnQuY29tL0RpZ2lDZXJ0QXNzdXJlZElEUm9vdENBLmNydDCBgQYDVR0f -# BHoweDA6oDigNoY0aHR0cDovL2NybDQuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0QXNz -# dXJlZElEUm9vdENBLmNybDA6oDigNoY0aHR0cDovL2NybDMuZGlnaWNlcnQuY29t -# L0RpZ2lDZXJ0QXNzdXJlZElEUm9vdENBLmNybDBQBgNVHSAESTBHMDgGCmCGSAGG -# /WwAAgQwKjAoBggrBgEFBQcCARYcaHR0cHM6Ly93d3cuZGlnaWNlcnQuY29tL0NQ -# UzALBglghkgBhv1sBwEwDQYJKoZIhvcNAQELBQADggEBAHGVEulRh1Zpze/d2nyq -# Y3qzeM8GN0CE70uEv8rPAwL9xafDDiBCLK938ysfDCFaKrcFNB1qrpn4J6Jmvwmq -# YN92pDqTD/iy0dh8GWLoXoIlHsS6HHssIeLWWywUNUMEaLLbdQLgcseY1jxk5R9I -# EBhfiThhTWJGJIdjjJFSLK8pieV4H9YLFKWA1xJHcLN11ZOFk362kmf7U2GJqPVr -# lsD0WGkNfMgBsbkodbeZY4UijGHKeZR+WfyMD+NvtQEmtmyl7odRIeRYYJu6DC0r -# baLEfrvEJStHAgh8Sa4TtuF8QkIoxhhWz0E0tmZdtnR79VYzIi8iNrJLokqV2PWm -# jlIxggJNMIICSQIBATCBhjByMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNl -# cnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMTEwLwYDVQQDEyhEaWdp -# Q2VydCBTSEEyIEFzc3VyZWQgSUQgVGltZXN0YW1waW5nIENBAhANQkrgvjqI/2BA -# Ic4UAPDdMA0GCWCGSAFlAwQCAQUAoIGYMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0B -# CRABBDAcBgkqhkiG9w0BCQUxDxcNMjEwNDA2MTQwNzUwWjArBgsqhkiG9w0BCRAC -# DDEcMBowGDAWBBTh14Ko4ZG+72vKFpG1qrSUpiSb8zAvBgkqhkiG9w0BCQQxIgQg -# 5mFO2l6qrJzEhKgscyI4e20+BlIPLZai0pXpS+XFVIowDQYJKoZIhvcNAQEBBQAE -# ggEApEkQXZn24/PS2O3rXicGnIfxtSxqOLcJFE8C4TcyBsvtgHfiDXPbbctdnpbb -# KZhX60fHqjr98I17Lqg7GHop2SOZHrR3NOEJcbHxHsI74qrCg6b70MHXh2Q1OLzQ -# hCc4JQUv7O/63bzVyJ9H4W1MgHOdmAlNSc3fWGtj4K4jhcM3uHnVl1gF4bJOWhMs -# W5IxHeBmpO4/Xv0upkbQXtmPooNgxwYRTosEyU6tkuDWRvQlddhNndOgX53r6Qsz -# CWdCDv2CiUaUyKOJW8vhO+DKqyK9Cobq537UKIl047zb5yFXfzQ4u/YGeMukkoBt -# 10uT/66Q5dEY8U/Y04CnnzJ83w== -# SIG # End signature block diff --git a/venv/Scripts/activate b/venv/Scripts/activate deleted file mode 100644 index 5d8da04..0000000 --- a/venv/Scripts/activate +++ /dev/null @@ -1,66 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null - fi - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! 
- unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV="C:\Users\annas\Desktop\projekty_studia\AI_wozek_widlowy\venv" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/Scripts:$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - PS1="(venv) ${PS1:-}" - export PS1 -fi - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null -fi diff --git a/venv/Scripts/activate.bat b/venv/Scripts/activate.bat deleted file mode 100644 index d2b07a3..0000000 --- a/venv/Scripts/activate.bat +++ /dev/null @@ -1,33 +0,0 @@ -@echo off - -rem This file is UTF-8 encoded, so we need to update the current code page while executing it -for /f "tokens=2 delims=:." %%a in ('"%SystemRoot%\System32\chcp.com"') do ( - set _OLD_CODEPAGE=%%a -) -if defined _OLD_CODEPAGE ( - "%SystemRoot%\System32\chcp.com" 65001 > nul -) - -set VIRTUAL_ENV=C:\Users\annas\Desktop\projekty_studia\AI_wozek_widlowy\venv - -if not defined PROMPT set PROMPT=$P$G - -if defined _OLD_VIRTUAL_PROMPT set PROMPT=%_OLD_VIRTUAL_PROMPT% -if defined _OLD_VIRTUAL_PYTHONHOME set PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME% - -set _OLD_VIRTUAL_PROMPT=%PROMPT% -set PROMPT=(venv) %PROMPT% - -if defined PYTHONHOME set _OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME% -set PYTHONHOME= - -if defined _OLD_VIRTUAL_PATH set PATH=%_OLD_VIRTUAL_PATH% -if not defined _OLD_VIRTUAL_PATH set _OLD_VIRTUAL_PATH=%PATH% - -set PATH=%VIRTUAL_ENV%\Scripts;%PATH% - -:END -if defined _OLD_CODEPAGE ( - "%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul - set _OLD_CODEPAGE= -) diff --git a/venv/Scripts/deactivate.bat b/venv/Scripts/deactivate.bat deleted file mode 100644 index 1205c61..0000000 --- a/venv/Scripts/deactivate.bat +++ /dev/null @@ -1,21 +0,0 @@ -@echo off - -if defined _OLD_VIRTUAL_PROMPT ( - set "PROMPT=%_OLD_VIRTUAL_PROMPT%" -) -set _OLD_VIRTUAL_PROMPT= - -if defined _OLD_VIRTUAL_PYTHONHOME ( - set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%" - set _OLD_VIRTUAL_PYTHONHOME= -) - -if defined _OLD_VIRTUAL_PATH ( - set "PATH=%_OLD_VIRTUAL_PATH%" -) - -set _OLD_VIRTUAL_PATH= - -set VIRTUAL_ENV= - -:END diff --git a/venv/Scripts/pip.exe b/venv/Scripts/pip.exe deleted file mode 100644 index 5f7120a..0000000 Binary files a/venv/Scripts/pip.exe and /dev/null differ diff --git a/venv/Scripts/pip3.9.exe b/venv/Scripts/pip3.9.exe deleted file mode 100644 index 5f7120a..0000000 Binary files a/venv/Scripts/pip3.9.exe and /dev/null differ diff --git a/venv/Scripts/pip3.exe b/venv/Scripts/pip3.exe deleted file mode 100644 index 5f7120a..0000000 Binary files a/venv/Scripts/pip3.exe and /dev/null differ diff --git a/venv/Scripts/python.exe b/venv/Scripts/python.exe deleted file mode 100644 index 2b3a907..0000000 Binary files a/venv/Scripts/python.exe and /dev/null differ diff --git a/venv/Scripts/pythonw.exe b/venv/Scripts/pythonw.exe deleted file mode 100644 index 254cb97..0000000 Binary files a/venv/Scripts/pythonw.exe and /dev/null differ diff --git a/venv/pyvenv.cfg b/venv/pyvenv.cfg deleted file mode 
100644 index 279fcda..0000000 --- a/venv/pyvenv.cfg +++ /dev/null @@ -1,3 +0,0 @@ -home = C:\Users\annas\AppData\Local\Programs\Python\Python39 -include-system-site-packages = false -version = 3.9.4